diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
index f3d3b1e..d524310 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/LocalContainerLauncher.java
@@ -50,7 +50,7 @@
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerRemoteLaunchEvent;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.service.AbstractService;
@@ -129,7 +129,7 @@ public void handle(ContainerLauncherEvent event) {
     try {
       eventQueue.put(event);
     } catch (InterruptedException e) {
-      throw new YarnException(e); // FIXME? YarnException is "for runtime exceptions only"
+      throw new YarnRuntimeException(e); // FIXME? YarnRuntimeException is "for runtime exceptions only"
     }
   }
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
index 38a4345..e30d414 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/TaskAttemptListenerImpl.java
@@ -52,7 +52,7 @@
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.util.StringInterner;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.service.CompositeService;
 
 /**
@@ -134,7 +134,7 @@ protected void startRpcServer() {
       server.start();
       this.address = NetUtils.getConnectAddress(server);
     } catch (IOException e) {
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
   }
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java
index b4873de..e4f6ffa 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryCopyService.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.service.CompositeService;
@@ -76,7 +76,7 @@ public void start() {
       //TODO should we parse on a background thread???
       parse();
     } catch (IOException e) {
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
     super.start();
   }
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index 296559e..6c671bf 100644
--- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -52,7 +52,7 @@
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.service.AbstractService;
@@ -131,7 +131,7 @@ public void init(Configuration conf) {
           JobHistoryUtils.getHistoryIntermediateDoneDirForUser(conf);
     } catch (IOException e) {
       LOG.error("Failed while getting the configured log directories", e);
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
 
     //Check for the existence of the history staging dir. Maybe create it.
@@ -144,7 +144,7 @@ public void init(Configuration conf) {
     } catch (IOException e) {
       LOG.error("Failed while checking for/creating history staging path: ["
           + stagingDirPath + "]", e);
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
 
     //Check for the existence of intermediate done dir.
@@ -178,13 +178,13 @@ public void init(Configuration conf) {
               + ". Either set to true or pre-create this directory with"
               + " appropriate permissions";
           LOG.error(message);
-          throw new YarnException(message);
+          throw new YarnRuntimeException(message);
         }
       }
     } catch (IOException e) {
       LOG.error("Failed checking for the existance of history intermediate "
           + "done directory: [" + doneDirPath + "]");
-      throw new YarnException(e);
+      throw new YarnRuntimeException(e);
     }
 
     //Check/create user directory under intermediate done dir.
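The hunks above all apply one pattern: service setup code whose entry points (init(), start()) cannot declare checked exceptions wraps IOException in the renamed unchecked YarnRuntimeException and rethrows. The sketch below is a minimal, self-contained illustration of that shape; the YarnRuntimeException and HistoryDirsSetup classes here are stand-ins written for this note, not the Hadoop classes touched by the patch.

```java
import java.io.IOException;

// Hypothetical minimal version of the unchecked wrapper this patch switches to.
// The real class lives in the org.apache.hadoop.yarn package at this point in the codebase.
class YarnRuntimeException extends RuntimeException {
  YarnRuntimeException(String message) { super(message); }
  YarnRuntimeException(Throwable cause) { super(cause); }
  YarnRuntimeException(String message, Throwable cause) { super(message, cause); }
}

class HistoryDirsSetup {
  // init()-style methods in the AM services are not declared to throw checked
  // exceptions, so I/O failures are logged and rethrown unchecked.
  void init(String doneDirPath) {
    try {
      ensureDirExists(doneDirPath);
    } catch (IOException e) {
      // same shape as the patched catch blocks above
      throw new YarnRuntimeException(
          "Failed while checking for/creating history path: [" + doneDirPath + "]", e);
    }
  }

  private void ensureDirExists(String path) throws IOException {
    if (path == null || path.isEmpty()) {
      throw new IOException("invalid path");
    }
    // real code would call FileSystem/FileContext here
  }

  public static void main(String[] args) {
    new HistoryDirsSetup().init("/tmp/history/done");
  }
}
```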
@@ -196,7 +196,7 @@ public void init(Configuration conf) { } catch (IOException e) { LOG.error("Error creating user intermediate history done directory: [ " + doneDirPrefixPath + "]", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Maximum number of unflushed completion-events that can stay in the queue @@ -457,7 +457,7 @@ public void handle(JobHistoryEvent event) { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -479,7 +479,7 @@ protected void handleEvent(JobHistoryEvent event) { } catch (IOException ioe) { LOG.error("Error JobHistoryEventHandler in handleEvent: " + event, ioe); - throw new YarnException(ioe); + throw new YarnRuntimeException(ioe); } } @@ -501,7 +501,7 @@ protected void handleEvent(JobHistoryEvent event) { } catch (IOException e) { LOG.error("Error writing History Event: " + event.getHistoryEvent(), e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) { @@ -523,7 +523,7 @@ protected void handleEvent(JobHistoryEvent event) { mi.getJobIndexInfo().setJobStatus(JobState.SUCCEEDED.toString()); closeEventWriter(event.getJobID()); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -539,7 +539,7 @@ protected void handleEvent(JobHistoryEvent event) { mi.getJobIndexInfo().setJobStatus(jucEvent.getStatus()); closeEventWriter(event.getJobID()); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java index 2a3f9c1..eb4e6c3 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java @@ -113,7 +113,7 @@ import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; @@ -301,7 +301,7 @@ public void init(final Configuration conf) { } } } catch (IOException e) { - throw new YarnException("Error while initializing", e); + throw new YarnRuntimeException("Error while initializing", e); } if (errorHappenedShutDown) { @@ -442,7 +442,7 @@ private OutputCommitter createOutputCommitter(Configuration conf) { .getOutputFormatClass(), conf); committer = outputFormat.getOutputCommitter(taskContext); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } else { committer = ReflectionUtils.newInstance(conf.getClass( @@ -623,7 +623,7 @@ protected void downloadTokensAndSetupUGI(Configuration conf) { + jobTokenFile); currentUser.addCredentials(fsTokens); // For use by AppMaster itself. 
} catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -662,19 +662,19 @@ protected Speculator createSpeculator(Configuration conf, AppContext context) { } catch (InstantiationException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (IllegalAccessException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (InvocationTargetException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (NoSuchMethodException ex) { LOG.error("Can't make a speculator -- check " + MRJobConfig.MR_AM_JOB_SPECULATOR, ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java index 93326cd..123b1f9 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/commit/CommitterEventHandler.java @@ -48,7 +48,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.AbstractService; @@ -103,7 +103,7 @@ public void init(Configuration conf) { endCommitSuccessFile = MRApps.getEndJobCommitSuccessFile(conf, user, jobId); endCommitFailureFile = MRApps.getEndJobCommitFailureFile(conf, user, jobId); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -143,7 +143,7 @@ public void handle(CommitterEvent event) { try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -218,7 +218,7 @@ public void run() { handleTaskAbort((CommitterTaskAbortEvent) event); break; default: - throw new YarnException("Unexpected committer event " + throw new YarnRuntimeException("Unexpected committer event " + event.toString()); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java index 367b028..4645e86 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java @@ -113,7 +113,7 @@ import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.StringUtils; 
import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; @@ -1466,7 +1466,7 @@ private void createReduceTasks(JobImpl job) { job.conf, job.remoteJobSubmitDir); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } return allTaskSplitMetaInfo; } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java index 5fb4f89..2d9f9d4 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java @@ -111,7 +111,7 @@ import org.apache.hadoop.util.StringInterner; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.Container; @@ -733,7 +733,7 @@ private static ContainerLaunchContext createCommonContainerLaunchContext( initialAppClasspath); } } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Shell @@ -1207,7 +1207,7 @@ private static TaskAttemptState getExternalState( case SUCCEEDED: return TaskAttemptState.SUCCEEDED; default: - throw new YarnException("Attempt to convert invalid " + throw new YarnRuntimeException("Attempt to convert invalid " + "stateMachineTaskAttemptState to externalTaskAttemptState: " + smState); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java index 5911f3c..25b0f85 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java @@ -48,7 +48,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerResponse; @@ -423,7 +423,7 @@ public void handle(ContainerLauncherEvent event) { try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } 
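The ContainerLauncherImpl change just above is the event-queue variant of the same pattern: handle() is called by the dispatcher and cannot throw InterruptedException, so an interrupt while enqueueing is rethrown unchecked. A compact sketch follows, using a plain RuntimeException and hypothetical ContainerEvent/LauncherSketch stand-ins rather than the real YARN types.

```java
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

// Hypothetical stand-in for the launcher event type.
class ContainerEvent {
  final String id;
  ContainerEvent(String id) { this.id = id; }
}

class LauncherSketch {
  private final BlockingQueue<ContainerEvent> eventQueue = new LinkedBlockingQueue<>();

  // handle() cannot declare checked exceptions, so the interrupt is rethrown
  // unchecked, mirroring the patched ContainerLauncherImpl.handle().
  void handle(ContainerEvent event) {
    try {
      eventQueue.put(event);
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt();  // preserve interrupt status
      throw new RuntimeException(e);       // YarnRuntimeException in the real code
    }
  }

  public static void main(String[] args) {
    LauncherSketch s = new LauncherSketch();
    s.handle(new ContainerEvent("container_0001"));
    System.out.println("queued events: " + s.eventQueue.size());
  }
}
```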
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java index 29ba7da..96cb355 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java @@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; import org.apache.hadoop.mapreduce.v2.app.rm.RMCommunicator; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; @@ -110,7 +110,7 @@ protected synchronized void heartbeat() throws Exception { LOG.error("Could not contact RM after " + retryInterval + " milliseconds."); eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.INTERNAL_ERROR)); - throw new YarnException("Could not contact RM after " + + throw new YarnRuntimeException("Could not contact RM after " + retryInterval + " milliseconds."); } // Throw this up to the caller, which may decide to ignore it and @@ -123,7 +123,7 @@ protected synchronized void heartbeat() throws Exception { // this application must clean itself up. 
eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.JOB_AM_REBOOT)); - throw new YarnException("Resource Manager doesn't recognize AttemptId: " + + throw new YarnRuntimeException("Resource Manager doesn't recognize AttemptId: " + this.getContext().getApplicationID()); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java index 0e4cfe0..369b525 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java @@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; @@ -163,7 +163,7 @@ protected void register() { LOG.info("maxContainerCapability: " + maxContainerCapability.getMemory()); } catch (Exception are) { LOG.error("Exception while registering", are); - throw new YarnException(are); + throw new YarnRuntimeException(are); } } @@ -237,7 +237,7 @@ public void run() { Thread.sleep(rmPollInterval); try { heartbeat(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { LOG.error("Error communicating with RM: " + e.getMessage() , e); return; } catch (Exception e) { @@ -273,7 +273,7 @@ protected AMRMProtocol createSchedulerProxy() { try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // CurrentUser should already have AMToken loaded. 
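The RMCommunicator hunks above, together with the RMContainerRequestor hunks further down, show why two exception types remain after the rename: the heartbeat loop treats the unchecked YarnRuntimeException as fatal, while the checked exception thrown by the allocate RPC (YarnRemoteException, renamed to YarnException later in this patch) is converted to IOException at the call site. Below is a rough, self-contained sketch of that split, with ProtocolException and HeartbeatSketch as hypothetical stand-ins for the real protocol and allocator classes.

```java
import java.io.IOException;

// Hypothetical stand-in for the checked protocol exception (the role played by
// the renamed org.apache.hadoop.yarn.exceptions.YarnException).
class ProtocolException extends Exception {
  ProtocolException(String message) { super(message); }
}

class HeartbeatSketch {
  private int beats = 0;

  // RPC boundary: checked protocol failures are converted to IOException,
  // as in the patched RMContainerRequestor.makeRemoteRequest().
  void makeRemoteRequest() throws IOException {
    try {
      allocate();
    } catch (ProtocolException e) {
      throw new IOException(e);
    }
  }

  private void allocate() throws ProtocolException {
    if (++beats > 2) {
      throw new ProtocolException("RM rejected allocate call");
    }
  }

  // Heartbeat loop: an unchecked failure stops the thread, anything else is
  // logged and retried (the shape of the patched RMCommunicator.run()).
  void runLoop() {
    for (int i = 0; i < 5; i++) {
      try {
        makeRemoteRequest();
      } catch (RuntimeException e) {
        System.err.println("fatal error communicating with RM: " + e);
        return;
      } catch (IOException e) {
        System.err.println("heartbeat failed, will retry: " + e);
      }
    }
  }

  public static void main(String[] args) {
    new HeartbeatSketch().runLoop();
  }
}
```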
diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java index a03dfdd..d21c6fa 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java @@ -58,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptKillEvent; import org.apache.hadoop.util.StringInterner; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ContainerExitStatus; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.Container; @@ -274,7 +274,7 @@ public void handle(ContainerAllocatorEvent event) { try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -563,7 +563,7 @@ public void rampDownReduces(int rampDown) { LOG.error("Could not contact RM after " + retryInterval + " milliseconds."); eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.INTERNAL_ERROR)); - throw new YarnException("Could not contact RM after " + + throw new YarnRuntimeException("Could not contact RM after " + retryInterval + " milliseconds."); } // Throw this up to the caller, which may decide to ignore it and @@ -575,7 +575,7 @@ public void rampDownReduces(int rampDown) { // this application must clean itself up. eventHandler.handle(new JobEvent(this.getJob().getID(), JobEventType.JOB_AM_REBOOT)); - throw new YarnException("Resource Manager doesn't recognize AttemptId: " + + throw new YarnRuntimeException("Resource Manager doesn't recognize AttemptId: " + this.getContext().getApplicationID()); } int newHeadRoom = getAvailableResources() != null ? 
getAvailableResources().getMemory() : 0; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java index d99a9dd..64ce722 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java @@ -36,14 +36,14 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId; import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.client.ClientService; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.util.BuilderUtils; @@ -138,7 +138,7 @@ public void init(Configuration conf) { MRJobConfig.DEFAULT_MR_AM_IGNORE_BLACKLISTING_BLACKLISTED_NODE_PERCENT); LOG.info("maxTaskFailuresPerNode is " + maxTaskFailuresPerNode); if (blacklistDisablePercent < -1 || blacklistDisablePercent > 100) { - throw new YarnException("Invalid blacklistDisablePercent: " + throw new YarnRuntimeException("Invalid blacklistDisablePercent: " + blacklistDisablePercent + ". 
Should be an integer between 0 and 100 or -1 to disabled"); } @@ -153,7 +153,7 @@ protected AllocateResponse makeRemoteRequest() throws IOException { AllocateResponse allocateResponse; try { allocateResponse = scheduler.allocate(allocateRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } lastResponseID = allocateResponse.getResponseId(); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java index b2e437b..3ecb94e 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java @@ -48,7 +48,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType; import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus; import org.apache.hadoop.yarn.Clock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.service.AbstractService; @@ -129,16 +129,16 @@ public DefaultSpeculator(Configuration conf, AppContext context, Clock clock) { estimator.contextualize(conf, context); } catch (InstantiationException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (IllegalAccessException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (InvocationTargetException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } catch (NoSuchMethodException ex) { LOG.error("Can't make a speculation runtime extimator", ex); - throw new YarnException(ex); + throw new YarnRuntimeException(ex); } return estimator; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java index 1e671aa..e9a1b1c 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java @@ -58,7 +58,7 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.webapp.BadRequestException; import org.apache.hadoop.yarn.webapp.NotFoundException; @@ -102,8 +102,8 @@ public static Job getJobFromJobIdString(String jid, AppContext appCtx) throws No Job job; try { jobId = MRApps.toJobID(jid); - } catch 
(YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -132,8 +132,8 @@ public static Task getTaskFromTaskIdString(String tid, Job job) throws NotFoundE Task task; try { taskID = MRApps.toTaskID(tid); - } catch (YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -165,8 +165,8 @@ public static TaskAttempt getTaskAttemptFromTaskAttemptString(String attId, Task TaskAttempt ta; try { attemptId = MRApps.toTaskAttemptID(attId); - } catch (YarnException e) { - // TODO: after MAPREDUCE-2793 YarnException is probably not expected here + } catch (YarnRuntimeException e) { + // TODO: after MAPREDUCE-2793 YarnRuntimeException is probably not expected here // anymore but keeping it for now just in case other stuff starts failing. // Also, the webservice should ideally return BadRequest (HTTP:400) when // the id is malformed instead of NotFound (HTTP:404). The webserver on @@ -304,7 +304,7 @@ public TasksInfo getJobTasks(@Context HttpServletRequest hsr, if (type != null && !type.isEmpty()) { try { ttype = MRApps.taskType(type); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { throw new BadRequestException("tasktype must be either m or r"); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java index bfa6ca6..fdb6c61 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/jobhistory/TestJobHistoryEventHandler.java @@ -40,7 +40,7 @@ import org.apache.hadoop.mapreduce.v2.app.AppContext; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -251,7 +251,7 @@ private String setupTestWorkDir() { return testWorkDir.getAbsolutePath(); } catch (Exception e) { LOG.warn("Could not cleanup", e); - throw new YarnException("could not cleanup test dir", e); + throw new YarnRuntimeException("could not cleanup test dir", e); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java index ebf38b9..bcc5fbb 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java @@ -85,7 +85,7 @@ import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; @@ -196,7 +196,7 @@ public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId, FileContext.getLocalFSFileContext().delete(testAbsPath, true); } catch (Exception e) { LOG.warn("COULD NOT CLEANUP: " + testAbsPath, e); - throw new YarnException("could not cleanup test dir", e); + throw new YarnRuntimeException("could not cleanup test dir", e); } } @@ -214,7 +214,7 @@ public void init(Configuration conf) { FileSystem fs = getFileSystem(conf); fs.mkdirs(stagingDir); } catch (Exception e) { - throw new YarnException("Error creating staging dir", e); + throw new YarnRuntimeException("Error creating staging dir", e); } super.init(conf); @@ -403,7 +403,7 @@ protected Job createJob(Configuration conf, JobStateInternal forcedState, try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Job newJob = new TestJob(getJobId(), getAttemptID(), conf, getDispatcher().getEventHandler(), diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java index a6dc566..4b6b0e3 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java @@ -33,7 +33,7 @@ import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator; import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent; import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; @@ -126,7 +126,7 @@ public void handle(ContainerAllocatorEvent event) { try { eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @Override diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java index fa7130a..c71a315 100644 --- 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java @@ -58,7 +58,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -451,7 +451,7 @@ protected void downloadTokensAndSetupUGI(Configuration conf) { try { this.currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java index 41e88a8..3d1c80b 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java @@ -74,7 +74,7 @@ import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; import org.apache.hadoop.yarn.SystemClock; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -1430,7 +1430,7 @@ public void sendFailure(ContainerFailedEvent f) { super.heartbeat(); } catch (Exception e) { LOG.error("error in heartbeat ", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } List result diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java index 8a0cf16..a8cedc6 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java @@ -49,7 +49,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -292,7 +292,7 @@ protected Job createJob(Configuration conf, JobStateInternal forcedState, try { currentUser = 
UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Job newJob = new TestJob(getJobId(), getAttemptID(), conf, getDispatcher().getEventHandler(), diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java index 167c291..283ff68 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/commit/TestCommitterEventHandler.java @@ -60,7 +60,7 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.JobCommitFailedEvent; import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Event; @@ -295,7 +295,7 @@ public void testFailure() throws Exception { when(mockContext.getEventHandler()).thenReturn(waitForItHandler); when(mockContext.getClock()).thenReturn(mockClock); - doThrow(new YarnException("Intentional Failure")).when(mockCommitter) + doThrow(new YarnRuntimeException("Intentional Failure")).when(mockCommitter) .commitJob(any(JobContext.class)); handler.init(conf); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java index bd50b9c..1f6cc2b 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncherImpl.java @@ -60,7 +60,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -460,14 +460,14 @@ public GetContainerStatusResponse getContainerStatus( } @SuppressWarnings("serial") - private static class ContainerException extends YarnRemoteException { + private static class ContainerException extends YarnException { public ContainerException(String message) { super(message); } @Override - public YarnRemoteException getCause() { + public YarnException getCause() { return null; } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java index a0ed206..7e90a44 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/local/TestLocalContainerAllocator.java @@ -30,14 +30,14 @@ import org.apache.hadoop.mapreduce.v2.app.client.ClientService; import org.apache.hadoop.mapreduce.v2.app.job.Job; import org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Assert; @@ -56,13 +56,13 @@ public void testRMConnectionRetry() throws Exception { try { lca.heartbeat(); Assert.fail("heartbeat was supposed to throw"); - } catch (YarnRemoteException e) { - // YarnRemoteException is expected + } catch (YarnException e) { + // YarnException is expected } finally { lca.stop(); } - // verify YarnException is thrown when the retry interval has expired + // verify YarnRuntimeException is thrown when the retry interval has expired conf.setLong(MRJobConfig.MR_AM_TO_RM_WAIT_INTERVAL_MS, 0); lca = new StubbedLocalContainerAllocator(); lca.init(conf); @@ -70,8 +70,8 @@ public void testRMConnectionRetry() throws Exception { try { lca.heartbeat(); Assert.fail("heartbeat was supposed to throw"); - } catch (YarnException e) { - // YarnException is expected + } catch (YarnRuntimeException e) { + // YarnRuntimeException is expected } finally { lca.stop(); } @@ -100,7 +100,7 @@ protected AMRMProtocol createSchedulerProxy() { try { when(scheduler.allocate(isA(AllocateRequest.class))) .thenThrow(RPCUtil.getRemoteException(new IOException("forcefail"))); - } catch (YarnRemoteException e) { + } catch (YarnException e) { } catch (IOException e) { } return scheduler; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java index 892eb87..6b610b2 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java @@ -41,7 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskState; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.mapreduce.v2.util.MRApps; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import 
org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; @@ -98,7 +98,7 @@ private static long toClusterTimeStamp(String identifier) { case REDUCE: return org.apache.hadoop.mapreduce.TaskType.REDUCE; default: - throw new YarnException("Unrecognized task type: " + taskType); + throw new YarnRuntimeException("Unrecognized task type: " + taskType); } } @@ -110,7 +110,7 @@ private static long toClusterTimeStamp(String identifier) { case REDUCE: return TaskType.REDUCE; default: - throw new YarnException("Unrecognized task type: " + taskType); + throw new YarnRuntimeException("Unrecognized task type: " + taskType); } } @@ -145,7 +145,7 @@ public static TaskAttemptState toYarn( case UNASSIGNED: return TaskAttemptState.STARTING; default: - throw new YarnException("Unrecognized State: " + state); + throw new YarnRuntimeException("Unrecognized State: " + state); } } @@ -164,7 +164,7 @@ public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) { case CLEANUP: return Phase.CLEANUP; } - throw new YarnException("Unrecognized Phase: " + phase); + throw new YarnRuntimeException("Unrecognized Phase: " + phase); } public static TaskCompletionEvent[] fromYarn( @@ -202,7 +202,7 @@ public static TaskCompletionEvent fromYarn( case TIPFAILED: return TaskCompletionEvent.Status.TIPFAILED; } - throw new YarnException("Unrecognized status: " + newStatus); + throw new YarnRuntimeException("Unrecognized status: " + newStatus); } public static org.apache.hadoop.mapred.TaskAttemptID fromYarn( @@ -328,7 +328,7 @@ public static int fromYarn(JobState state) { case ERROR: return org.apache.hadoop.mapred.JobStatus.FAILED; } - throw new YarnException("Unrecognized job state: " + state); + throw new YarnRuntimeException("Unrecognized job state: " + state); } public static org.apache.hadoop.mapred.TIPStatus fromYarn( @@ -346,7 +346,7 @@ public static int fromYarn(JobState state) { case FAILED: return org.apache.hadoop.mapred.TIPStatus.FAILED; } - throw new YarnException("Unrecognized task state: " + state); + throw new YarnRuntimeException("Unrecognized task state: " + state); } public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) { @@ -408,7 +408,7 @@ public static State fromYarn(YarnApplicationState yarnApplicationState, case KILLED: return State.KILLED; } - throw new YarnException("Unrecognized application state: " + yarnApplicationState); + throw new YarnRuntimeException("Unrecognized application state: " + yarnApplicationState); } private static final String TT_NAME_PREFIX = "tracker_"; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java index fef0902..24163a7 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java @@ -49,7 +49,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId; import org.apache.hadoop.mapreduce.v2.api.records.TaskType; import org.apache.hadoop.yarn.ContainerLogAppender; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import 
org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -97,7 +97,7 @@ public static String taskSymbol(TaskType type) { case MAP: return "m"; case REDUCE: return "r"; } - throw new YarnException("Unknown task type: "+ type.toString()); + throw new YarnRuntimeException("Unknown task type: "+ type.toString()); } public static enum TaskAttemptStateUI { @@ -126,7 +126,7 @@ public static TaskType taskType(String symbol) { // JDK 7 supports switch on strings if (symbol.equals("m")) return TaskType.MAP; if (symbol.equals("r")) return TaskType.REDUCE; - throw new YarnException("Unknown task symbol: "+ symbol); + throw new YarnRuntimeException("Unknown task symbol: "+ symbol); } public static TaskAttemptStateUI taskAttemptState(String attemptStateStr) { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java index c086840..84b827e 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java @@ -56,7 +56,7 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenResponse; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.junit.Test; @@ -84,7 +84,7 @@ private void testPbServerFactory() { RpcServerFactoryPBImpl.get().getServer( MRClientProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete server"); } finally { @@ -110,12 +110,12 @@ private void testPbClientFactory() { MRClientProtocol client = null; try { client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete server"); } finally { diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java index 08e4b94..7e6a672 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java @@ -20,7 +20,7 @@ import junit.framework.Assert; -import org.apache.hadoop.yarn.YarnException; 
+import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest; @@ -38,7 +38,7 @@ public void testPbRecordFactory() { try { CounterGroup response = pbRecordFactory.newRecordInstance(CounterGroup.class); Assert.assertEquals(CounterGroupPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } @@ -46,7 +46,7 @@ public void testPbRecordFactory() { try { GetCountersRequest response = pbRecordFactory.newRecordInstance(GetCountersRequest.class); Assert.assertEquals(GetCountersRequestPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java index eb4e784..3a0e8f4 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CachedHistoryStorage.java @@ -37,7 +37,7 @@ import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; /** @@ -60,7 +60,7 @@ public void setHistoryFileManager(HistoryFileManager hsManager) { @SuppressWarnings("serial") @Override - public void init(Configuration conf) throws YarnException { + public void init(Configuration conf) throws YarnRuntimeException { LOG.info("CachedHistoryStorage Init"); loadedJobCacheSize = conf.getInt( @@ -94,7 +94,7 @@ private Job loadJob(HistoryFileInfo fileInfo) { loadedJobCache.put(job.getID(), job); return job; } catch (IOException e) { - throw new YarnException( + throw new YarnRuntimeException( "Could not find/load job: " + fileInfo.getJobId(), e); } } @@ -120,7 +120,7 @@ public Job getFullJob(JobId jobId) { } return result; } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -137,7 +137,7 @@ public Job getFullJob(JobId jobId) { } } catch (IOException e) { LOG.warn("Error trying to scan for all FileInfos", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } return result; } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java index 2f3c57d..cf67d85 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java +++ 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java @@ -60,7 +60,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.util.Records; @@ -333,12 +333,12 @@ protected synchronized void loadFullHistoryData(boolean loadTasks, historyFileAbsolute); this.jobInfo = parser.parse(); } catch (IOException e) { - throw new YarnException("Could not load history file " + throw new YarnRuntimeException("Could not load history file " + historyFileAbsolute, e); } IOException parseException = parser.getParseException(); if (parseException != null) { - throw new YarnException( + throw new YarnRuntimeException( "Could not parse history file " + historyFileAbsolute, parseException); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java index a1887f7..8bef996 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java @@ -59,7 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; import com.google.common.annotations.VisibleForTesting; @@ -421,7 +421,7 @@ public void init(Configuration conf) { mkdir(doneDirFc, doneDirPrefixPath, new FsPermission( JobHistoryUtils.HISTORY_DONE_DIR_PERMISSION)); } catch (IOException e) { - throw new YarnException("Error creating done directory: [" + throw new YarnRuntimeException("Error creating done directory: [" + doneDirPrefixPath + "]", e); } @@ -437,7 +437,7 @@ public void init(Configuration conf) { JobHistoryUtils.HISTORY_INTERMEDIATE_DONE_DIR_PERMISSIONS.toShort())); } catch (IOException e) { LOG.info("error creating done directory on dfs " + e); - throw new YarnException("Error creating intermediate done directory: [" + throw new YarnRuntimeException("Error creating intermediate done directory: [" + intermediateDoneDirPath + "]", e); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java index 0f9cf26..4ad42ad 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistory.java @@ -40,7 +40,7 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.yarn.Clock; import 
org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; @@ -71,7 +71,7 @@ private HistoryFileManager hsManager = null; @Override - public void init(Configuration conf) throws YarnException { + public void init(Configuration conf) throws YarnRuntimeException { LOG.info("JobHistory Init"); this.conf = conf; this.appID = ApplicationId.newInstance(0, 0); @@ -87,7 +87,7 @@ public void init(Configuration conf) throws YarnException { try { hsManager.initExisting(); } catch (IOException e) { - throw new YarnException("Failed to intialize existing directories", e); + throw new YarnRuntimeException("Failed to intialize existing directories", e); } storage = ReflectionUtils.newInstance(conf.getClass( diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java index f76f030..44eb217 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java @@ -32,7 +32,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; @@ -73,7 +73,7 @@ public synchronized void init(Configuration conf) { try { doSecureLogin(conf); } catch(IOException ie) { - throw new YarnException("History Server Failed to login", ie); + throw new YarnRuntimeException("History Server Failed to login", ie); } jobHistoryService = new JobHistory(); historyContext = (HistoryContext)jobHistoryService; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java index 430c35c..e84894c 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java @@ -55,7 +55,7 @@ import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo; import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo; import org.apache.hadoop.mapreduce.v2.util.MRApps; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.webapp.BadRequestException; import org.apache.hadoop.yarn.webapp.NotFoundException; import org.apache.hadoop.yarn.webapp.WebApp; @@ -255,7 +255,7 @@ public TasksInfo getJobTasks(@PathParam("jobid") String jid, if (type != null && 
!type.isEmpty()) { try { ttype = MRApps.taskType(type); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { throw new BadRequestException("tasktype must be either m or r"); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java index af9cfd2..10f8efd 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java @@ -50,7 +50,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.NodeId; @@ -111,7 +111,7 @@ jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks, numAttempts, hasFailedTasks); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } partialJobs = jobs.partial; fullJobs = jobs.full; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java index 959147e..40390e8 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java @@ -45,7 +45,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps; import org.apache.hadoop.yarn.Clock; import org.apache.hadoop.yarn.ClusterInfo; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.event.EventHandler; @@ -92,7 +92,7 @@ try { jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } partialJobs = jobs.partial; fullJobs = jobs.full; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java index 8db7e97..46c266a 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientCache.java @@ -33,7 +33,7 @@ import 
org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.ipc.YarnRPC; public class ClientCache { @@ -60,7 +60,7 @@ public synchronized ClientServiceDelegate getClient(JobID jobId) { hsProxy = instantiateHistoryProxy(); } catch (IOException e) { LOG.warn("Could not connect to History server.", e); - throw new YarnException("Could not connect to History server.", e); + throw new YarnRuntimeException("Could not connect to History server.", e); } } ClientServiceDelegate client = cache.get(jobId); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java index 1e5cd30..f49370e 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java @@ -65,12 +65,12 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.YarnApplicationState; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -143,7 +143,7 @@ private MRClientProtocol getProxy() throws IOException { ApplicationReport application = null; try { application = rm.getApplicationReport(appId); - } catch (YarnRemoteException e2) { + } catch (YarnException e2) { throw new IOException(e2); } if (application != null) { @@ -212,11 +212,11 @@ public MRClientProtocol run() throws IOException { Thread.sleep(2000); } catch (InterruptedException e1) { LOG.warn("getProxy() call interruped", e1); - throw new YarnException(e1); + throw new YarnRuntimeException(e1); } try { application = rm.getApplicationReport(appId); - } catch (YarnRemoteException e1) { + } catch (YarnException e1) { throw new IOException(e1); } if (application == null) { @@ -226,8 +226,8 @@ public MRClientProtocol run() throws IOException { } } catch (InterruptedException e) { LOG.warn("getProxy() call interruped", e); - throw new YarnException(e); - } catch (YarnRemoteException e) { + throw new YarnRuntimeException(e); + } catch (YarnException e) { throw new IOException(e); } } @@ -296,9 +296,9 @@ private synchronized Object invoke(String method, Class argClass, try { methodOb = MRClientProtocol.class.getMethod(method, argClass); } catch (SecurityException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException("Method name mismatch", e); + throw new 
YarnRuntimeException("Method name mismatch", e); } int maxRetries = this.conf.getInt( MRJobConfig.MR_CLIENT_MAX_RETRIES, @@ -308,7 +308,7 @@ private synchronized Object invoke(String method, Class argClass, try { return methodOb.invoke(getProxy(), args); } catch (InvocationTargetException e) { - // Will not throw out YarnRemoteException anymore + // Will not throw out YarnException anymore LOG.debug("Failed to contact AM/History for job " + jobId + " retrying..", e.getTargetException()); // Force reconnection by setting the proxy to null. diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java index b893c92..dee5d18 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ResourceMgrDelegate.java @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ProtoUtils; public class ResourceMgrDelegate extends YarnClientImpl { @@ -68,7 +68,7 @@ public ResourceMgrDelegate(YarnConfiguration conf) { InterruptedException { try { return TypeConverter.fromYarnNodes(super.getNodeReports()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -76,7 +76,7 @@ public ResourceMgrDelegate(YarnConfiguration conf) { public JobStatus[] getAllJobs() throws IOException, InterruptedException { try { return TypeConverter.fromYarnApps(super.getApplicationList(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -98,7 +98,7 @@ public ClusterMetrics getClusterMetrics() throws IOException, metrics.getNumNodeManagers() * 2, 1, metrics.getNumNodeManagers(), 0, 0); return oldMetrics; - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -113,7 +113,7 @@ public Token getDelegationToken(Text renewer) throws IOException, try { return ProtoUtils.convertFromProtoFormat( super.getRMDelegationToken(renewer), rmAddress); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -127,7 +127,7 @@ public JobID getNewJobID() throws IOException, InterruptedException { this.application = super.getNewApplication(); this.applicationId = this.application.getApplicationId(); return TypeConverter.fromYarn(applicationId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -139,7 +139,7 @@ public QueueInfo getQueue(String queueName) throws IOException, super.getQueueInfo(queueName); return (queueInfo == null) ? 
null : TypeConverter.fromYarn(queueInfo, conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -149,7 +149,7 @@ public QueueInfo getQueue(String queueName) throws IOException, try { return TypeConverter.fromYarnQueueUserAclsInfo(super .getQueueAclsInfo()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -157,7 +157,7 @@ public QueueInfo getQueue(String queueName) throws IOException, public QueueInfo[] getQueues() throws IOException, InterruptedException { try { return TypeConverter.fromYarnQueueInfo(super.getAllQueues(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -166,7 +166,7 @@ public QueueInfo getQueue(String queueName) throws IOException, try { return TypeConverter.fromYarnQueueInfo(super.getRootQueueInfos(), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -176,7 +176,7 @@ public QueueInfo getQueue(String queueName) throws IOException, try { return TypeConverter.fromYarnQueueInfo(super.getChildQueueInfos(parent), this.conf); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java index 4d950cd..4686d9f 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java @@ -64,7 +64,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; @@ -79,7 +79,7 @@ import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.security.client.RMTokenSelector; @@ -287,7 +287,7 @@ public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts) try { ts.writeTokenStorageFile(applicationTokensFile, conf); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // Construct necessary information to start the MR AM @@ -311,7 +311,7 @@ public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts) diagnostics); } return clientCache.getClient(jobId).getJobStatus(jobId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -561,7 +561,7 @@ public void killJob(JobID arg0) throws IOException, InterruptedException { if (status.getState() != 
JobStatus.State.RUNNING) { try { resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } return; @@ -589,7 +589,7 @@ public void killJob(JobID arg0) throws IOException, InterruptedException { if (status.getState() != JobStatus.State.KILLED) { try { resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java index e6a8d2a..e95c96c 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java @@ -68,7 +68,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.ClientRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse; @@ -404,7 +404,7 @@ public void start(Configuration conf) { address.getAddress(); hostNameResolved = InetAddress.getLocalHost(); } catch (UnknownHostException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } server = diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java index cb4043e..318f8f3 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; @@ -216,7 +216,7 @@ public void testReconnectOnAMRestart() throws IOException { getRunningApplicationReport(null, 0)).thenReturn( getRunningApplicationReport(null, 0)).thenReturn( getRunningApplicationReport("am2", 90)); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } @@ -286,7 +286,7 @@ public void testAMAccessDisabled() throws IOException { getRunningApplicationReport("am1", 78)).thenReturn( getRunningApplicationReport("am1", 78)).thenReturn( getFinishedApplicationReport()); - } catch (YarnRemoteException e) 
{ + } catch (YarnException e) { throw new IOException(e); } @@ -367,7 +367,7 @@ public void testRMDownRestoreForJobStatusBeforeGetAMReport() verify(rmDelegate, times(3)).getApplicationReport( any(ApplicationId.class)); Assert.assertNotNull(jobStatus); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -397,7 +397,7 @@ private void testRMDownForJobStatusBeforeGetAMReport(Configuration conf, } verify(rmDelegate, times(noOfRetries)).getApplicationReport( any(ApplicationId.class)); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } } @@ -448,7 +448,7 @@ private ResourceMgrDelegate getRMDelegate() throws IOException { ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class); try { when(rm.getApplicationReport(jobId.getAppId())).thenReturn(null); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } return rm; diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java index 5561633..85dcea9 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestNetworkedJob.java @@ -45,7 +45,7 @@ import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus; import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.junit.Test; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; @@ -181,12 +181,12 @@ public void testNetworkedJob() throws Exception { try { client.getSetupTaskReports(jobId); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { assertEquals(e.getMessage(), "Unrecognized task type: JOB_SETUP"); } try { client.getCleanupTaskReports(jobId); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { assertEquals(e.getMessage(), "Unrecognized task type: JOB_CLEANUP"); } assertEquals(client.getReduceTaskReports(jobId).length, 0); diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java index c2c17ae..6b60b36 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestResourceMgrDelegate.java @@ -38,7 +38,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; import org.junit.Test; import org.mockito.ArgumentCaptor; @@ -60,7 +60,7 @@ public void 
testGetRootQueues() throws IOException, InterruptedException { try { Mockito.when(applicationsManager.getQueueInfo(Mockito.any( GetQueueInfoRequest.class))).thenReturn(response); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } @@ -78,7 +78,7 @@ public synchronized void start() { try { Mockito.verify(applicationsManager).getQueueInfo( argument.capture()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } diff --git hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java index 10d7a71..e1cf3d4 100644 --- hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java +++ hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java @@ -35,7 +35,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils; import org.apache.hadoop.util.JarFinder; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; @@ -100,7 +100,7 @@ public void init(Configuration conf) { Path doneDirPath = fc.makeQualified(new Path(doneDir)); fc.mkdir(doneDirPath, null, true); } catch (IOException e) { - throw new YarnException("Could not create staging directory. ", e); + throw new YarnRuntimeException("Could not create staging directory. ", e); } conf.set(MRConfig.MASTER_ADDRESS, "test"); // The default is local because of // which shuffle doesn't happen @@ -158,7 +158,7 @@ public void run() { } super.start(); } catch (Throwable t) { - throw new YarnException(t); + throw new YarnRuntimeException(t); } //need to do this because historyServer.init creates a new Configuration getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java deleted file mode 100644 index 29279b6..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnException.java +++ /dev/null @@ -1,33 +0,0 @@ -/** -* Licensed to the Apache Software Foundation (ASF) under one -* or more contributor license agreements. See the NOTICE file -* distributed with this work for additional information -* regarding copyright ownership. The ASF licenses this file -* to you under the Apache License, Version 2.0 (the -* "License"); you may not use this file except in compliance -* with the License. You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. 
-*/ - -package org.apache.hadoop.yarn; - -/** Base Yarn Exception. - * - * NOTE: All derivatives of this exception, which may be thrown by a remote - * service, must include a String only constructor for the exception to be - * unwrapped on the client. - */ -public class YarnException extends RuntimeException { - public YarnException(Throwable cause) { super(cause); } - public YarnException(String message) { super(message); } - public YarnException(String message, Throwable cause) { - super(message, cause); - } -} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java index e69de29..16ccf38 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/YarnRuntimeException.java @@ -0,0 +1,37 @@ +/** +* Licensed to the Apache Software Foundation (ASF) under one +* or more contributor license agreements. See the NOTICE file +* distributed with this work for additional information +* regarding copyright ownership. The ASF licenses this file +* to you under the Apache License, Version 2.0 (the +* "License"); you may not use this file except in compliance +* with the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.hadoop.yarn; + +import org.apache.hadoop.classification.InterfaceAudience.Private; + +/** Base Yarn Exception. + * + * NOTE: All derivatives of this exception, which may be thrown by a remote + * service, must include a String only constructor for the exception to be + * unwrapped on the client. + */ + +@Private +public class YarnRuntimeException extends RuntimeException { + public YarnRuntimeException(Throwable cause) { super(cause); } + public YarnRuntimeException(String message) { super(message); } + public YarnRuntimeException(String message, Throwable cause) { + super(message, cause); + } +} diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java index e6c8c66..dedd9a9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/AMRMProtocol.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *

The protocol between a live instance of ApplicationMaster @@ -58,12 +58,12 @@ * * @param request registration request * @return registration response - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster( RegisterApplicationMasterRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by an ApplicationMaster to notify the @@ -78,12 +78,12 @@ public RegisterApplicationMasterResponse registerApplicationMaster( * * @param request completion request * @return completion response - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public FinishApplicationMasterResponse finishApplicationMaster( FinishApplicationMasterRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The main interface between an ApplicationMaster @@ -108,9 +108,9 @@ public FinishApplicationMasterResponse finishApplicationMaster( * * @param request allocation request * @return allocation response - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java index 6bef8cb..593a8ea 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ClientRMProtocol.java @@ -54,7 +54,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *

The protocol between clients and the ResourceManager @@ -79,13 +79,13 @@ * @param request request to get a new ApplicationId * @return response containing the new ApplicationId to be used * to submit an application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #submitApplication(SubmitApplicationRequest) */ public GetNewApplicationResponse getNewApplication( GetNewApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to submit a new application to the @@ -108,13 +108,13 @@ public GetNewApplicationResponse getNewApplication( * * @param request request to submit a new application * @return (empty) response on accepting the submission - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getNewApplication(GetNewApplicationRequest) */ public SubmitApplicationResponse submitApplication( SubmitApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to request the @@ -132,13 +132,13 @@ public SubmitApplicationResponse submitApplication( * @param request request to abort a submitted application * @return ResourceManager returns an empty response * on success and throws an exception on rejecting the request - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getQueueUserAcls(GetQueueUserAclsInfoRequest) */ public KillApplicationResponse forceKillApplication( KillApplicationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get a report of an Application from @@ -168,12 +168,12 @@ public KillApplicationResponse forceKillApplication( * * @param request request for an application report * @return application report - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetApplicationReportResponse getApplicationReport( GetApplicationReportRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get metrics about the cluster from @@ -186,12 +186,12 @@ public GetApplicationReportResponse getApplicationReport( * * @param request request for cluster metrics * @return cluster metrics - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetClusterMetricsResponse getClusterMetrics( GetClusterMetricsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get a report of all Applications @@ -208,12 +208,12 @@ public GetClusterMetricsResponse getClusterMetrics( * * @param request request for report on all running applications * @return report on all running applications - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetAllApplicationsResponse getAllApplications( GetAllApplicationsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get a report of all nodes @@ -225,12 +225,12 @@ public GetAllApplicationsResponse getAllApplications( * * @param request request for report on all nodes * @return report on all nodes - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetClusterNodesResponse getClusterNodes( GetClusterNodesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get information about queues @@ -244,12 +244,12 @@ public GetClusterNodesResponse getClusterNodes( * * @param request request to get queue information * @return queue information - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetQueueInfoResponse getQueueInfo( GetQueueInfoRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get information about queue @@ -261,12 +261,12 @@ public GetQueueInfoResponse getQueueInfo( * * @param request request to get queue acls for current user * @return queue acls for current user - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetQueueUserAclsInfoResponse getQueueUserAcls( GetQueueUserAclsInfoRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The interface used by clients to get delegation token, enabling the @@ -277,24 +277,24 @@ public GetQueueUserAclsInfoResponse getQueueUserAcls( * service. * @param request request to get a delegation token for the client. * @return delegation token that can be used to talk to this service - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public GetDelegationTokenResponse getDelegationToken( GetDelegationTokenRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Renew an existing delegation token. * * @param request the delegation token to be renewed. * @return the new expiry time for the delegation token. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Private public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException, + RenewDelegationTokenRequest request) throws YarnException, IOException; /** @@ -302,11 +302,11 @@ public RenewDelegationTokenResponse renewDelegationToken( * * @param request the delegation token to be cancelled. * @return an empty response. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Private public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException, + CancelDelegationTokenRequest request) throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java index 0961ac4..eab2e46 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ContainerManager.java @@ -32,7 +32,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerStatus; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; /** *

The protocol between an ApplicationMaster and a @@ -69,13 +69,13 @@ * @param request request to start a container * @return empty response to indicate acceptance of the request * or an exception - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The ApplicationMaster requests a NodeManager @@ -96,13 +96,13 @@ StartContainerResponse startContainer(StartContainerRequest request) * @param request request to stop a container * @return empty response to indicate acceptance of the request * or an exception - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *

The api used by the ApplicationMaster to request for @@ -121,12 +121,12 @@ StopContainerResponse stopContainer(StopContainerRequest request) * with the specified ContainerId * @return response containing the ContainerStatus of the * container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ @Public @Stable GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException, + GetContainerStatusRequest request) throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java index 9a2d04c..7fba4de 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/RMAdminProtocol.java @@ -21,7 +21,7 @@ import java.io.IOException; import org.apache.hadoop.tools.GetUserMappingsProtocol; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.api.protocolrecords.RefreshAdminAclsRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshAdminAclsResponse; import org.apache.hadoop.yarn.api.protocolrecords.RefreshNodesRequest; @@ -37,25 +37,25 @@ public interface RMAdminProtocol extends GetUserMappingsProtocol { public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( RefreshUserToGroupsMappingsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshAdminAclsResponse refreshAdminAcls( RefreshAdminAclsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; public RefreshServiceAclsResponse refreshServiceAcls( RefreshServiceAclsRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java index e69de29..54b6797 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnException.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.yarn.exceptions; + +public class YarnException extends Exception { + + private static final long serialVersionUID = 1L; + + public YarnException() { + super(); + } + + public YarnException(String message) { + super(message); + } + + public YarnException(Throwable cause) { + super(cause); + } + + public YarnException(String message, Throwable cause) { + super(message, cause); + } + +} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java deleted file mode 100644 index d03f8dd..0000000 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/exceptions/YarnRemoteException.java +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.yarn.exceptions; - -public class YarnRemoteException extends Exception { - - private static final long serialVersionUID = 1L; - - public YarnRemoteException() { - super(); - } - - public YarnRemoteException(String message) { - super(message); - } - - public YarnRemoteException(Throwable cause) { - super(cause); - } - - public YarnRemoteException(String message, Throwable cause) { - super(message, cause); - } - -} \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java index f016675..a80da18 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/factory/providers/RecordFactoryProvider.java @@ -22,7 +22,7 @@ import java.lang.reflect.Method; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; @@ -55,13 +55,13 @@ private static Object getFactoryClassInstance(String factoryClassName) { method.setAccessible(true); return method.invoke(null, null); } catch (ClassNotFoundException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java index 88dcffd..f645837 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java @@ -72,7 +72,7 @@ import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.AMRMClientAsync; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.util.ConverterUtils; @@ -434,10 +434,10 @@ private void printUsage(Options opts) { /** * Main run function for the application master * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - public boolean run() throws YarnRemoteException, IOException { + public boolean run() throws YarnException, IOException { LOG.info("Starting 
ApplicationMaster"); AMRMClientAsync.CallbackHandler allocListener = new RMCallbackHandler(); @@ -537,7 +537,7 @@ private void finish() { } try { resourceManager.unregisterApplicationMaster(appStatus, appMessage, null); - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { LOG.error("Failed to unregister application", ex); } catch (IOException e) { LOG.error("Failed to unregister application", e); @@ -777,7 +777,7 @@ public void run() { startReq.setContainerToken(container.getContainerToken()); try { cm.startContainer(startReq); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Start container failed for :" + ", containerId=" + container.getId()); e.printStackTrace(); @@ -802,7 +802,7 @@ public void run() { // LOG.info("Container Status" // + ", id=" + container.getId() // + ", status=" +statusResp.getStatus()); - // } catch (YarnRemoteException e) { + // } catch (YarnException e) { // e.printStackTrace(); // } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java index 8cbf2de..4c1a23e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java @@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.Records; @@ -312,9 +312,9 @@ public boolean init(String[] args) throws ParseException { * Main run function for the client * @return true if application completed successfully * @throws IOException - * @throws YarnRemoteException + * @throws YarnException */ - public boolean run() throws IOException, YarnRemoteException { + public boolean run() throws IOException, YarnException { LOG.info("Running Client"); start(); @@ -591,11 +591,11 @@ else if (amMemory > maxMem) { * Kill application if time expires. * @param appId Application Id of application to be monitored * @return true if application completed successfully - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private boolean monitorApplication(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { while (true) { @@ -656,11 +656,11 @@ else if (YarnApplicationState.KILLED == state /** * Kill a submitted application by sending a call to the ASM * @param appId Application Id to be killed. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private void forceKillApplication(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO clarify whether multiple jobs with the same app id can be submitted and be running at // the same time. // If yes, can we kill a particular attempt only? 
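The handling pattern above repeats throughout the patch, so a minimal, self-contained sketch may help: protocol-facing methods now declare the checked org.apache.hadoop.yarn.exceptions.YarnException alongside IOException, and adapters for the older MapReduce API (ClientServiceDelegate, ResourceMgrDelegate) catch it and rewrap it as an IOException. The fetchApplicationReport() helper below is hypothetical; it stands in for any call declared as throwing YarnException and IOException.

import java.io.IOException;

import org.apache.hadoop.yarn.exceptions.YarnException;

public class YarnExceptionWrappingSketch {

  // Hypothetical stand-in for a protocol call such as getApplicationReport().
  private static String fetchApplicationReport() throws YarnException, IOException {
    throw new YarnException("application report not available"); // simulated remote failure
  }

  // Same convention as the MapReduce client adapters in this patch:
  // surface YARN-side failures to old-API callers as IOException.
  public static String getReportOrWrap() throws IOException {
    try {
      return fetchApplicationReport();
    } catch (YarnException e) {
      throw new IOException(e);
    }
  }

  public static void main(String[] args) {
    try {
      System.out.println(getReportOrWrap());
    } catch (IOException e) {
      System.out.println("wrapped cause: " + e.getCause());
    }
  }
}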
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java index a5de27a..a01370a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/src/main/java/org/apache/hadoop/yarn/applications/unmanagedamlauncher/UnmanagedAMLauncher.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.YarnClientImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.Records; /** @@ -268,7 +268,7 @@ public void run() { amProc.destroy(); } - public boolean run() throws IOException, YarnRemoteException { + public boolean run() throws IOException, YarnException { LOG.info("Starting Client"); // Connect to ResourceManager @@ -353,11 +353,11 @@ public boolean run() throws IOException, YarnRemoteException { * @param appId * Application Id of application to be monitored * @return true if application completed successfully - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private ApplicationReport monitorApplication(ApplicationId appId, - Set finalState) throws YarnRemoteException, + Set finalState) throws YarnException, IOException { long foundAMCompletedTime = 0; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java index e56d5c3..5b4ba5c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClient.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; import com.google.common.collect.ImmutableList; @@ -113,14 +113,14 @@ public StoredContainerRequest(Resource capability, String[] hosts, * @param appHostPort Port master is listening on * @param appTrackingUrl URL at which the master info can be seen * @return RegisterApplicationMasterResponse - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster(String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Request additional containers and receive new container allocations. 
@@ -134,24 +134,24 @@ public StoredContainerRequest(Resource capability, String[] hosts, * App should not make concurrent allocate requests. May cause request loss. * @param progressIndicator Indicates progress made by the master * @return the response of the allocate request - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public AllocateResponse allocate(float progressIndicator) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Unregister the application master. This must be called in the end. * @param appStatus Success/Failure status of the master * @param appMessage Diagnostics message on failure * @param appTrackingUrl New URL to get master info - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public void unregisterApplicationMaster(FinalApplicationStatus appStatus, String appMessage, String appTrackingUrl) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** * Request containers for resources before calling allocate diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java index 02520d9..c24e3ba 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientAsync.java @@ -31,7 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability.Evolving; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.AbstractService; import com.google.common.annotations.VisibleForTesting; @@ -152,7 +152,7 @@ public void start() { @Override public void stop() { if (Thread.currentThread() == handlerThread) { - throw new YarnException("Cannot call stop from callback handler thread!"); + throw new YarnRuntimeException("Cannot call stop from callback handler thread!"); } keepRunning = false; try { @@ -184,12 +184,12 @@ public void setHeartbeatInterval(int interval) { /** * Registers this application master with the resource manager. On successful * registration, starts the heartbeating thread. 
- * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public RegisterApplicationMasterResponse registerApplicationMaster( String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RegisterApplicationMasterResponse response = client .registerApplicationMaster(appHostName, appHostPort, appTrackingUrl); heartbeatThread.start(); @@ -201,11 +201,11 @@ public RegisterApplicationMasterResponse registerApplicationMaster( * @param appStatus Success/Failure status of the master * @param appMessage Diagnostics message on failure * @param appTrackingUrl New URL to get master info - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ public void unregisterApplicationMaster(FinalApplicationStatus appStatus, - String appMessage, String appTrackingUrl) throws YarnRemoteException, + String appMessage, String appTrackingUrl) throws YarnException, IOException { synchronized (unregisterHeartbeatLock) { keepRunning = false; @@ -277,7 +277,7 @@ public void run() { try { response = client.allocate(progress); - } catch (YarnRemoteException ex) { + } catch (YarnException ex) { LOG.error("Yarn exception on heartbeat", ex); savedException = ex; // interrupt handler thread in case it waiting on the queue diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java index be851a5..fdd0c62 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/AMRMClientImpl.java @@ -40,7 +40,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -169,7 +169,7 @@ public synchronized void start() { try { currentUser = UserGroupInformation.getCurrentUser(); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } // CurrentUser should already have AMToken loaded. @@ -195,7 +195,7 @@ public synchronized void stop() { @Override public RegisterApplicationMasterResponse registerApplicationMaster( String appHostName, int appHostPort, String appTrackingUrl) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // do this only once ??? 
RegisterApplicationMasterRequest request = recordFactory .newRecordInstance(RegisterApplicationMasterRequest.class); @@ -214,7 +214,7 @@ public RegisterApplicationMasterResponse registerApplicationMaster( @Override public AllocateResponse allocate(float progressIndicator) - throws YarnRemoteException, IOException { + throws YarnException, IOException { AllocateResponse allocateResponse = null; ArrayList askList = null; ArrayList releaseList = null; @@ -268,7 +268,7 @@ public AllocateResponse allocate(float progressIndicator) @Override public void unregisterApplicationMaster(FinalApplicationStatus appStatus, - String appMessage, String appTrackingUrl) throws YarnRemoteException, + String appMessage, String appTrackingUrl) throws YarnException, IOException { FinishApplicationMasterRequest request = recordFactory .newRecordInstance(FinishApplicationMasterRequest.class); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java index d5e94d5..0e45aa6 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClient.java @@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Token; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; @InterfaceAudience.Public @@ -51,12 +51,12 @@ * NodeManager to launch the * container * @return a map between the auxiliary service names and their outputs - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ Map startContainer(Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
Stop an started container.
@@ -65,11 +65,11 @@ * @param nodeId the Id of the NodeManager * @param containerToken the security token to verify authenticity of the * started container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ void stopContainer(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException; + Token containerToken) throws YarnException, IOException; /** *
Query the status of a container.
@@ -79,11 +79,11 @@ void stopContainer(ContainerId containerId, NodeId nodeId, * @param containerToken the security token to verify authenticity of the * started container * @return the status of a container - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ ContainerStatus getContainerStatus(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException; + Token containerToken) throws YarnException, IOException; /** *
Set whether the containers that are started by this client, and are diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java index becb357..cf1a8af 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientAsync.java @@ -51,7 +51,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AbstractEvent; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.state.InvalidStateTransitonException; @@ -446,7 +446,7 @@ public ContainerState transition( + "Container " + containerId, thr); } return ContainerState.RUNNING; - } catch (YarnRemoteException e) { + } catch (YarnException e) { return onExceptionRaised(container, event, e); } catch (IOException e) { return onExceptionRaised(container, event, e); @@ -490,7 +490,7 @@ public ContainerState transition( + "Container " + event.getContainerId(), thr); } return ContainerState.DONE; - } catch (YarnRemoteException e) { + } catch (YarnException e) { return onExceptionRaised(container, event, e); } catch (IOException e) { return onExceptionRaised(container, event, e); @@ -602,7 +602,7 @@ public void run() { "Unchecked exception is thrown from onContainerStatusReceived" + " for Container " + event.getContainerId(), thr); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { onExceptionRaised(containerId, e); } catch (IOException e) { onExceptionRaised(containerId, e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java index 1a564f4..5c4b810 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/NMClientImpl.java @@ -44,7 +44,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.NodeId; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; @@ -112,7 +112,7 @@ protected synchronized void cleanupRunningContainers() { stopContainer(startedContainer.getContainerId(), startedContainer.getNodeId(), startedContainer.getContainerToken()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.error("Failed to stop Container " + startedContainer.getContainerId() + "when stopping NMClientImpl"); @@ -213,7 +213,7 @@ public synchronized void stop() { public synchronized Map startContainer( Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { if 
(!container.getId().equals(containerId)) { throw new IllegalArgumentException( "NMCommunicator's containerId mismatches the given Container's"); @@ -228,7 +228,7 @@ public synchronized void stop() { if (LOG.isDebugEnabled()) { LOG.debug("Started Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn("Container " + containerId + " failed to start", e); throw e; } catch (IOException e) { @@ -238,7 +238,7 @@ public synchronized void stop() { return startResponse.getAllServiceResponse(); } - public synchronized void stopContainer() throws YarnRemoteException, + public synchronized void stopContainer() throws YarnException, IOException { try { StopContainerRequest stopRequest = @@ -248,7 +248,7 @@ public synchronized void stopContainer() throws YarnRemoteException, if (LOG.isDebugEnabled()) { LOG.debug("Stopped Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn("Container " + containerId + " failed to stop", e); throw e; } catch (IOException e) { @@ -258,7 +258,7 @@ public synchronized void stopContainer() throws YarnRemoteException, } public synchronized ContainerStatus getContainerStatus() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetContainerStatusResponse statusResponse = null; try { GetContainerStatusRequest statusRequest = @@ -268,7 +268,7 @@ public synchronized ContainerStatus getContainerStatus() if (LOG.isDebugEnabled()) { LOG.debug("Got the status of Container " + containerId); } - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.warn( "Unable to get the status of Container " + containerId, e); throw e; @@ -284,7 +284,7 @@ public synchronized ContainerStatus getContainerStatus() @Override public Map startContainer( Container container, ContainerLaunchContext containerLaunchContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // Do synchronization on StartedContainer to prevent race condition // between startContainer and stopContainer synchronized (addStartedContainer(container)) { @@ -297,7 +297,7 @@ public synchronized ContainerStatus getContainerStatus() nmCommunicator.start(); allServiceResponse = nmCommunicator.startContainer(container, containerLaunchContext); - } catch (YarnRemoteException e) { + } catch (YarnException e) { // Remove the started container if it failed to start removeStartedContainer(container.getId()); throw e; @@ -326,7 +326,7 @@ public synchronized ContainerStatus getContainerStatus() @Override public void stopContainer(ContainerId containerId, NodeId nodeId, - Token containerToken) throws YarnRemoteException, IOException { + Token containerToken) throws YarnException, IOException { StartedContainer startedContainer = getStartedContainer(containerId); if (startedContainer == null) { throw RPCUtil.getRemoteException("Container " + containerId + @@ -359,7 +359,7 @@ public void stopContainer(ContainerId containerId, NodeId nodeId, @Override public ContainerStatus getContainerStatus(ContainerId containerId, NodeId nodeId, Token containerToken) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NMCommunicator nmCommunicator = null; try { nmCommunicator = new NMCommunicator(containerId, nodeId, containerToken); @@ -375,7 +375,7 @@ public ContainerStatus getContainerStatus(ContainerId containerId, } protected synchronized StartedContainer addStartedContainer( - Container container) throws YarnRemoteException, IOException { + Container 
container) throws YarnException, IOException { if (startedContainers.containsKey(container.getId())) { throw RPCUtil.getRemoteException("Container " + container.getId() + " is already started"); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java index b9be159..4ed9901 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/RMAdmin.java @@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RefreshSuperUserGroupsConfigurationRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -188,7 +188,7 @@ public RMAdminProtocol run() { return adminProtocol; } - private int refreshQueues() throws IOException, YarnRemoteException { + private int refreshQueues() throws IOException, YarnException { // Refresh the queue properties RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshQueuesRequest request = @@ -197,7 +197,7 @@ private int refreshQueues() throws IOException, YarnRemoteException { return 0; } - private int refreshNodes() throws IOException, YarnRemoteException { + private int refreshNodes() throws IOException, YarnException { // Refresh the nodes RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshNodesRequest request = @@ -207,7 +207,7 @@ private int refreshNodes() throws IOException, YarnRemoteException { } private int refreshUserToGroupsMappings() throws IOException, - YarnRemoteException { + YarnException { // Refresh the user-to-groups mappings RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshUserToGroupsMappingsRequest request = @@ -217,7 +217,7 @@ private int refreshUserToGroupsMappings() throws IOException, } private int refreshSuperUserGroupsConfiguration() throws IOException, - YarnRemoteException { + YarnException { // Refresh the super-user groups RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshSuperUserGroupsConfigurationRequest request = @@ -226,7 +226,7 @@ private int refreshSuperUserGroupsConfiguration() throws IOException, return 0; } - private int refreshAdminAcls() throws IOException, YarnRemoteException { + private int refreshAdminAcls() throws IOException, YarnException { // Refresh the admin acls RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshAdminAclsRequest request = @@ -235,7 +235,7 @@ private int refreshAdminAcls() throws IOException, YarnRemoteException { return 0; } - private int refreshServiceAcls() throws IOException, YarnRemoteException { + private int refreshServiceAcls() throws IOException, YarnException { // Refresh the service acls RMAdminProtocol adminProtocol = createAdminProtocol(); RefreshServiceAclsRequest request = diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java index b7cf5a0..53303b1 
100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClient.java @@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.service.Service; @InterfaceAudience.Public @@ -58,10 +58,10 @@ * * @return response containing the new ApplicationId to be used * to submit an application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - GetNewApplicationResponse getNewApplication() throws YarnRemoteException, + GetNewApplicationResponse getNewApplication() throws YarnException, IOException; /** @@ -75,12 +75,12 @@ GetNewApplicationResponse getNewApplication() throws YarnRemoteException, * {@link ApplicationSubmissionContext} containing all the details * needed to submit a new application * @return {@link ApplicationId} of the accepted application - * @throws YarnRemoteException + * @throws YarnException * @throws IOException * @see #getNewApplication() */ ApplicationId submitApplication(ApplicationSubmissionContext appContext) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
@@ -89,13 +89,13 @@ ApplicationId submitApplication(ApplicationSubmissionContext appContext) * * @param applicationId * {@link ApplicationId} of the application that needs to be killed - * @throws YarnRemoteException + * @throws YarnException * in case of errors or if YARN rejects the request due to * access-control restrictions. * @throws IOException * @see #getQueueAclsInfo() */ - void killApplication(ApplicationId applicationId) throws YarnRemoteException, + void killApplication(ApplicationId applicationId) throws YarnException, IOException; /** @@ -125,11 +125,11 @@ void killApplication(ApplicationId applicationId) throws YarnRemoteException, * @param appId * {@link ApplicationId} of the application that needs a report * @return application report - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ ApplicationReport getApplicationReport(ApplicationId appId) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
@@ -143,10 +143,10 @@ ApplicationReport getApplicationReport(ApplicationId appId) *
* * @return a list of reports of all running applications - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getApplicationList() throws YarnRemoteException, + List getApplicationList() throws YarnException, IOException; /** @@ -155,10 +155,10 @@ ApplicationReport getApplicationReport(ApplicationId appId) *
* * @return cluster metrics - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, + YarnClusterMetrics getYarnClusterMetrics() throws YarnException, IOException; /** @@ -167,10 +167,10 @@ YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, *
* * @return A list of report of all nodes - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getNodeReports() throws YarnRemoteException, IOException; + List getNodeReports() throws YarnException, IOException; /** *
@@ -181,11 +181,11 @@ YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, * securely talking to YARN. * @return a delegation token ({@link Token}) that can be used to * talk to YARN - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ Token getRMDelegationToken(Text renewer) - throws YarnRemoteException, IOException; + throws YarnException, IOException; /** *
@@ -195,12 +195,12 @@ Token getRMDelegationToken(Text renewer) * @param queueName * Name of the queue whose information is needed * @return queue information - * @throws YarnRemoteException + * @throws YarnException * in case of errors or if YARN rejects the request due to * access-control restrictions. * @throws IOException */ - QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, + QueueInfo getQueueInfo(String queueName) throws YarnException, IOException; /** @@ -210,10 +210,10 @@ QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, *
* * @return a list of queue-information for all queues - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getAllQueues() throws YarnRemoteException, IOException; + List getAllQueues() throws YarnException, IOException; /** *
@@ -221,10 +221,10 @@ QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, *
* * @return a list of queue-information for all the top-level queues - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getRootQueueInfos() throws YarnRemoteException, IOException; + List getRootQueueInfos() throws YarnException, IOException; /** *
@@ -236,10 +236,10 @@ QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, * Name of the queue whose child-queues' information is needed * @return a list of queue-information for all queues who are direct children * of the given parent queue. - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getChildQueueInfos(String parent) throws YarnRemoteException, + List getChildQueueInfos(String parent) throws YarnException, IOException; /** @@ -250,9 +250,9 @@ QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, * * @return a list of queue acls ({@link QueueUserACLInfo}) for * current user - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - List getQueueAclsInfo() throws YarnRemoteException, + List getQueueAclsInfo() throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java index 4eb8885..aea180c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/YarnClientImpl.java @@ -57,7 +57,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.util.Records; @@ -121,7 +121,7 @@ public synchronized void stop() { @Override public GetNewApplicationResponse getNewApplication() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetNewApplicationRequest request = Records.newRecord(GetNewApplicationRequest.class); return rmClient.getNewApplication(request); @@ -130,7 +130,7 @@ public GetNewApplicationResponse getNewApplication() @Override public ApplicationId submitApplication(ApplicationSubmissionContext appContext) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationId applicationId = appContext.getApplicationId(); appContext.setApplicationId(applicationId); SubmitApplicationRequest request = @@ -167,7 +167,7 @@ public GetNewApplicationResponse getNewApplication() @Override public void killApplication(ApplicationId applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LOG.info("Killing application " + applicationId); KillApplicationRequest request = Records.newRecord(KillApplicationRequest.class); @@ -177,7 +177,7 @@ public void killApplication(ApplicationId applicationId) @Override public ApplicationReport getApplicationReport(ApplicationId appId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetApplicationReportRequest request = Records.newRecord(GetApplicationReportRequest.class); request.setApplicationId(appId); @@ -188,7 +188,7 @@ public ApplicationReport getApplicationReport(ApplicationId appId) @Override public List getApplicationList() - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetAllApplicationsRequest request = 
Records.newRecord(GetAllApplicationsRequest.class); GetAllApplicationsResponse response = rmClient.getAllApplications(request); @@ -196,7 +196,7 @@ public ApplicationReport getApplicationReport(ApplicationId appId) } @Override - public YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, + public YarnClusterMetrics getYarnClusterMetrics() throws YarnException, IOException { GetClusterMetricsRequest request = Records.newRecord(GetClusterMetricsRequest.class); @@ -205,7 +205,7 @@ public YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, } @Override - public List getNodeReports() throws YarnRemoteException, + public List getNodeReports() throws YarnException, IOException { GetClusterNodesRequest request = Records.newRecord(GetClusterNodesRequest.class); @@ -215,7 +215,7 @@ public YarnClusterMetrics getYarnClusterMetrics() throws YarnRemoteException, @Override public Token getRMDelegationToken(Text renewer) - throws YarnRemoteException, IOException { + throws YarnException, IOException { /* get the token from RM */ GetDelegationTokenRequest rmDTRequest = Records.newRecord(GetDelegationTokenRequest.class); @@ -238,7 +238,7 @@ public Token getRMDelegationToken(Text renewer) } @Override - public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, + public QueueInfo getQueueInfo(String queueName) throws YarnException, IOException { GetQueueInfoRequest request = getQueueInfoRequest(queueName, true, false, false); @@ -247,7 +247,7 @@ public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, } @Override - public List getQueueAclsInfo() throws YarnRemoteException, + public List getQueueAclsInfo() throws YarnException, IOException { GetQueueUserAclsInfoRequest request = Records.newRecord(GetQueueUserAclsInfoRequest.class); @@ -255,7 +255,7 @@ public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, } @Override - public List getAllQueues() throws YarnRemoteException, + public List getAllQueues() throws YarnException, IOException { List queues = new ArrayList(); @@ -267,7 +267,7 @@ public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, } @Override - public List getRootQueueInfos() throws YarnRemoteException, + public List getRootQueueInfos() throws YarnException, IOException { List queues = new ArrayList(); @@ -280,7 +280,7 @@ public QueueInfo getQueueInfo(String queueName) throws YarnRemoteException, @Override public List getChildQueueInfos(String parent) - throws YarnRemoteException, IOException { + throws YarnException, IOException { List queues = new ArrayList(); QueueInfo parentQueue = diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java index 582b5ad..6bcd804 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/ApplicationCLI.java @@ -30,7 +30,7 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; public class ApplicationCLI extends YarnCLI { 
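The remaining ApplicationCLI and NodeCLI hunks apply the same rename to the CLI helpers built on these YarnClient methods. A hedged sketch of an equivalent listing helper follows; the helper and class names are assumptions, and the element type of the returned list is inferred from the surrounding hunks because the generic parameters were dropped in this copy of the patch:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class ListApplicationsSketch {

  // Illustrative helper, not part of the patch: both YarnException and
  // IOException now propagate from the client call.
  static void listApplications(YarnClient client)
      throws YarnException, IOException {
    List<ApplicationReport> reports = client.getApplicationList();
    System.out.println("Total applications: " + reports.size());
    for (ApplicationReport report : reports) {
      System.out.println(report.getApplicationId()
          + "\t" + report.getYarnApplicationState());
    }
  }
}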
@@ -90,10 +90,10 @@ private void printUsage(Options opts) { /** * Lists all the applications present in the Resource Manager * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - private void listAllApplications() throws YarnRemoteException, IOException { + private void listAllApplications() throws YarnException, IOException { PrintWriter writer = new PrintWriter(sysout); List appsReport = client.getApplicationList(); @@ -117,11 +117,11 @@ private void listAllApplications() throws YarnRemoteException, IOException { * Kills the application with the application id as appId * * @param applicationId - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ private void killApplication(String applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationId appId = ConverterUtils.toApplicationId(applicationId); sysout.println("Killing application " + applicationId); client.killApplication(appId); @@ -131,10 +131,10 @@ private void killApplication(String applicationId) * Prints the application report for an application id. * * @param applicationId - * @throws YarnRemoteException + * @throws YarnException */ private void printApplicationReport(String applicationId) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationReport appReport = client.getApplicationReport(ConverterUtils .toApplicationId(applicationId)); // Use PrintWriter.println, which uses correct platform line ending. diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java index 83033ae..b701d21 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/cli/NodeCLI.java @@ -31,7 +31,7 @@ import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeReport; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; public class NodeCLI extends YarnCLI { @@ -83,10 +83,10 @@ private void printUsage(Options opts) { /** * Lists all the nodes present in the cluster * - * @throws YarnRemoteException + * @throws YarnException * @throws IOException */ - private void listClusterNodes() throws YarnRemoteException, IOException { + private void listClusterNodes() throws YarnException, IOException { PrintWriter writer = new PrintWriter(sysout); List nodesReport = client.getNodeReports(); writer.println("Total Nodes:" + nodesReport.size()); @@ -105,9 +105,9 @@ private void listClusterNodes() throws YarnRemoteException, IOException { * Prints the node report for node id. 
* * @param nodeIdStr - * @throws YarnRemoteException + * @throws YarnException */ - private void printNodeStatus(String nodeIdStr) throws YarnRemoteException, + private void printNodeStatus(String nodeIdStr) throws YarnException, IOException { NodeId nodeId = ConverterUtils.toNodeId(nodeIdStr); List nodesReport = client.getNodeReports(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java index 74a1d16..5668436 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClient.java @@ -53,7 +53,7 @@ import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.AMRMClient.StoredContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.util.BuilderUtils; @@ -162,7 +162,7 @@ public static void tearDown() { } @Test (timeout=60000) - public void testAMRMClientMatchingFit() throws YarnRemoteException, IOException { + public void testAMRMClientMatchingFit() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -263,7 +263,7 @@ private void verifyMatches( } @Test (timeout=60000) - public void testAMRMClientMatchStorage() throws YarnRemoteException, IOException { + public void testAMRMClientMatchStorage() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -384,7 +384,7 @@ public void testAMRMClientMatchStorage() throws YarnRemoteException, IOException } @Test (timeout=60000) - public void testAMRMClient() throws YarnRemoteException, IOException { + public void testAMRMClient() throws YarnException, IOException { AMRMClientImpl amClient = null; try { // start am rm client @@ -407,7 +407,7 @@ public void testAMRMClient() throws YarnRemoteException, IOException { } private void testAllocation(final AMRMClientImpl amClient) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // setup container request assertTrue(amClient.ask.size() == 0); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java index b637b50..7bb0d0b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestAMRMClientAsync.java @@ -42,7 +42,7 @@ import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.BuilderUtils; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; @@ -152,7 +152,7 @@ public void 
testAMRMClientAsyncException() throws Exception { @SuppressWarnings("unchecked") AMRMClient client = mock(AMRMClientImpl.class); String exStr = "TestException"; - YarnRemoteException mockException = mock(YarnRemoteException.class); + YarnException mockException = mock(YarnException.class); when(mockException.getMessage()).thenReturn(exStr); when(client.allocate(anyFloat())).thenThrow(mockException); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java index 8e1c392..6f46ded 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClient.java @@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.util.Records; @@ -71,7 +71,7 @@ int nodeCount = 3; @Before - public void setup() throws YarnRemoteException, IOException { + public void setup() throws YarnException, IOException { // start minicluster conf = new YarnConfiguration(); yarnCluster = @@ -175,7 +175,7 @@ public void tearDown() { @Test (timeout = 60000) public void testNMClient() - throws YarnRemoteException, IOException { + throws YarnException, IOException { rmClient.registerApplicationMaster("Host", 10000, ""); @@ -187,7 +187,7 @@ public void testNMClient() private Set allocateContainers( AMRMClientImpl rmClient, int num) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // setup container request Resource capability = Resource.newInstance(1024, 0); Priority priority = Priority.newInstance(0); @@ -228,7 +228,7 @@ public void testNMClient() } private void testContainerManagement(NMClientImpl nmClient, - Set containers) throws YarnRemoteException, IOException { + Set containers) throws YarnException, IOException { int size = containers.size(); int i = 0; for (Container container : containers) { @@ -238,7 +238,7 @@ private void testContainerManagement(NMClientImpl nmClient, nmClient.getContainerStatus(container.getId(), container.getNodeId(), container.getContainerToken()); fail("Exception is expected"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { assertTrue("The thrown exception is not expected", e.getMessage().contains("is not handled by this NodeManager")); } @@ -249,7 +249,7 @@ private void testContainerManagement(NMClientImpl nmClient, nmClient.stopContainer(container.getId(), container.getNodeId(), container.getContainerToken()); fail("Exception is expected"); - } catch (YarnRemoteException e) { + } catch (YarnException e) { assertTrue("The thrown exception is not expected", e.getMessage().contains( "is either not started yet or already stopped")); @@ -265,7 +265,7 @@ private void testContainerManagement(NMClientImpl nmClient, clc.setTokens(securityTokens); try { nmClient.startContainer(container, clc); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Exception is not expected"); } @@ -278,7 +278,7 @@ private 
void testContainerManagement(NMClientImpl nmClient, try { nmClient.stopContainer(container.getId(), container.getNodeId(), container.getContainerToken()); - } catch (YarnRemoteException e) { + } catch (YarnException e) { fail("Exception is not expected"); } @@ -299,7 +299,7 @@ private void sleep(int sleepTime) { private void testGetContainerStatus(Container container, int index, ContainerState state, String diagnostics, int exitStatus) - throws YarnRemoteException, IOException { + throws YarnException, IOException { while (true) { try { ContainerStatus status = nmClient.getContainerStatus( diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java index bcbf94d..c9905d1 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestNMClientAsync.java @@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -135,7 +135,7 @@ public void testNMClientAsync() throws Exception { Collections.synchronizedSet(new HashSet()); protected MockNMClientAsync1(int expectedSuccess, int expectedFailure) - throws YarnRemoteException, IOException { + throws YarnException, IOException { super(MockNMClientAsync1.class.getName(), mockNMClient(0), new TestCallbackHandler1(expectedSuccess, expectedFailure)); } @@ -360,7 +360,7 @@ private void assertAtomicIntegerArray(AtomicIntegerArray array) { } private NMClient mockNMClient(int mode) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NMClient client = mock(NMClient.class); switch (mode) { case 0: @@ -435,7 +435,7 @@ public void run() { private CyclicBarrier barrierB; protected MockNMClientAsync2(CyclicBarrier barrierA, CyclicBarrier barrierB, - CyclicBarrier barrierC) throws YarnRemoteException, IOException { + CyclicBarrier barrierC) throws YarnException, IOException { super(MockNMClientAsync2.class.getName(), mockNMClient(0), new TestCallbackHandler2(barrierC)); this.barrierA = barrierA; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java index 2700039..fbc876a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/TestYarnClient.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import 
org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -95,7 +95,7 @@ public void testSubmitApplication() { ((MockYarnClient) client).setYarnApplicationState(exitStates[i]); try { client.submitApplication(context); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } catch (IOException e) { Assert.fail("Exception is not expected."); @@ -153,7 +153,7 @@ public void start() { try{ when(rmClient.getApplicationReport(any( GetApplicationReportRequest.class))).thenReturn(mockResponse); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.fail("Exception is not expected."); } catch (IOException e) { Assert.fail("Exception is not expected."); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java index 5ab35d3..064fe82 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/AMRMProtocolPBClientImpl.java @@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; @@ -68,7 +68,7 @@ public void close() { @Override public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { AllocateRequestProto requestProto = ((AllocateRequestPBImpl) request).getProto(); try { @@ -81,7 +81,7 @@ public AllocateResponse allocate(AllocateRequest request) @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + FinishApplicationMasterRequest request) throws YarnException, IOException { FinishApplicationMasterRequestProto requestProto = ((FinishApplicationMasterRequestPBImpl) request).getProto(); @@ -96,7 +96,7 @@ public FinishApplicationMasterResponse finishApplicationMaster( @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { RegisterApplicationMasterRequestProto requestProto = ((RegisterApplicationMasterRequestPBImpl) request).getProto(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java index 096bedf..8394324 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientRMProtocolPBClientImpl.java @@ -78,7 +78,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; @@ -113,7 +113,7 @@ public void close() { @Override public KillApplicationResponse forceKillApplication( - KillApplicationRequest request) throws YarnRemoteException, IOException { + KillApplicationRequest request) throws YarnException, IOException { KillApplicationRequestProto requestProto = ((KillApplicationRequestPBImpl) request).getProto(); try { @@ -127,7 +127,7 @@ public KillApplicationResponse forceKillApplication( @Override public GetApplicationReportResponse getApplicationReport( - GetApplicationReportRequest request) throws YarnRemoteException, + GetApplicationReportRequest request) throws YarnException, IOException { GetApplicationReportRequestProto requestProto = ((GetApplicationReportRequestPBImpl) request).getProto(); @@ -142,7 +142,7 @@ public GetApplicationReportResponse getApplicationReport( @Override public GetClusterMetricsResponse getClusterMetrics( - GetClusterMetricsRequest request) throws YarnRemoteException, + GetClusterMetricsRequest request) throws YarnException, IOException { GetClusterMetricsRequestProto requestProto = ((GetClusterMetricsRequestPBImpl) request).getProto(); @@ -157,7 +157,7 @@ public GetClusterMetricsResponse getClusterMetrics( @Override public GetNewApplicationResponse getNewApplication( - GetNewApplicationRequest request) throws YarnRemoteException, + GetNewApplicationRequest request) throws YarnException, IOException { GetNewApplicationRequestProto requestProto = ((GetNewApplicationRequestPBImpl) request).getProto(); @@ -172,7 +172,7 @@ public GetNewApplicationResponse getNewApplication( @Override public SubmitApplicationResponse submitApplication( - SubmitApplicationRequest request) throws YarnRemoteException, + SubmitApplicationRequest request) throws YarnException, IOException { SubmitApplicationRequestProto requestProto = ((SubmitApplicationRequestPBImpl) request).getProto(); @@ -187,7 +187,7 @@ public SubmitApplicationResponse submitApplication( @Override public GetAllApplicationsResponse getAllApplications( - GetAllApplicationsRequest request) throws YarnRemoteException, + GetAllApplicationsRequest request) throws YarnException, IOException { GetAllApplicationsRequestProto requestProto = ((GetAllApplicationsRequestPBImpl) request).getProto(); @@ -203,7 +203,7 @@ public GetAllApplicationsResponse getAllApplications( @Override public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetClusterNodesRequestProto requestProto = ((GetClusterNodesRequestPBImpl) request).getProto(); try { @@ -217,7 +217,7 @@ public GetAllApplicationsResponse 
getAllApplications( @Override public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { GetQueueInfoRequestProto requestProto = ((GetQueueInfoRequestPBImpl) request).getProto(); try { @@ -231,7 +231,7 @@ public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request) @Override public GetQueueUserAclsInfoResponse getQueueUserAcls( - GetQueueUserAclsInfoRequest request) throws YarnRemoteException, + GetQueueUserAclsInfoRequest request) throws YarnException, IOException { GetQueueUserAclsInfoRequestProto requestProto = ((GetQueueUserAclsInfoRequestPBImpl) request).getProto(); @@ -246,7 +246,7 @@ public GetQueueUserAclsInfoResponse getQueueUserAcls( @Override public GetDelegationTokenResponse getDelegationToken( - GetDelegationTokenRequest request) throws YarnRemoteException, + GetDelegationTokenRequest request) throws YarnException, IOException { GetDelegationTokenRequestProto requestProto = ((GetDelegationTokenRequestPBImpl) request).getProto(); @@ -261,7 +261,7 @@ public GetDelegationTokenResponse getDelegationToken( @Override public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException, + RenewDelegationTokenRequest request) throws YarnException, IOException { RenewDelegationTokenRequestProto requestProto = ((RenewDelegationTokenRequestPBImpl) request).getProto(); @@ -276,7 +276,7 @@ public RenewDelegationTokenResponse renewDelegationToken( @Override public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException, + CancelDelegationTokenRequest request) throws YarnException, IOException { CancelDelegationTokenRequestProto requestProto = ((CancelDelegationTokenRequestPBImpl) request).getProto(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java index 2f16479..8921375 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagerPBClientImpl.java @@ -42,7 +42,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerResponsePBImpl; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto; @@ -86,7 +86,7 @@ public void close() { @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException, + GetContainerStatusRequest request) throws YarnException, IOException { GetContainerStatusRequestProto requestProto = ((GetContainerStatusRequestPBImpl) request).getProto(); @@ -101,7 +101,7 @@ public GetContainerStatusResponse getContainerStatus( @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws 
YarnRemoteException, IOException { + throws YarnException, IOException { StartContainerRequestProto requestProto = ((StartContainerRequestPBImpl) request).getProto(); try { @@ -115,7 +115,7 @@ public StartContainerResponse startContainer(StartContainerRequest request) @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { StopContainerRequestProto requestProto = ((StopContainerRequestPBImpl) request).getProto(); try { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java index 3d4f7fd..29b51a6 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/RMAdminProtocolPBClientImpl.java @@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserRequestProto; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.GetGroupsForUserResponseProto; @@ -87,7 +87,7 @@ public void close() { @Override public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RefreshQueuesRequestProto requestProto = ((RefreshQueuesRequestPBImpl)request).getProto(); try { @@ -101,7 +101,7 @@ public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) @Override public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RefreshNodesRequestProto requestProto = ((RefreshNodesRequestPBImpl)request).getProto(); try { @@ -116,7 +116,7 @@ public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) @Override public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { RefreshSuperUserGroupsConfigurationRequestProto requestProto = ((RefreshSuperUserGroupsConfigurationRequestPBImpl)request).getProto(); try { @@ -130,7 +130,7 @@ public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfigu @Override public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( - RefreshUserToGroupsMappingsRequest request) throws YarnRemoteException, + RefreshUserToGroupsMappingsRequest request) throws YarnException, IOException { RefreshUserToGroupsMappingsRequestProto requestProto = ((RefreshUserToGroupsMappingsRequestPBImpl)request).getProto(); @@ -145,7 +145,7 @@ public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( @Override public RefreshAdminAclsResponse 
refreshAdminAcls( - RefreshAdminAclsRequest request) throws YarnRemoteException, IOException { + RefreshAdminAclsRequest request) throws YarnException, IOException { RefreshAdminAclsRequestProto requestProto = ((RefreshAdminAclsRequestPBImpl)request).getProto(); try { @@ -159,7 +159,7 @@ public RefreshAdminAclsResponse refreshAdminAcls( @Override public RefreshServiceAclsResponse refreshServiceAcls( - RefreshServiceAclsRequest request) throws YarnRemoteException, + RefreshServiceAclsRequest request) throws YarnException, IOException { RefreshServiceAclsRequestProto requestProto = ((RefreshServiceAclsRequestPBImpl)request).getProto(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java index f98c031..dae6133 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/AMRMProtocolPBServiceImpl.java @@ -31,7 +31,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FinishApplicationMasterResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RegisterApplicationMasterResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto; @@ -57,7 +57,7 @@ public AllocateResponseProto allocate(RpcController arg0, try { AllocateResponse response = real.allocate(request); return ((AllocateResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -72,7 +72,7 @@ public FinishApplicationMasterResponseProto finishApplicationMaster( try { FinishApplicationMasterResponse response = real.finishApplicationMaster(request); return ((FinishApplicationMasterResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -87,7 +87,7 @@ public RegisterApplicationMasterResponseProto registerApplicationMaster( try { RegisterApplicationMasterResponse response = real.registerApplicationMaster(request); return ((RegisterApplicationMasterResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java index 8fb1b71..caad876 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ClientRMProtocolPBServiceImpl.java @@ -64,7 +64,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllApplicationsResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto; @@ -102,7 +102,7 @@ public KillApplicationResponseProto forceKillApplication(RpcController arg0, try { KillApplicationResponse response = real.forceKillApplication(request); return ((KillApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -117,7 +117,7 @@ public GetApplicationReportResponseProto getApplicationReport( try { GetApplicationReportResponse response = real.getApplicationReport(request); return ((GetApplicationReportResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -131,7 +131,7 @@ public GetClusterMetricsResponseProto getClusterMetrics(RpcController arg0, try { GetClusterMetricsResponse response = real.getClusterMetrics(request); return ((GetClusterMetricsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -146,7 +146,7 @@ public GetNewApplicationResponseProto getNewApplication( try { GetNewApplicationResponse response = real.getNewApplication(request); return ((GetNewApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -160,7 +160,7 @@ public SubmitApplicationResponseProto submitApplication(RpcController arg0, try { SubmitApplicationResponse response = real.submitApplication(request); return ((SubmitApplicationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -176,7 +176,7 @@ public GetAllApplicationsResponseProto getAllApplications( try { GetAllApplicationsResponse response = real.getAllApplications(request); return ((GetAllApplicationsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -191,7 +191,7 @@ public GetClusterNodesResponseProto getClusterNodes(RpcController controller, try { GetClusterNodesResponse response = real.getClusterNodes(request); return ((GetClusterNodesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -206,7 +206,7 @@ public GetQueueInfoResponseProto 
getQueueInfo(RpcController controller, try { GetQueueInfoResponse response = real.getQueueInfo(request); return ((GetQueueInfoResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -222,7 +222,7 @@ public GetQueueUserAclsInfoResponseProto getQueueUserAcls( try { GetQueueUserAclsInfoResponse response = real.getQueueUserAcls(request); return ((GetQueueUserAclsInfoResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -238,7 +238,7 @@ public GetDelegationTokenResponseProto getDelegationToken( try { GetDelegationTokenResponse response = real.getDelegationToken(request); return ((GetDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -254,7 +254,7 @@ public RenewDelegationTokenResponseProto renewDelegationToken( try { RenewDelegationTokenResponse response = real.renewDelegationToken(request); return ((RenewDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -270,7 +270,7 @@ public CancelDelegationTokenResponseProto cancelDelegationToken( try { CancelDelegationTokenResponse response = real.cancelDelegationToken(request); return ((CancelDelegationTokenResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java index 19eefff..398fce6 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ContainerManagerPBServiceImpl.java @@ -31,7 +31,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainerResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusRequestProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerStatusResponseProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto; @@ -57,7 +57,7 @@ public GetContainerStatusResponseProto getContainerStatus(RpcController arg0, try { GetContainerStatusResponse response = real.getContainerStatus(request); return ((GetContainerStatusResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -71,7 +71,7 @@ public 
StartContainerResponseProto startContainer(RpcController arg0, try { StartContainerResponse response = real.startContainer(request); return ((StartContainerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -85,7 +85,7 @@ public StopContainerResponseProto stopContainer(RpcController arg0, try { StopContainerResponse response = real.stopContainer(request); return ((StopContainerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java index 385d909..5450e71 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/RMAdminProtocolPBServiceImpl.java @@ -40,7 +40,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshSuperUserGroupsConfigurationResponsePBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsRequestPBImpl; import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RefreshUserToGroupsMappingsResponsePBImpl; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.*; import com.google.protobuf.RpcController; @@ -61,7 +61,7 @@ public RefreshQueuesResponseProto refreshQueues(RpcController controller, try { RefreshQueuesResponse response = real.refreshQueues(request); return ((RefreshQueuesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -77,7 +77,7 @@ public RefreshAdminAclsResponseProto refreshAdminAcls( try { RefreshAdminAclsResponse response = real.refreshAdminAcls(request); return ((RefreshAdminAclsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -91,7 +91,7 @@ public RefreshNodesResponseProto refreshNodes(RpcController controller, try { RefreshNodesResponse response = real.refreshNodes(request); return ((RefreshNodesResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -110,7 +110,7 @@ public RefreshNodesResponseProto refreshNodes(RpcController controller, RefreshSuperUserGroupsConfigurationResponse response = real.refreshSuperUserGroupsConfiguration(request); return ((RefreshSuperUserGroupsConfigurationResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -127,7 +127,7 @@ public RefreshUserToGroupsMappingsResponseProto refreshUserToGroupsMappings( 
RefreshUserToGroupsMappingsResponse response = real.refreshUserToGroupsMappings(request); return ((RefreshUserToGroupsMappingsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -144,7 +144,7 @@ public RefreshServiceAclsResponseProto refreshServiceAcls( RefreshServiceAclsResponse response = real.refreshServiceAcls(request); return ((RefreshServiceAclsResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java index 6335a8b..a8a9be4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/event/AsyncDispatcher.java @@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ShutdownHookManager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.AbstractService; /** @@ -190,7 +190,7 @@ public void handle(Event event) { if (!stopped) { LOG.warn("AsyncDispatcher thread interrupted", e); } - throw new YarnException(e); + throw new YarnRuntimeException(e); } }; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java index ce8d05c..4eadaa4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RecordFactoryPBImpl.java @@ -24,7 +24,7 @@ import java.util.concurrent.ConcurrentMap; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; public class RecordFactoryPBImpl implements RecordFactory { @@ -53,7 +53,7 @@ public static RecordFactory get() { try { pbClazz = localConf.getClassByName(getPBImplClassName(clazz)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPBImplClassName(clazz) + "]", e); } try { @@ -61,18 +61,18 @@ public static RecordFactory get() { constructor.setAccessible(true); cache.putIfAbsent(clazz, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find 0 argument constructor", e); + throw new YarnRuntimeException("Could not find 0 argument constructor", e); } } try { Object retObject = constructor.newInstance(); return (T)retObject; } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException 
e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java index 9fc81d2..41acf7d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcClientFactoryPBImpl.java @@ -28,7 +28,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RpcClientFactory; public class RpcClientFactoryPBImpl implements RpcClientFactory { @@ -59,7 +59,7 @@ public Object getClient(Class protocol, long clientVersion, try { pbClazz = localConf.getClassByName(getPBImplClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPBImplClassName(protocol) + "]", e); } try { @@ -67,18 +67,18 @@ public Object getClient(Class protocol, long clientVersion, constructor.setAccessible(true); cache.putIfAbsent(protocol, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find constructor with params: " + Long.TYPE + ", " + InetSocketAddress.class + ", " + Configuration.class, e); + throw new YarnRuntimeException("Could not find constructor with params: " + Long.TYPE + ", " + InetSocketAddress.class + ", " + Configuration.class, e); } } try { Object retObject = constructor.newInstance(clientVersion, addr, conf); return retObject; } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -88,11 +88,11 @@ public void stopClient(Object proxy) { Method closeMethod = proxy.getClass().getMethod("close"); closeMethod.invoke(proxy); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (Exception e) { LOG.error("Cannot call close method due to Exception. 
" + "Ignoring.", e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java index 49c7bc7..54eb1df 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java @@ -34,7 +34,7 @@ import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RpcServerFactory; import com.google.protobuf.BlockingService; @@ -81,7 +81,7 @@ public Server getServer(Class protocol, Object instance, pbServiceImplClazz = localConf .getClassByName(getPbServiceImplClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getPbServiceImplClassName(protocol) + "]", e); } try { @@ -89,7 +89,7 @@ public Server getServer(Class protocol, Object instance, constructor.setAccessible(true); serviceCache.putIfAbsent(protocol, constructor); } catch (NoSuchMethodException e) { - throw new YarnException("Could not find constructor with params: " + throw new YarnRuntimeException("Could not find constructor with params: " + Long.TYPE + ", " + InetSocketAddress.class + ", " + Configuration.class, e); } @@ -99,11 +99,11 @@ public Server getServer(Class protocol, Object instance, try { service = constructor.newInstance(instance); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InstantiationException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } Class pbProtocol = service.getClass().getInterfaces()[0]; @@ -113,7 +113,7 @@ public Server getServer(Class protocol, Object instance, try { protoClazz = localConf.getClassByName(getProtoClassName(protocol)); } catch (ClassNotFoundException e) { - throw new YarnException("Failed to load class: [" + throw new YarnRuntimeException("Failed to load class: [" + getProtoClassName(protocol) + "]", e); } try { @@ -122,7 +122,7 @@ public Server getServer(Class protocol, Object instance, method.setAccessible(true); protoCache.putIfAbsent(protocol, method); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -130,11 +130,11 @@ public Server getServer(Class protocol, Object instance, return createServer(pbProtocol, addr, conf, secretManager, numHandlers, (BlockingService)method.invoke(null, service), portRangeConfig); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IOException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java index 38deca6..09b3231 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factory/providers/RpcFactoryProvider.java @@ -22,7 +22,7 @@ import java.lang.reflect.Method; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RpcClientFactory; import org.apache.hadoop.yarn.factories.RpcServerFactory; @@ -61,13 +61,13 @@ private static Object getFactoryClassInstance(String factoryClassName) { method.setAccessible(true); return method.invoke(null, null); } catch (ClassNotFoundException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (NoSuchMethodException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (InvocationTargetException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } catch (IllegalAccessException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java index 4e93d03..3260b78 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/RPCUtil.java @@ -23,24 +23,24 @@ import java.lang.reflect.InvocationTargetException; import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import com.google.protobuf.ServiceException; public class RPCUtil { /** - * Returns an instance of YarnRemoteException + * Returns an instance of {@link YarnException} */ - public static YarnRemoteException getRemoteException(Throwable t) { - return new YarnRemoteException(t); + public static YarnException getRemoteException(Throwable t) { + return new YarnException(t); } /** - * Returns an instance of YarnRemoteException + * Returns an instance of {@link YarnException} */ - public static YarnRemoteException getRemoteException(String message) { - return new YarnRemoteException(message); + public static YarnException getRemoteException(String message) { + return new YarnException(message); } private static T instantiateException( @@ -74,10 +74,10 @@ public static YarnRemoteException getRemoteException(String message) { * @param se * ServiceException * @return An instance of the actual exception, which will be a subclass of - * {@link YarnRemoteException} or {@link IOException} + * {@link YarnException} or {@link IOException} */ public static Void unwrapAndThrowException(ServiceException se) - throws IOException, YarnRemoteException { + throws IOException, YarnException { Throwable cause = se.getCause(); if (cause == null) { // SE generated by the RPC layer itself. 
@@ -92,12 +92,12 @@ public static Void unwrapAndThrowException(ServiceException se) // Assume this to be a new exception type added to YARN. This isn't // absolutely correct since the RPC layer could add an exception as // well. - throw instantiateException(YarnRemoteException.class, re); + throw instantiateException(YarnException.class, re); } - if (YarnRemoteException.class.isAssignableFrom(realClass)) { + if (YarnException.class.isAssignableFrom(realClass)) { throw instantiateException( - realClass.asSubclass(YarnRemoteException.class), re); + realClass.asSubclass(YarnException.class), re); } else if (IOException.class.isAssignableFrom(realClass)) { throw instantiateException(realClass.asSubclass(IOException.class), re); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java index 512b8d4..eb37062 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/ipc/YarnRPC.java @@ -26,7 +26,7 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.TokenIdentifier; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; /** @@ -63,7 +63,7 @@ public static YarnRPC create(Configuration conf) { try { return (YarnRPC) Class.forName(clazzName).newInstance(); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java index 185020d..5169168 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java @@ -54,7 +54,7 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.file.tfile.TFile; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.conf.YarnConfiguration; @@ -365,7 +365,7 @@ public String getApplicationOwner() throws IOException { try { aclString = valueStream.readUTF(); } catch (EOFException e) { - throw new YarnException("Error reading ACLs", e); + throw new YarnRuntimeException("Error reading ACLs", e); } acls.put(ApplicationAccessType.valueOf(appAccessOp), aclString); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java index 3a29450..0d1ea12 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/AdminACLsManager.java @@ -27,7 +27,7 @@ import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; public class AdminACLsManager { @@ -69,7 +69,7 @@ public AdminACLsManager(Configuration conf) { adminAcl.addUser(owner.getShortUserName()); } catch (IOException e){ LOG.warn("Could not add current user to admin:" + e); - throw new YarnException(e); + throw new YarnRuntimeException(e); } aclsEnabled = conf.getBoolean(YarnConfiguration.YARN_ACL_ENABLE, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java index 54a8117..c56f232 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/client/RMDelegationTokenIdentifier.java @@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.api.ClientRMProtocol; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.util.Records; @@ -105,7 +105,7 @@ public long renew(Token token, Configuration conf) throws IOException, Records.newRecord(RenewDelegationTokenRequest.class); request.setDelegationToken(convertToProtoToken(token)); return rmClient.renewDelegationToken(request).getNextExpirationTime(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } finally { RPC.stopProxy(rmClient); @@ -127,7 +127,7 @@ public void cancel(Token token, Configuration conf) throws IOException, Records.newRecord(CancelDelegationTokenRequest.class); request.setDelegationToken(convertToProtoToken(token)); rmClient.cancelDelegationToken(request); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new IOException(e); } finally { RPC.stopProxy(rmClient); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java index cd4e523..26a091d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/service/CompositeService.java @@ -26,7 +26,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * Composition of services. 
@@ -75,7 +75,7 @@ public synchronized void start() { // call stop() on all services including failed service to make sure cleanup // happens. stop(i); - throw new YarnException("Failed to Start " + getName(), e); + throw new YarnRuntimeException("Failed to Start " + getName(), e); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java index aeef3a2..8708fa4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/state/InvalidStateTransitonException.java @@ -18,9 +18,9 @@ package org.apache.hadoop.yarn.state; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; -public class InvalidStateTransitonException extends YarnException { +public class InvalidStateTransitonException extends YarnRuntimeException { private Enum currentState; private Enum event; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java index 01fc38c..962c2b9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/Apps.java @@ -22,7 +22,7 @@ import java.util.Map; import org.apache.hadoop.util.StringInterner; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import static org.apache.hadoop.yarn.util.StringHelper.*; @@ -56,7 +56,7 @@ public static void shouldHaveNext(String prefix, String s, Iterator it) } public static void throwParseException(String name, String s) { - throw new YarnException(join("Error parsing ", name, ": ", s)); + throw new YarnRuntimeException(join("Error parsing ", name, ": ", s)); } public static void setEnvFromInputString(Map env, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java index 09b8bdd..a1f0768 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebAppException.java @@ -18,9 +18,9 @@ package org.apache.hadoop.yarn.webapp; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; -public class WebAppException extends YarnException { +public class WebAppException extends YarnRuntimeException { private static final long serialVersionUID = 1L; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java index e3a5d2d..5b70dca 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestContainerLaunchRPC.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; @@ -131,7 +131,7 @@ private void testRPCTimeout(String rpcClass) throws Exception { @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException { + GetContainerStatusRequest request) throws YarnException { GetContainerStatusResponse response = recordFactory .newRecordInstance(GetContainerStatusResponse.class); response.setStatus(status); @@ -140,23 +140,23 @@ public GetContainerStatusResponse getContainerStatus( @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { try { // make the thread sleep to look like its not going to respond Thread.sleep(10000); } catch (Exception e) { LOG.error(e); - throw new YarnRemoteException(e); + throw new YarnException(e); } - throw new YarnRemoteException("Shouldn't happen!!"); + throw new YarnException("Shouldn't happen!!"); } @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { Exception e = new Exception("Dummy function", new Exception( "Dummy function cause")); - throw new YarnRemoteException(e); + throw new YarnException(e); } } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java index 050879a..0bdc589 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; @@ -85,7 +85,7 @@ public void testUnknownCall() { proxy.getNewApplication(Records .newRecord(GetNewApplicationRequest.class)); Assert.fail("Excepted RPC call to fail with unknown method."); - } catch (YarnRemoteException e) { + } catch (YarnException e) { Assert.assertTrue(e.getMessage().matches( "Unknown method getNewApplication called on.*" + "org.apache.hadoop.yarn.proto.ClientRMProtocol" @@ -147,7 +147,7 @@ private void test(String rpcClass) throws Exception { StopContainerRequest stopRequest = recordFactory.newRecordInstance(StopContainerRequest.class); stopRequest.setContainerId(containerId); proxy.stopContainer(stopRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { exception = true; Assert.assertTrue(e.getMessage().contains(EXCEPTION_MSG)); 
Assert.assertTrue(e.getMessage().contains(EXCEPTION_CAUSE)); @@ -169,7 +169,7 @@ private void test(String rpcClass) throws Exception { @Override public GetContainerStatusResponse getContainerStatus( GetContainerStatusRequest request) - throws YarnRemoteException { + throws YarnException { GetContainerStatusResponse response = recordFactory.newRecordInstance(GetContainerStatusResponse.class); response.setStatus(status); @@ -178,7 +178,7 @@ public GetContainerStatusResponse getContainerStatus( @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException { + throws YarnException { Token containerToken = request.getContainerToken(); ContainerTokenIdentifier tokenId = null; @@ -198,10 +198,10 @@ public StartContainerResponse startContainer(StartContainerRequest request) @Override public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException { + throws YarnException { Exception e = new Exception(EXCEPTION_MSG, new Exception(EXCEPTION_CAUSE)); - throw new YarnRemoteException(e); + throw new YarnException(e); } } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java index e2c0e6f..672416c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java @@ -33,7 +33,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.junit.Test; @@ -61,7 +61,7 @@ private void testPbServerFactory() { RpcServerFactoryPBImpl.get().getServer( AMRMProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -89,12 +89,12 @@ private void testPbClientFactory() { AMRMProtocol amrmClient = null; try { amrmClient = (AMRMProtocol) RpcClientFactoryPBImpl.get().getClient(AMRMProtocol.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -108,7 +108,7 @@ private void testPbClientFactory() { @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -116,7 +116,7 @@ public RegisterApplicationMasterResponse registerApplicationMaster( @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + 
FinishApplicationMasterRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -124,7 +124,7 @@ public FinishApplicationMasterResponse finishApplicationMaster( @Override public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO Auto-generated method stub return null; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java index 19c50ce..cb49894 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java @@ -38,7 +38,7 @@ public void testPbRecordFactory() { AllocateResponse response = pbRecordFactory.newRecordInstance(AllocateResponse.class); Assert.assertEquals(AllocateResponsePBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } @@ -47,7 +47,7 @@ public void testPbRecordFactory() { AllocateRequest response = pbRecordFactory.newRecordInstance(AllocateRequest.class); Assert.assertEquals(AllocateRequestPBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java index db5caa9..ad5afe4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRpcFactoryProvider.java @@ -50,12 +50,12 @@ public void testFactoryProvider() { try { clientFactory = RpcFactoryProvider.getClientFactory(conf); Assert.fail("Expected an exception - unknown serializer"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } try { serverFactory = RpcFactoryProvider.getServerFactory(conf); Assert.fail("Expected an exception - unknown serializer"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } conf = new Configuration(); @@ -65,11 +65,11 @@ public void testFactoryProvider() { try { clientFactory = RpcFactoryProvider.getClientFactory(conf); Assert.fail("Expected an exception - unknown class"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } try { serverFactory = RpcFactoryProvider.getServerFactory(conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { Assert.fail("Error while loading factory using reflection: [" + RpcServerFactoryPBImpl.class.getName() + "]"); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java index 809ef7c..68c4364 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestYarnUncaughtExceptionHandler.java @@ -30,7 +30,7 @@ private static final YarnUncaughtExceptionHandler exHandler = new YarnUncaughtExceptionHandler(); /** - * Throw {@code YarnException} inside thread and + * Throw {@code YarnRuntimeException} inside thread and * check {@code YarnUncaughtExceptionHandler} instance * * @throws InterruptedException @@ -39,7 +39,7 @@ public void testUncaughtExceptionHandlerWithRuntimeException() throws InterruptedException { final YarnUncaughtExceptionHandler spyYarnHandler = spy(exHandler); - final YarnException yarnException = new YarnException( + final YarnRuntimeException yarnException = new YarnRuntimeException( "test-yarn-runtime-exception"); final Thread yarnThread = new Thread(new Runnable() { @Override diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java index 82e20cd..3131c7b 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/ipc/TestRPCUtil.java @@ -24,7 +24,7 @@ import junit.framework.Assert; import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.junit.Test; import com.google.protobuf.ServiceException; @@ -33,7 +33,7 @@ @Test public void testUnknownExceptionUnwrapping() { - Class exception = YarnRemoteException.class; + Class exception = YarnException.class; String className = "UnknownException.class"; verifyRemoteExceptionUnwrapping(exception, className); } @@ -53,7 +53,7 @@ public void testRemoteIOExceptionDerivativeUnwrapping() { @Test public void testRemoteYarnExceptionUnwrapping() { - Class exception = YarnRemoteException.class; + Class exception = YarnException.class; verifyRemoteExceptionUnwrapping(exception, exception.getName()); } @@ -73,7 +73,7 @@ public void testUnexpectedRemoteExceptionUnwrapping() { @Test public void testRemoteYarnExceptionWithoutStringConstructor() { - // Derivatives of YarnException should always defined a string constructor. + // Derivatives of YarnException should always define a string constructor. 
Class exception = YarnTestExceptionNoConstructor.class; verifyRemoteExceptionUnwrapping(RemoteException.class, exception.getName()); } @@ -131,7 +131,7 @@ private void verifyRemoteExceptionUnwrapping( .getMessage().contains(message)); } - private static class YarnTestException extends YarnRemoteException { + private static class YarnTestException extends YarnException { private static final long serialVersionUID = 1L; @SuppressWarnings("unused") @@ -141,7 +141,7 @@ public YarnTestException(String message) { } private static class YarnTestExceptionNoConstructor extends - YarnRemoteException { + YarnException { private static final long serialVersionUID = 1L; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java index 67c2de1..0fc598a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestCompositeService.java @@ -22,7 +22,7 @@ import static org.junit.Assert.fail; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.service.CompositeService; import org.apache.hadoop.yarn.service.Service.STATE; import org.junit.Before; @@ -129,7 +129,7 @@ public void testServiceStartup() { try { serviceManager.start(); fail("Exception should have been thrown due to startup failure of last service"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { for (int i = 0; i < NUM_OF_SERVICES - 1; i++) { if (i >= FAILED_SERVICE_SEQ_NUMBER) { // Failed service state should be INITED @@ -170,7 +170,7 @@ public void testServiceStop() { // Stop the composite service try { serviceManager.stop(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { for (int i = 0; i < NUM_OF_SERVICES - 1; i++) { assertEquals("Service state should have been ", STATE.STOPPED, services[NUM_OF_SERVICES].getServiceState()); @@ -202,7 +202,7 @@ public synchronized void init(Configuration conf) { @Override public synchronized void start() { if (throwExceptionOnStart) { - throw new YarnException("Fake service start exception"); + throw new YarnRuntimeException("Fake service start exception"); } counter++; callSequenceNumber = counter; @@ -214,7 +214,7 @@ public synchronized void stop() { counter++; callSequenceNumber = counter; if (throwExceptionOnStop) { - throw new YarnException("Fake service stop exception"); + throw new YarnRuntimeException("Fake service stop exception"); } super.stop(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java index 51f8198..56cc317 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/ResourceTracker.java @@ -19,7 +19,7 @@ import java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest; @@ -28,10 +28,10 @@ public interface ResourceTracker { public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException; public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java index b638284..396204c 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java @@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto; @@ -53,7 +53,7 @@ public ResourceTrackerPBClientImpl(long clientVersion, InetSocketAddress addr, C @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerRequestProto requestProto = ((RegisterNodeManagerRequestPBImpl)request).getProto(); try { @@ -66,7 +66,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeHeartbeatRequestProto requestProto = ((NodeHeartbeatRequestPBImpl)request).getProto(); try { return new NodeHeartbeatResponsePBImpl(proxy.nodeHeartbeat(null, requestProto)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java index 4d0d3e7..442e3c8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/service/ResourceTrackerPBServiceImpl.java @@ -20,7 +20,7 @@ import 
java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatRequestProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.NodeHeartbeatResponseProto; import org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos.RegisterNodeManagerRequestProto; @@ -53,7 +53,7 @@ public RegisterNodeManagerResponseProto registerNodeManager( try { RegisterNodeManagerResponse response = real.registerNodeManager(request); return ((RegisterNodeManagerResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); @@ -67,7 +67,7 @@ public NodeHeartbeatResponseProto nodeHeartbeat(RpcController controller, try { NodeHeartbeatResponse response = real.nodeHeartbeat(request); return ((NodeHeartbeatResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java index 63d74f3..33ac783 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPCFactories.java @@ -26,8 +26,8 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.YarnRuntimeException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -60,7 +60,7 @@ private void testPbServerFactory() { RpcServerFactoryPBImpl.get().getServer( ResourceTracker.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -86,12 +86,12 @@ private void testPbClientFactory() { ResourceTracker client = null; try { client = (ResourceTracker) RpcClientFactoryPBImpl.get().getClient(ResourceTracker.class, 1, NetUtils.getConnectAddress(server), conf); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -103,7 +103,7 @@ private void testPbClientFactory() { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { // TODO Auto-generated method stub return null; @@ -111,7 +111,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public 
NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { // TODO Auto-generated method stub return null; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java index b833e61..6ce11c8 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRecordFactory.java @@ -20,7 +20,7 @@ import junit.framework.Assert; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest; @@ -35,7 +35,7 @@ public void testPbRecordFactory() { try { NodeHeartbeatRequest request = pbRecordFactory.newRecordInstance(NodeHeartbeatRequest.class); Assert.assertEquals(NodeHeartbeatRequestPBImpl.class, request.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java index 582db06..9b66fa4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LocalDirsHandlerService.java @@ -33,7 +33,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.AbstractService; @@ -88,7 +88,7 @@ */ private final class MonitoringTimerTask extends TimerTask { - public MonitoringTimerTask(Configuration conf) throws YarnException { + public MonitoringTimerTask(Configuration conf) throws YarnRuntimeException { localDirs = new DirectoryCollection( validatePaths(conf.getTrimmedStrings(YarnConfiguration.NM_LOCAL_DIRS))); logDirs = new DirectoryCollection( @@ -132,7 +132,7 @@ public void init(Configuration config) { try { localFs = FileContext.getLocalFSFileContext(config); } catch (IOException e) { - throw new YarnException("Unable to get the local filesystem", e); + throw new YarnRuntimeException("Unable to get the local filesystem", e); } FsPermission perm = new FsPermission((short)0755); boolean createSucceeded = localDirs.createNonExistentDirs(localFs, perm); @@ -311,13 +311,13 @@ public Path getLogPathToRead(String pathStr) throws IOException { } else { LOG.warn(paths[i] + " is not a valid path. 
Path should be with " + FILE_SCHEME + " scheme or without scheme"); - throw new YarnException(paths[i] + throw new YarnRuntimeException(paths[i] + " is not a valid path. Path should be with " + FILE_SCHEME + " scheme or without scheme"); } } catch (IllegalArgumentException e) { LOG.warn(e.getMessage()); - throw new YarnException(paths[i] + throw new YarnRuntimeException(paths[i] + " is not a valid path. Path should be with " + FILE_SCHEME + " scheme or without scheme"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java index e310b94..04bf9ee 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeManager.java @@ -35,7 +35,7 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -145,7 +145,7 @@ public void init(Configuration conf) { try { exec.init(); } catch (IOException e) { - throw new YarnException("Failed to initialize container executor", e); + throw new YarnRuntimeException("Failed to initialize container executor", e); } DeletionService del = createDeletionService(exec); addService(del); @@ -201,7 +201,7 @@ public void start() { try { doSecureLogin(); } catch (IOException e) { - throw new YarnException("Failed NodeManager login", e); + throw new YarnRuntimeException("Failed NodeManager login", e); } super.start(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java index 53c01d9..b671a95 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java @@ -35,7 +35,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; @@ -45,7 +45,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -205,7 +205,7 @@ protected ResourceTracker getRMClient() { } @VisibleForTesting - protected void registerWithRM() throws YarnRemoteException, IOException { + protected void registerWithRM() throws YarnException, IOException { Configuration conf = getConfig(); rmConnectWaitMS = conf.getInt( @@ -220,7 +220,7 @@ protected void registerWithRM() throws YarnRemoteException, IOException { * 1000; if(rmConnectionRetryIntervalMS < 0) { - throw new YarnException("Invalid Configuration. " + + throw new YarnRuntimeException("Invalid Configuration. " + YarnConfiguration.RESOURCEMANAGER_CONNECT_RETRY_INTERVAL_SECS + " should not be negative."); } @@ -229,7 +229,7 @@ protected void registerWithRM() throws YarnRemoteException, IOException { if(! waitForEver) { if(rmConnectWaitMS < 0) { - throw new YarnException("Invalid Configuration. " + + throw new YarnRuntimeException("Invalid Configuration. " + YarnConfiguration.RESOURCEMANAGER_CONNECT_WAIT_SECS + " can be -1, but can not be other negative numbers"); } @@ -280,7 +280,7 @@ protected void registerWithRM() throws YarnRemoteException, IOException { String errorMessage = "Failed to Connect to RM, " + "no. of failed attempts is "+rmRetryCount; LOG.error(errorMessage,e); - throw new YarnException(errorMessage,e); + throw new YarnRuntimeException(errorMessage,e); } } } @@ -289,7 +289,7 @@ protected void registerWithRM() throws YarnRemoteException, IOException { String message = "Message from ResourceManager: " + regNMResponse.getDiagnosticsMessage(); - throw new YarnException( + throw new YarnRuntimeException( "Recieved SHUTDOWN signal from Resourcemanager ,Registration of NodeManager failed, " + message); } @@ -454,7 +454,7 @@ public void run() { String errorMessage = "Failed to heartbeat to RM, " + "no. 
of failed attempts is "+rmRetryCount; LOG.error(errorMessage,e); - throw new YarnException(errorMessage,e); + throw new YarnRuntimeException(errorMessage,e); } } } @@ -507,7 +507,7 @@ public void run() { dispatcher.getEventHandler().handle( new CMgrCompletedAppsEvent(appsToCleanup)); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { //catch and throw the exception if tried MAX wait time to connect RM dispatcher.getEventHandler().handle( new NodeManagerEvent(NodeManagerEventType.SHUTDOWN)); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java index 4e7a072..6b63483 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/LocalizationProtocol.java @@ -19,11 +19,11 @@ import java.io.IOException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus; public interface LocalizationProtocol { public LocalizerHeartbeatResponse heartbeat(LocalizerStatus status) - throws YarnRemoteException, IOException; + throws YarnException, IOException; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java index 8ec1e81..b9cb8d9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java @@ -24,7 +24,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.RPC; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -56,7 +56,7 @@ public void close() { @Override public LocalizerHeartbeatResponse heartbeat(LocalizerStatus status) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LocalizerStatusProto statusProto = ((LocalizerStatusPBImpl)status).getProto(); try { return new LocalizerHeartbeatResponsePBImpl( diff --git 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java index d2b4b4e..cb2a6f7 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/service/LocalizationProtocolPBServiceImpl.java @@ -25,7 +25,7 @@ import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerHeartbeatResponseProto; import org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -47,7 +47,7 @@ public LocalizerHeartbeatResponseProto heartbeat(RpcController controller, try { LocalizerHeartbeatResponse response = real.heartbeat(request); return ((LocalizerHeartbeatResponsePBImpl)response).getProto(); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throw new ServiceException(e); } catch (IOException e) { throw new ServiceException(e); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java index cb9b988..61082ed 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -275,7 +275,7 @@ public void stop() { // Get the remoteUGI corresponding to the api call. 
private UserGroupInformation getRemoteUgi() - throws YarnRemoteException { + throws YarnException { UserGroupInformation remoteUgi; try { remoteUgi = UserGroupInformation.getCurrentUser(); @@ -309,7 +309,7 @@ private ContainerTokenIdentifier selectContainerTokenIdentifier( protected ContainerTokenIdentifier getContainerTokenIdentifier( UserGroupInformation remoteUgi, ContainerTokenIdentifier containerTokenIdentifier) - throws YarnRemoteException { + throws YarnException { if (UserGroupInformation.isSecurityEnabled()) { if (LOG.isDebugEnabled()) { LOG.debug("Number of TokenIdentifiers in the UGI from RPC: " @@ -331,14 +331,14 @@ protected ContainerTokenIdentifier getContainerTokenIdentifier( * passed if verifying the startContainer, null otherwise. * @param remoteUgi * ugi corresponding to the remote end making the api-call - * @throws YarnRemoteException + * @throws YarnException */ @Private @VisibleForTesting protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, ContainerTokenIdentifier tokenId) - throws YarnRemoteException { + throws YarnException { boolean unauthorized = false; StringBuilder messageBuilder = @@ -391,7 +391,7 @@ protected void authorizeRequest(String containerIDStr, @SuppressWarnings("unchecked") @Override public StartContainerResponse startContainer(StartContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { if (blockNewContainerRequests.get()) { throw RPCUtil.getRemoteException(new NMNotYetReadyException( @@ -507,7 +507,7 @@ public StartContainerResponse startContainer(StartContainerRequest request) @Override @SuppressWarnings("unchecked") public StopContainerResponse stopContainer(StopContainerRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ContainerId containerID = request.getContainerId(); String containerIDStr = containerID.toString(); @@ -549,7 +549,7 @@ public StopContainerResponse stopContainer(StopContainerRequest request) @Override public GetContainerStatusResponse getContainerStatus( - GetContainerStatusRequest request) throws YarnRemoteException, + GetContainerStatusRequest request) throws YarnException, IOException { ContainerId containerID = request.getContainerId(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java index 87f1cae..d761326 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/InvalidContainerException.java @@ -18,12 +18,12 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * This Exception happens when NM is rejecting container requests from RM */ -public class InvalidContainerException extends YarnException { +public class InvalidContainerException extends YarnRuntimeException { private static final long serialVersionUID = 
1L; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java index a47f681..d63bd2e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/NMNotYetReadyException.java @@ -18,13 +18,13 @@ package org.apache.hadoop.yarn.server.nodemanager.containermanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * This exception happens when NM starts from scratch but has not yet connected * with RM. */ -public class NMNotYetReadyException extends YarnException { +public class NMNotYetReadyException extends YarnRuntimeException { private static final long serialVersionUID = 1L; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java index 0fb963a..163b2dc 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/ContainersLauncher.java @@ -31,7 +31,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.UnsupportedFileSystemException; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; @@ -96,7 +96,7 @@ public void init(Configuration conf) { //TODO Is this required? 
FileContext.getLocalFSFileContext(conf); } catch (UnsupportedFileSystemException e) { - throw new YarnException("Failed to start ContainersLauncher", e); + throw new YarnRuntimeException("Failed to start ContainersLauncher", e); } super.init(conf); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java index 8dce003..706cedd 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ContainerLocalizer.java @@ -53,7 +53,7 @@ import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.LocalResource; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -253,13 +253,13 @@ protected void localizeFiles(LocalizationProtocol nodemanager, // ignore response try { nodemanager.heartbeat(status); - } catch (YarnRemoteException e) { } + } catch (YarnException e) { } return; } cs.poll(1000, TimeUnit.MILLISECONDS); } catch (InterruptedException e) { return; - } catch (YarnRemoteException e) { + } catch (YarnException e) { // TODO cleanup return; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java index a44a99d..eb79c50 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/ResourceLocalizationService.java @@ -66,7 +66,7 @@ import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.DiskChecker; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; @@ -174,7 +174,7 @@ FileContext getLocalFileContext(Configuration conf) { try { return FileContext.getLocalFSFileContext(conf); } catch (IOException e) { - throw new YarnException("Failed to access local fs"); + throw new YarnRuntimeException("Failed to access local fs"); } } @@ -185,7 +185,7 @@ private void validateConf(Configuration conf) { if (perDirFileLimit <= 
36) { LOG.error(YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with very low value."); - throw new YarnException( + throw new YarnRuntimeException( YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with a value less than 37."); } else { @@ -224,7 +224,7 @@ public void init(Configuration conf) { lfs.mkdir(new Path(logDir), null, true); } } catch (IOException e) { - throw new YarnException("Failed to initialize LocalizationService", e); + throw new YarnRuntimeException("Failed to initialize LocalizationService", e); } cacheTargetSize = @@ -318,7 +318,7 @@ public void handle(LocalizationEvent event) { ((ApplicationLocalizationEvent)event).getApplication()); break; default: - throw new YarnException("Unknown localization event: " + event); + throw new YarnRuntimeException("Unknown localization event: " + event); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java index 9567b60..0170080 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java @@ -37,7 +37,7 @@ import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -169,7 +169,7 @@ void verifyAndCreateRemoteLogDir(Configuration conf) { try { remoteFS = FileSystem.get(conf); } catch (IOException e) { - throw new YarnException("Unable to get Remote FileSystem instance", e); + throw new YarnRuntimeException("Unable to get Remote FileSystem instance", e); } boolean remoteExists = true; try { @@ -184,7 +184,7 @@ void verifyAndCreateRemoteLogDir(Configuration conf) { } catch (FileNotFoundException e) { remoteExists = false; } catch (IOException e) { - throw new YarnException( + throw new YarnRuntimeException( "Failed to check permissions for dir [" + this.remoteRootLogDir + "]", e); } @@ -198,7 +198,7 @@ void verifyAndCreateRemoteLogDir(Configuration conf) { remoteFS.mkdirs(qualified, new FsPermission(TLDIR_PERMISSIONS)); remoteFS.setPermission(qualified, new FsPermission(TLDIR_PERMISSIONS)); } catch (IOException e) { - throw new YarnException("Failed to create remoteLogDir [" + throw new YarnRuntimeException("Failed to create remoteLogDir [" + this.remoteRootLogDir + "]", e); } } @@ -279,7 +279,7 @@ public Object run() throws Exception { } }); } catch (Exception e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } } @@ -293,7 +293,7 @@ private void initApp(final ApplicationId appId, String user, initAppAggregator(appId, user, credentials, logRetentionPolicy, appAcls); eventResponse = new 
ApplicationEvent(appId, ApplicationEventType.APPLICATION_LOG_HANDLING_INITED); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { LOG.warn("Application failed to init aggregation: " + e.getMessage()); eventResponse = new ApplicationEvent(appId, ApplicationEventType.APPLICATION_LOG_HANDLING_FAILED); @@ -319,7 +319,7 @@ protected void initAppAggregator(final ApplicationId appId, String user, getRemoteNodeLogFileForApp(appId, user), logRetentionPolicy, appAcls); if (this.appLogAggregators.putIfAbsent(appId, appLogAggregator) != null) { - throw new YarnException("Duplicate initApp for " + appId); + throw new YarnRuntimeException("Duplicate initApp for " + appId); } // wait until check for existing aggregator to create dirs try { @@ -328,10 +328,10 @@ protected void initAppAggregator(final ApplicationId appId, String user, } catch (Exception e) { appLogAggregators.remove(appId); closeFileSystems(userUgi); - if (!(e instanceof YarnException)) { - e = new YarnException(e); + if (!(e instanceof YarnRuntimeException)) { + e = new YarnRuntimeException(e); } - throw (YarnException)e; + throw (YarnRuntimeException)e; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java index 2e74a3b..b319046 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java @@ -23,7 +23,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; @@ -70,7 +70,7 @@ public synchronized void start() { } catch (Exception e) { String msg = "NMWebapps failed to start."; LOG.error(msg, e); - throw new YarnException(msg); + throw new YarnRuntimeException(msg); } super.start(); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java index 63b0fd4..d83f9b6 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/DummyContainerManager.java @@ -29,7 +29,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import 
org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application; @@ -182,14 +182,14 @@ public void setBlockNewContainerRequests(boolean blockNewContainerRequests) { protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, ContainerTokenIdentifier tokenId) - throws YarnRemoteException { + throws YarnException { // do Nothing } @Override protected ContainerTokenIdentifier getContainerTokenIdentifier(UserGroupInformation remoteUgi, - ContainerTokenIdentifier containerTokenId) throws YarnRemoteException { + ContainerTokenIdentifier containerTokenId) throws YarnException { return containerTokenId; } } \ No newline at end of file diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java index 87f8e23..fc0c65a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/LocalRMInterface.java @@ -22,7 +22,7 @@ import java.nio.ByteBuffer; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -39,7 +39,7 @@ @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory.newRecordInstance(RegisterNodeManagerResponse.class); MasterKey masterKey = new MasterKeyPBImpl(); @@ -52,7 +52,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeHeartbeatResponse response = recordFactory.newRecordInstance(NodeHeartbeatResponse.class); return response; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java index 4c96d2d..3dbc7ae 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockNodeStatusUpdater.java @@ -25,7 +25,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.yarn.event.Dispatcher; -import 
org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -67,7 +67,7 @@ protected ResourceTracker getRMClient() { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory .newRecordInstance(RegisterNodeManagerResponse.class); @@ -81,7 +81,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); LOG.info("Got heartbeat number " + heartBeatID); nodeStatus.setResponseId(heartBeatID++); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java index 396706d..fe2655d 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerManagerWithLCE.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.TestContainerManager; import org.junit.After; @@ -75,7 +75,7 @@ public void tearDown() throws IOException, InterruptedException { @Override public void testContainerSetup() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -98,7 +98,7 @@ public void testContainerManagerInitialization() throws IOException { @Override public void testContainerLaunchAndStop() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -110,7 +110,7 @@ public void testContainerLaunchAndStop() throws IOException, @Override public void testContainerLaunchAndExitSuccess() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. 
Not running the test"); @@ -122,7 +122,7 @@ public void testContainerLaunchAndExitSuccess() throws IOException, @Override public void testContainerLaunchAndExitFailure() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); @@ -134,7 +134,7 @@ public void testContainerLaunchAndExitFailure() throws IOException, @Override public void testLocalFilesCleanup() throws InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { // Don't run the test if the binary is not available. if (!shouldRunTest()) { LOG.info("LCE binary path is not passed. Not running the test"); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java index 44a5185..4daf8b2 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestEventFlow.java @@ -35,7 +35,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.api.ResourceTracker; @@ -63,7 +63,7 @@ @Test public void testSuccessfulContainerLaunch() throws InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { FileContext localFS = FileContext.getLocalFSFileContext(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java index fc6fba0..14764de 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLocalDirsHandlerService.java @@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileUtil; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.service.Service.STATE; import org.junit.AfterClass; @@ -71,7 +71,7 @@ public void testValidPathsDirHandlerService() { try { dirSvc.init(conf); Assert.fail("Service should have thrown an exception due to wrong URI"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { } Assert.assertTrue("Service should not be inited", dirSvc.getServiceState() 
.compareTo(STATE.NOTINITED) == 0); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java index 98fabe1..9fc7795 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManager.java @@ -22,7 +22,7 @@ import java.io.IOException; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.junit.Test; @@ -46,7 +46,7 @@ public void testContainerExecutorInitCall() { try { nm.init(conf); fail("Init should fail"); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { //PASS assert(e.getCause().getMessage().contains("dummy executor init called")); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java index 26ff794..1848aeb 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerReboot.java @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; @@ -98,7 +98,7 @@ public void tearDown() throws IOException, InterruptedException { @Test(timeout = 2000000) public void testClearLocalDirWhenNodeReboot() throws IOException, - YarnRemoteException, InterruptedException { + YarnException, InterruptedException { nm = new MyNodeManager(); nm.start(); @@ -147,7 +147,7 @@ public void testClearLocalDirWhenNodeReboot() throws IOException, .createRemoteUser(cId.toString()); currentUser.doAs(new PrivilegedExceptionAction() { @Override - public Void run() throws YarnRemoteException, IOException { + public Void run() throws YarnException, IOException { containerManager.startContainer(startRequest); return null; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java index a550268..3765198 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerResync.java @@ -35,7 +35,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl; @@ -82,7 +82,7 @@ public void tearDown() throws IOException, InterruptedException { @SuppressWarnings("unchecked") @Test public void testKillContainersOnResync() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { NodeManager nm = new TestNodeManager1(); YarnConfiguration conf = createNMConfig(); nm.init(conf); @@ -110,7 +110,7 @@ public void testKillContainersOnResync() throws IOException, @SuppressWarnings("unchecked") @Test public void testBlockNewContainerRequestsOnStartAndResync() - throws IOException, InterruptedException, YarnRemoteException { + throws IOException, InterruptedException, YarnException { NodeManager nm = new TestNodeManager2(); YarnConfiguration conf = createNMConfig(); nm.init(conf); @@ -166,7 +166,7 @@ public TestNodeStatusUpdaterImpl1(Context context, Dispatcher dispatcher, } @Override - protected void registerWithRM() throws YarnRemoteException, IOException { + protected void registerWithRM() throws YarnException, IOException { super.registerWithRM(); registrationCount++; } @@ -288,7 +288,7 @@ public void run() { numContainers++; try { getContainerManager().startContainer(startRequest); - } catch (YarnRemoteException e) { + } catch (YarnException e) { numContainersRejected++; Assert.assertTrue(e.getMessage().contains( "Rejecting new containers as NodeManager has not" + diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java index 33686fc..183cf39 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeManagerShutdown.java @@ -57,7 +57,7 @@ import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -103,7 +103,7 @@ public void tearDown() throws IOException, InterruptedException { @Test public void testKillContainersOnShutdown() throws IOException, - 
YarnRemoteException { + YarnException { NodeManager nm = new TestNodeManager(); nm.init(createNMConfig()); nm.start(); @@ -150,7 +150,7 @@ public void testKillContainersOnShutdown() throws IOException, public static void startContainer(NodeManager nm, ContainerId cId, FileContext localFS, File scriptFileDir, File processStartFile) - throws IOException, YarnRemoteException { + throws IOException, YarnException { File scriptFile = createUnhaltingScriptFile(cId, scriptFileDir, processStartFile); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java index 4e150c4..dc34336 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNodeStatusUpdater.java @@ -42,7 +42,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; @@ -54,7 +54,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.Dispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -134,7 +134,7 @@ public MyResourceTracker(Context context) { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { NodeId nodeId = request.getNodeId(); Resource resource = request.getResource(); @@ -171,7 +171,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); LOG.info("Got heartbeat number " + heartBeatID); NodeManagerMetrics mockMetrics = mock(NodeManagerMetrics.class); @@ -334,7 +334,7 @@ public MyNodeStatusUpdater4(Context context, Dispatcher dispatcher, protected ResourceTracker getRMClient() { if(System.currentTimeMillis() - waitStartTime <= rmStartIntervalMS || rmNeverStart) { - throw new YarnException("Faking RM start failure as start " + + throw new YarnRuntimeException("Faking RM start failure as start " + "delay timer has not expired."); } else { return resourceTracker; @@ -407,7 +407,7 @@ public void stop() { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = 
recordFactory @@ -419,7 +419,7 @@ public RegisterNodeManagerResponse registerNodeManager( } @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus nodeStatus = request.getNodeStatus(); nodeStatus.setResponseId(heartBeatID++); @@ -445,7 +445,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = @@ -457,7 +457,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { LOG.info("Got heartBeatId: [" + heartBeatID +"]"); NodeStatus nodeStatus = request.getNodeStatus(); nodeStatus.setResponseId(heartBeatID++); @@ -497,7 +497,7 @@ public MyResourceTracker4(Context context) { @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory .newRecordInstance(RegisterNodeManagerResponse.class); @@ -508,7 +508,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { try { if (heartBeatID == 0) { Assert.assertEquals(request.getNodeStatus().getContainersStatuses() @@ -537,7 +537,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) .get(4).getState() == ContainerState.RUNNING && request.getNodeStatus().getContainersStatuses().get(4) .getContainerId().getId() == 5); - throw new YarnException("Lost the heartbeat response"); + throw new YarnRuntimeException("Lost the heartbeat response"); } else if (heartBeatID == 2) { Assert.assertEquals(request.getNodeStatus().getContainersStatuses() .size(), 7); @@ -589,7 +589,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) public NodeAction registerNodeAction = NodeAction.NORMAL; @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { RegisterNodeManagerResponse response = recordFactory @@ -602,7 +602,7 @@ public RegisterNodeManagerResponse registerNodeManager( @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { heartBeatID++; throw RPCUtil.getRemoteException("NodeHeartbeat exception"); } @@ -646,7 +646,7 @@ public void run() { nm.start(); } catch (Throwable e) { TestNodeStatusUpdater.this.nmStartError = e; - throw new YarnException(e); + throw new YarnRuntimeException(e); } } }.start(); @@ -765,7 +765,7 @@ protected NodeStatusUpdater createNodeStatusUpdater(Context context, return nodeStatusUpdater; } }; - verifyNodeStartFailure("org.apache.hadoop.yarn.YarnException: " + verifyNodeStartFailure("org.apache.hadoop.yarn.YarnRuntimeException: " + "Recieved SHUTDOWN signal from Resourcemanager ," + "Registration of NodeManager failed, " + "Message from 
ResourceManager: RM Shutting Down Node"); @@ -867,7 +867,7 @@ protected ContainerManagerImpl createContainerManager(Context context, @Override public void start() { // Simulating failure of starting RPC server - throw new YarnException("Starting of RPC Server failed"); + throw new YarnRuntimeException("Starting of RPC Server failed"); } }; } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java index fdb1007..39b3337 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRPCFactories.java @@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol; @@ -56,7 +56,7 @@ private void testPbServerFactory() { RpcServerFactoryPBImpl.get().getServer( LocalizationProtocol.class, instance, addr, conf, null, 1); server.start(); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { @@ -87,12 +87,12 @@ private void testPbClientFactory() { LocalizationProtocol.class, 1, NetUtils.getConnectAddress(server), conf); Assert.assertNotNull(client); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create client"); } - } catch (YarnException e) { + } catch (YarnRuntimeException e) { e.printStackTrace(); Assert.fail("Failed to create server"); } finally { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java index 157134c..92d24bb 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestRecordFactory.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.yarn.server.nodemanager; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse; @@ -37,7 +37,7 @@ public void testPbRecordFactory() { LocalizerHeartbeatResponse.class); Assert.assertEquals(LocalizerHeartbeatResponsePBImpl.class, response.getClass()); - } catch (YarnException e) { + } catch (YarnRuntimeException e) { 
e.printStackTrace(); Assert.fail("Failed to crete record"); } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java index eae48ab..6aa4ff9 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/BaseContainerManagerTest.java @@ -40,7 +40,7 @@ import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; @@ -183,7 +183,7 @@ public void setup() throws IOException { @Override protected void authorizeRequest(String containerIDStr, ContainerLaunchContext launchContext, UserGroupInformation remoteUgi, - ContainerTokenIdentifier tokenId) throws YarnRemoteException { + ContainerTokenIdentifier tokenId) throws YarnException { // do nothing } }; @@ -212,13 +212,13 @@ public void tearDown() throws IOException, InterruptedException { public static void waitForContainerState(ContainerManager containerManager, ContainerId containerID, ContainerState finalState) - throws InterruptedException, YarnRemoteException, IOException { + throws InterruptedException, YarnException, IOException { waitForContainerState(containerManager, containerID, finalState, 20); } public static void waitForContainerState(ContainerManager containerManager, ContainerId containerID, ContainerState finalState, int timeOutMax) - throws InterruptedException, YarnRemoteException, IOException { + throws InterruptedException, YarnException, IOException { GetContainerStatusRequest request = recordFactory.newRecordInstance(GetContainerStatusRequest.class); request.setContainerId(containerID); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java index 976e94c..40e6a8e 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/TestContainerManager.java @@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.URL; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import 
org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.api.ResourceManagerConstants; import org.apache.hadoop.yarn.server.nodemanager.CMgrCompletedAppsEvent; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; @@ -94,7 +94,7 @@ public void testContainerManagerInitialization() throws IOException { ContainerId cId = createContainerId(); request.setContainerId(cId); containerManager.getContainerStatus(request); - } catch (YarnRemoteException e) { + } catch (YarnException e) { throwsException = true; } Assert.assertTrue(throwsException); @@ -102,7 +102,7 @@ public void testContainerManagerInitialization() throws IOException { @Test public void testContainerSetup() throws IOException, InterruptedException, - YarnRemoteException { + YarnException { containerManager.start(); @@ -184,7 +184,7 @@ public void testContainerSetup() throws IOException, InterruptedException, @Test public void testContainerLaunchAndStop() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile"); @@ -287,7 +287,7 @@ public void testContainerLaunchAndStop() throws IOException, } private void testContainerLaunchAndExit(int exitCode) throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile"); PrintWriter fileWriter = new PrintWriter(scriptFile); @@ -362,7 +362,7 @@ private void testContainerLaunchAndExit(int exitCode) throws IOException, @Test public void testContainerLaunchAndExitSuccess() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); int exitCode = 0; @@ -373,7 +373,7 @@ public void testContainerLaunchAndExitSuccess() throws IOException, @Test public void testContainerLaunchAndExitFailure() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { containerManager.start(); int exitCode = 50; @@ -384,7 +384,7 @@ public void testContainerLaunchAndExitFailure() throws IOException, @Test public void testLocalFilesCleanup() throws InterruptedException, - IOException, YarnRemoteException { + IOException, YarnException { // Real del service delSrvc = new DeletionService(exec); delSrvc.init(conf); @@ -524,7 +524,7 @@ public void testContainerLaunchFromPreviousRM() throws IOException, boolean catchException = false; try { containerManager.startContainer(startRequest1); - } catch (YarnRemoteException e) { + } catch (YarnException e) { catchException = true; Assert.assertTrue(e.getMessage().contains( "Container " + cId1 + " rejected as it is allocated by a previous RM")); @@ -549,10 +549,10 @@ public void testContainerLaunchFromPreviousRM() throws IOException, boolean noException = true; try { containerManager.startContainer(startRequest2); - } catch (YarnRemoteException e) { + } catch (YarnException e) { noException = false; } - // Verify that startContainer get no YarnRemoteException + // Verify that startContainer gets no YarnException Assert.assertTrue(noException); } } diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java index 38d5136..b42ed02 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestContainerLocalizer.java @@ -58,7 +58,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.api.records.LocalResourceVisibility; @@ -205,7 +205,7 @@ public void testContainerLocalizerClosesFilesystems() throws Exception { // verify filesystems are closed when localizer fails localizer = setupContainerLocalizerForTest(); - doThrow(new YarnException("Forced Failure")).when(localizer).localizeFiles( + doThrow(new YarnRuntimeException("Forced Failure")).when(localizer).localizeFiles( any(LocalizationProtocol.class), any(CompletionService.class), any(UserGroupInformation.class)); verify(localizer, never()).closeFileSystems( diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java index 057d7cc..cc2f7ee 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestLocalCacheDirectoryManager.java @@ -21,7 +21,7 @@ import junit.framework.Assert; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.junit.Test; @@ -80,7 +80,7 @@ public void testMinimumPerDirectoryFileLimit() { e = e1; } Assert.assertNotNull(e); - Assert.assertEquals(YarnException.class, e.getClass()); + Assert.assertEquals(YarnRuntimeException.class, e.getClass()); Assert.assertEquals(e.getMessage(), YarnConfiguration.NM_LOCAL_CACHE_MAX_FILES_PER_DIRECTORY + " parameter is configured with a value less than 37."); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java index 3626626..f9d6312 100644 --- 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/localizer/TestResourceLocalizationService.java @@ -85,7 +85,7 @@ import org.apache.hadoop.yarn.event.AsyncDispatcher; import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.DeletionService; import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService; @@ -1014,7 +1014,7 @@ private LocalizerStatus createLocalizerStatusForFailedResource( LocalizerStatus status = createLocalizerStatus(localizerId); LocalResourceStatus resourceStatus = new LocalResourceStatusPBImpl(); resourceStatus.setException(YarnServerBuilderUtils - .newSerializedException(new YarnRemoteException("test"))); + .newSerializedException(new YarnException("test"))); resourceStatus.setStatus(ResourceStatusType.FETCH_FAILURE); resourceStatus.setResource(req); status.addResourceStatus(resourceStatus); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java index f8384f1..6a93365 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java @@ -59,7 +59,7 @@ import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest; import org.apache.hadoop.yarn.api.records.ApplicationAccessType; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; @@ -78,7 +78,7 @@ import org.apache.hadoop.yarn.event.DrainDispatcher; import org.apache.hadoop.yarn.event.Event; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat; @@ -431,7 +431,7 @@ public void testVerifyAndCreateRemoteDirsFailure() super.dirsHandler)); logAggregationService.init(this.conf); - YarnException e = new YarnException("KABOOM!"); + YarnRuntimeException e = new YarnRuntimeException("KABOOM!"); doThrow(e) .when(logAggregationService).verifyAndCreateRemoteLogDir( 
any(Configuration.class)); @@ -528,7 +528,7 @@ public void testLogAggregationInitAppFailsWithoutKillingNM() throws Exception { ApplicationId appId = BuilderUtils.newApplicationId( System.currentTimeMillis(), (int)Math.random()); - doThrow(new YarnException("KABOOM!")) + doThrow(new YarnRuntimeException("KABOOM!")) .when(logAggregationService).initAppAggregator( eq(appId), eq(user), any(Credentials.class), any(ContainerLogsRetentionPolicy.class), anyMap()); @@ -708,7 +708,7 @@ private void verifyContainerLogs( @Test public void testLogAggregationForRealContainerLaunch() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { this.containerManager.start(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java index bd8a5f6..f51f823 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/TestContainersMonitor.java @@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.AsyncDispatcher; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; @@ -179,7 +179,7 @@ public void testProcessTreeLimits() throws IOException { @Test public void testContainerKillOnMemoryOverflow() throws IOException, - InterruptedException, YarnRemoteException { + InterruptedException, YarnException { if (!ProcfsBasedProcessTree.isAvailable()) { return; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java index e1fb6d3..e023b3a 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsRequest; import org.apache.hadoop.yarn.api.protocolrecords.RefreshUserToGroupsMappingsResponse; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import 
org.apache.hadoop.yarn.ipc.RPCUtil; @@ -133,7 +133,7 @@ public void stop() { super.stop(); } - private UserGroupInformation checkAcls(String method) throws YarnRemoteException { + private UserGroupInformation checkAcls(String method) throws YarnException { UserGroupInformation user; try { user = UserGroupInformation.getCurrentUser(); @@ -168,7 +168,7 @@ private UserGroupInformation checkAcls(String method) throws YarnRemoteException @Override public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshQueues"); try { scheduler.reinitialize(conf, this.rmContext); @@ -186,7 +186,7 @@ public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request) @Override public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshNodes"); try { this.nodesListManager.refreshNodes(new YarnConfiguration()); @@ -204,7 +204,7 @@ public RefreshNodesResponse refreshNodes(RefreshNodesRequest request) @Override public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest request) - throws YarnRemoteException { + throws YarnException { UserGroupInformation user = checkAcls("refreshSuperUserGroupsConfiguration"); ProxyUsers.refreshSuperUserGroupsConfiguration(new Configuration()); @@ -217,7 +217,7 @@ public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfigu @Override public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( - RefreshUserToGroupsMappingsRequest request) throws YarnRemoteException { + RefreshUserToGroupsMappingsRequest request) throws YarnException { UserGroupInformation user = checkAcls("refreshUserToGroupsMappings"); Groups.getUserToGroupsMappingService().refresh(); @@ -230,7 +230,7 @@ public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings( @Override public RefreshAdminAclsResponse refreshAdminAcls( - RefreshAdminAclsRequest request) throws YarnRemoteException { + RefreshAdminAclsRequest request) throws YarnException { UserGroupInformation user = checkAcls("refreshAdminAcls"); Configuration conf = new Configuration(); @@ -245,7 +245,7 @@ public RefreshAdminAclsResponse refreshAdminAcls( @Override public RefreshServiceAclsResponse refreshServiceAcls( - RefreshServiceAclsRequest request) throws YarnRemoteException { + RefreshServiceAclsRequest request) throws YarnException { Configuration conf = new Configuration(); if (!conf.getBoolean( CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java index 3094a93..848e978 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java @@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import 
org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -139,7 +139,7 @@ public InetSocketAddress getBindAddress() { } private void authorizeRequest(ApplicationAttemptId appAttemptID) - throws YarnRemoteException { + throws YarnException { if (!UserGroupInformation.isSecurityEnabled()) { return; @@ -169,7 +169,7 @@ private void authorizeRequest(ApplicationAttemptId appAttemptID) @Override public RegisterApplicationMasterResponse registerApplicationMaster( - RegisterApplicationMasterRequest request) throws YarnRemoteException, + RegisterApplicationMasterRequest request) throws YarnException, IOException { ApplicationAttemptId applicationAttemptId = request @@ -219,7 +219,7 @@ public RegisterApplicationMasterResponse registerApplicationMaster( @Override public FinishApplicationMasterResponse finishApplicationMaster( - FinishApplicationMasterRequest request) throws YarnRemoteException, + FinishApplicationMasterRequest request) throws YarnException, IOException { ApplicationAttemptId applicationAttemptId = request @@ -252,7 +252,7 @@ public FinishApplicationMasterResponse finishApplicationMaster( @Override public AllocateResponse allocate(AllocateRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { ApplicationAttemptId appAttemptId = request.getApplicationAttemptId(); authorizeRequest(appAttemptId); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java index bbb3ab7..651ad85 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java @@ -72,7 +72,7 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnClusterMetrics; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -204,7 +204,7 @@ ApplicationId getNewApplicationId() { @Override public GetNewApplicationResponse getNewApplication( - GetNewApplicationRequest request) throws YarnRemoteException { + GetNewApplicationRequest request) throws YarnException { GetNewApplicationResponse response = recordFactory .newRecordInstance(GetNewApplicationResponse.class); response.setApplicationId(getNewApplicationId()); @@ -223,7 +223,7 @@ public GetNewApplicationResponse getNewApplication( */ @Override public GetApplicationReportResponse getApplicationReport( - GetApplicationReportRequest request) throws YarnRemoteException { + GetApplicationReportRequest request) throws 
YarnException { ApplicationId applicationId = request.getApplicationId(); UserGroupInformation callerUGI; @@ -255,7 +255,7 @@ public GetApplicationReportResponse getApplicationReport( @Override public SubmitApplicationResponse submitApplication( - SubmitApplicationRequest request) throws YarnRemoteException { + SubmitApplicationRequest request) throws YarnException { ApplicationSubmissionContext submissionContext = request .getApplicationSubmissionContext(); ApplicationId applicationId = submissionContext.getApplicationId(); @@ -316,7 +316,7 @@ public SubmitApplicationResponse submitApplication( " submitted by user " + user); RMAuditLogger.logSuccess(user, AuditConstants.SUBMIT_APP_REQUEST, "ClientRMService", applicationId); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Exception in submitting application with id " + applicationId.getId(), e); RMAuditLogger.logFailure(user, AuditConstants.SUBMIT_APP_REQUEST, @@ -333,7 +333,7 @@ public SubmitApplicationResponse submitApplication( @SuppressWarnings("unchecked") @Override public KillApplicationResponse forceKillApplication( - KillApplicationRequest request) throws YarnRemoteException { + KillApplicationRequest request) throws YarnException { ApplicationId applicationId = request.getApplicationId(); @@ -382,7 +382,7 @@ public KillApplicationResponse forceKillApplication( @Override public GetClusterMetricsResponse getClusterMetrics( - GetClusterMetricsRequest request) throws YarnRemoteException { + GetClusterMetricsRequest request) throws YarnException { GetClusterMetricsResponse response = recordFactory .newRecordInstance(GetClusterMetricsResponse.class); YarnClusterMetrics ymetrics = recordFactory @@ -394,7 +394,7 @@ public GetClusterMetricsResponse getClusterMetrics( @Override public GetAllApplicationsResponse getAllApplications( - GetAllApplicationsRequest request) throws YarnRemoteException { + GetAllApplicationsRequest request) throws YarnException { UserGroupInformation callerUGI; try { @@ -419,7 +419,7 @@ public GetAllApplicationsResponse getAllApplications( @Override public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) - throws YarnRemoteException { + throws YarnException { GetClusterNodesResponse response = recordFactory.newRecordInstance(GetClusterNodesResponse.class); Collection nodes = this.rmContext.getRMNodes().values(); @@ -433,7 +433,7 @@ public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request) @Override public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request) - throws YarnRemoteException { + throws YarnException { GetQueueInfoResponse response = recordFactory.newRecordInstance(GetQueueInfoResponse.class); try { @@ -482,7 +482,7 @@ private NodeReport createNodeReports(RMNode rmNode) { @Override public GetQueueUserAclsInfoResponse getQueueUserAcls( - GetQueueUserAclsInfoRequest request) throws YarnRemoteException { + GetQueueUserAclsInfoRequest request) throws YarnException { GetQueueUserAclsInfoResponse response = recordFactory.newRecordInstance(GetQueueUserAclsInfoResponse.class); response.setUserAclsInfoList(scheduler.getQueueUserAclInfo()); @@ -492,7 +492,7 @@ public GetQueueUserAclsInfoResponse getQueueUserAcls( @Override public GetDelegationTokenResponse getDelegationToken( - GetDelegationTokenRequest request) throws YarnRemoteException { + GetDelegationTokenRequest request) throws YarnException { try { // Verify that the connection is kerberos authenticated @@ -530,7 +530,7 @@ public GetDelegationTokenResponse getDelegationToken( 
@Override public RenewDelegationTokenResponse renewDelegationToken( - RenewDelegationTokenRequest request) throws YarnRemoteException { + RenewDelegationTokenRequest request) throws YarnException { try { if (!isAllowedDelegationTokenOp()) { throw new IOException( @@ -555,7 +555,7 @@ public RenewDelegationTokenResponse renewDelegationToken( @Override public CancelDelegationTokenResponse cancelDelegationToken( - CancelDelegationTokenRequest request) throws YarnRemoteException { + CancelDelegationTokenRequest request) throws YarnException { try { if (!isAllowedDelegationTokenOp()) { throw new IOException( diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java index 41b5881..1c502b0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/NodesListManager.java @@ -29,7 +29,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.HostsFileReader; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.EventHandler; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; @@ -80,7 +80,7 @@ public void init(Configuration conf) { } catch (IOException ioe2) { // Should *never* happen this.hostsReader = null; - throw new YarnException(ioe2); + throw new YarnRuntimeException(ioe2); } } super.init(conf); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java index fdf45b0..b2a77e2 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; @@ -239,7 +239,7 @@ protected synchronized void checkAppNumCompletedLimit() { @SuppressWarnings("unchecked") protected void submitApplication( ApplicationSubmissionContext submissionContext, long submitTime, - boolean isRecovered, String user) throws YarnRemoteException { + boolean isRecovered, String user) throws YarnException { ApplicationId applicationId = 
submissionContext.getApplicationId(); // Validation of the ApplicationSubmissionContext needs to be completed diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java index b8208a2..47f8345 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java @@ -34,7 +34,7 @@ import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ShutdownHookManager; import org.apache.hadoop.util.StringUtils; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; @@ -297,11 +297,11 @@ protected ResourceScheduler createScheduler() { return (ResourceScheduler) ReflectionUtils.newInstance(schedulerClazz, this.conf); } else { - throw new YarnException("Class: " + schedulerClassName + throw new YarnRuntimeException("Class: " + schedulerClassName + " not instance of " + ResourceScheduler.class.getCanonicalName()); } } catch (ClassNotFoundException e) { - throw new YarnException("Could not instantiate Scheduler: " + throw new YarnRuntimeException("Could not instantiate Scheduler: " + schedulerClassName, e); } } @@ -334,7 +334,7 @@ protected static void validateConfigs(Configuration conf) { conf.getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS); if (globalMaxAppAttempts <= 0) { - throw new YarnException("Invalid global max attempts configuration" + throw new YarnRuntimeException("Invalid global max attempts configuration" + ", " + YarnConfiguration.RM_AM_MAX_ATTEMPTS + "=" + globalMaxAppAttempts + ", it should be a positive integer."); } @@ -348,7 +348,7 @@ protected static void validateConfigs(Configuration conf) { YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB); if (minMem <= 0 || minMem > maxMem) { - throw new YarnException("Invalid resource scheduler memory" + throw new YarnRuntimeException("Invalid resource scheduler memory" + " allocation configuration" + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB + "=" + minMem @@ -366,7 +366,7 @@ protected static void validateConfigs(Configuration conf) { YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES); if (minVcores <= 0 || minVcores > maxVcores) { - throw new YarnException("Invalid resource scheduler vcores" + throw new YarnRuntimeException("Invalid resource scheduler vcores" + " allocation configuration" + ", " + YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES + "=" + minVcores @@ -451,7 +451,7 @@ public synchronized void stop() { try { this.eventProcessor.join(); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); } super.stop(); } @@ -470,7 +470,7 @@ public void handle(SchedulerEvent event) { } this.eventQueue.put(event); } catch (InterruptedException e) { - throw new YarnException(e); + throw new YarnRuntimeException(e); 
} } } @@ -578,7 +578,7 @@ public void start() { try { doSecureLogin(); } catch(IOException ie) { - throw new YarnException("Failed to login", ie); + throw new YarnRuntimeException("Failed to login", ie); } this.appTokenSecretManager.start(); @@ -603,7 +603,7 @@ public void start() { try { rmDTSecretManager.startThreads(); } catch(IOException ie) { - throw new YarnException("Failed to start secret manager threads", ie); + throw new YarnRuntimeException("Failed to start secret manager threads", ie); } if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) { diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java index 930473c..2577684 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java @@ -28,11 +28,11 @@ import org.apache.hadoop.net.Node; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.PolicyProvider; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.conf.YarnConfiguration; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -110,7 +110,7 @@ public synchronized void init(Configuration conf) { conf.getLong(YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS, YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS); if (nextHeartBeatInterval <= 0) { - throw new YarnException("Invalid Configuration. " + throw new YarnRuntimeException("Invalid Configuration. 
" + YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS + " should be larger than 0."); } @@ -161,7 +161,7 @@ public synchronized void stop() { @SuppressWarnings("unchecked") @Override public RegisterNodeManagerResponse registerNodeManager( - RegisterNodeManagerRequest request) throws YarnRemoteException, + RegisterNodeManagerRequest request) throws YarnException, IOException { NodeId nodeId = request.getNodeId(); @@ -230,7 +230,7 @@ public RegisterNodeManagerResponse registerNodeManager( @SuppressWarnings("unchecked") @Override public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request) - throws YarnRemoteException, IOException { + throws YarnException, IOException { NodeStatus remoteNodeStatus = request.getNodeStatus(); /** diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java index b95d2aa..4ec82e4 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java @@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.event.EventHandler; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.ipc.YarnRPC; @@ -97,7 +97,7 @@ private void connect() throws IOException { containerMgrProxy = getContainerMgrProxy(masterContainerID); } - private void launch() throws IOException, YarnRemoteException { + private void launch() throws IOException, YarnException { connect(); ContainerId masterContainerID = masterContainer.getId(); ApplicationSubmissionContext applicationContext = @@ -115,7 +115,7 @@ private void launch() throws IOException, YarnRemoteException { + " for AM " + application.getAppAttemptId()); } - private void cleanup() throws IOException, YarnRemoteException { + private void cleanup() throws IOException, YarnException { connect(); ContainerId containerId = masterContainer.getId(); StopContainerRequest stopRequest = @@ -245,7 +245,7 @@ public void run() { cleanup(); } catch(IOException ie) { LOG.info("Error cleaning master ", ie); - } catch (YarnRemoteException e) { + } catch (YarnException e) { LOG.info("Error cleaning master ", e); } break; diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java index d713efd..37610d0 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java @@ -34,7 +34,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ExitUtil; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; @@ -389,7 +389,7 @@ private YarnApplicationState createApplicationState(RMAppState rmAppState) { case FAILED: return YarnApplicationState.FAILED; } - throw new YarnException("Unknown state passed!"); + throw new YarnRuntimeException("Unknown state passed!"); } private FinalApplicationStatus createFinalApplicationStatus(RMAppState state) { @@ -408,7 +408,7 @@ private FinalApplicationStatus createFinalApplicationStatus(RMAppState state) { case KILLED: return FinalApplicationStatus.KILLED; } - throw new YarnException("Unknown state passed!"); + throw new YarnRuntimeException("Unknown state passed!"); } @Override diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java index 3d1e7dd..27628f37 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/InvalidResourceRequestException.java @@ -18,14 +18,14 @@ package org.apache.hadoop.yarn.server.resourcemanager.scheduler; -import org.apache.hadoop.yarn.YarnException; +import org.apache.hadoop.yarn.YarnRuntimeException; /** * The exception is thrown when the requested resource is out of the range * of the configured lower and upper resource boundaries. 
* */ -public class InvalidResourceRequestException extends YarnException { +public class InvalidResourceRequestException extends YarnRuntimeException { public InvalidResourceRequestException(Throwable cause) { super(cause); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java index 8e49144..e3ce389 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java +++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/Application.java @@ -46,7 +46,7 @@ import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; -import org.apache.hadoop.yarn.exceptions.YarnRemoteException; +import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.Task.State; @@ -127,7 +127,7 @@ public Resource getUsedResources() { return used; } - public synchronized void submit() throws IOException, YarnRemoteException { + public synchronized void submit() throws IOException, YarnException { ApplicationSubmissionContext context = recordFactory.newRecordInstance(ApplicationSubmissionContext.class); context.setApplicationId(this.applicationId); context.setQueue(this.queue); @@ -201,7 +201,7 @@ public synchronized void addTask(Task task) { } public synchronized void finishTask(Task task) throws IOException, - YarnRemoteException { + YarnException { Set tasks = this.tasks.get(task.getPriority()); if (!tasks.remove(task)) { throw new IllegalStateException( @@ -288,7 +288,7 @@ private synchronized void addResourceRequest( } public synchronized void assign(List containers) - throws IOException, YarnRemoteException { + throws IOException, YarnException { int numContainers = containers.size(); // Schedule in priority order @@ -307,12 +307,12 @@ public synchronized void assign(List containers) assignedContainers + "/" + numContainers); } - public synchronized void schedule() throws IOException, YarnRemoteException { + public synchronized void schedule() throws IOException, YarnException { assign(getResources()); } private synchronized void assign(Priority priority, NodeType type, - List containers) throws IOException, YarnRemoteException { + List containers) throws IOException, YarnException { for (Iterator i=containers.iterator(); i.hasNext();) { Container container = i.next(); String host = container.getNodeId().toString(); diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java index 08577c8..0174499 100644 --- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java +++ 
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
@@ -44,7 +44,7 @@
 import org.apache.hadoop.yarn.api.records.NodeState;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
@@ -206,7 +206,7 @@ public RMApp submitApp(int masterMemory, String name, String user,
       public SubmitApplicationResponse run() {
         try {
           return client.submitApplication(req);
-        } catch (YarnRemoteException e) {
+        } catch (YarnException e) {
           e.printStackTrace();
         } catch (IOException e) {
           e.printStackTrace();
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java
index 5f25396..43c0144 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/NodeManager.java
@@ -46,7 +46,7 @@
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.Token;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -83,7 +83,7 @@
   public NodeManager(String hostName, int containerManagerPort, int httpPort,
       String rackName, Resource capability,
       ResourceTrackerService resourceTrackerService, RMContext rmContext)
-      throws IOException, YarnRemoteException {
+      throws IOException, YarnException {
     this.containerManagerAddress = hostName + ":" + containerManagerPort;
     this.nodeHttpAddress = hostName + ":" + httpPort;
     this.rackName = rackName;
@@ -144,7 +144,7 @@ public Resource getUsed() {
     }
     return containerStatuses;
   }
-  public void heartbeat() throws IOException, YarnRemoteException {
+  public void heartbeat() throws IOException, YarnException {
     NodeStatus nodeStatus =
        org.apache.hadoop.yarn.server.resourcemanager.NodeManager.createNodeStatus(
            nodeId, getContainerStatuses(containers));
@@ -160,7 +160,7 @@ public void heartbeat() throws IOException, YarnRemoteException {
   @Override
   synchronized public StartContainerResponse startContainer(
       StartContainerRequest request)
-      throws YarnRemoteException {
+      throws YarnException {
     Token containerToken = request.getContainerToken();
     ContainerTokenIdentifier tokenId = null;
@@ -226,7 +226,7 @@ synchronized public void checkResourceUsage() {
   @Override
   synchronized public StopContainerResponse stopContainer(StopContainerRequest request)
-      throws YarnRemoteException {
+      throws YarnException {
     ContainerId containerID = request.getContainerId();
     String applicationId = String.valueOf(
         containerID.getApplicationAttemptId().getApplicationId().getId());
@@ -278,7 +278,7 @@ synchronized public StopContainerResponse stopContainer(StopContainerRequest req)
   }
   @Override
-  synchronized public GetContainerStatusResponse getContainerStatus(GetContainerStatusRequest request) throws YarnRemoteException {
+  synchronized public GetContainerStatusResponse getContainerStatus(GetContainerStatusRequest request) throws YarnException {
     ContainerId containerId = request.getContainerId();
     List appContainers = containers.get(
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java
index 23ee9fa..a25a280 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAMAuthorization.java
@@ -45,7 +45,7 @@
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
@@ -77,21 +77,21 @@ public MyContainerManager() {
     @Override
     public StartContainerResponse startContainer(StartContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       amTokens = request.getContainerLaunchContext().getTokens();
       return null;
     }
     @Override
     public StopContainerResponse stopContainer(StopContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       // TODO Auto-generated method stub
       return null;
     }
     @Override
     public GetContainerStatusResponse getContainerStatus(
-        GetContainerStatusRequest request) throws YarnRemoteException {
+        GetContainerStatusRequest request) throws YarnException {
       // TODO Auto-generated method stub
       return null;
     }
@@ -255,7 +255,7 @@ public AMRMProtocol run() {
     try {
       client.registerApplicationMaster(request);
       Assert.fail("Should fail with authorization error");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       Assert.assertTrue(e.getMessage().contains(
          "Unauthorized request from ApplicationMaster. "
              + "Expected ApplicationAttemptID: "
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
index afd9513..71c2fe0 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
@@ -39,7 +39,7 @@
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
@@ -170,7 +170,7 @@ public void setCompletedAppsMax(int max) {
     }
     public void submitApplication(
         ApplicationSubmissionContext submissionContext, String user)
-        throws YarnRemoteException {
+        throws YarnException {
       super.submitApplication(submissionContext,
           System.currentTimeMillis(), false, user);
     }
@@ -443,7 +443,7 @@ public void testRMAppSubmitDuplicateApplicationId() throws Exception {
     try {
       appMonitor.submitApplication(asContext, "test");
       Assert.fail("Exception is expected when applicationId is duplicate.");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       Assert.assertTrue("The thrown exception is not the expectd one.",
           e.getMessage().contains("Cannot add a duplicate!"));
     }
@@ -465,7 +465,7 @@ public void testRMAppSubmitInvalidResourceRequest() throws Exception {
       appMonitor.submitApplication(asContext, "test");
       Assert.fail("Application submission should fail because resource" +
           " request is invalid.");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       // Exception is expected
       // TODO Change this to assert the expected exception type - post YARN-142
       // sub-task related to specialized exceptions.
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
index 2f9aa6d..acb5e32 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationACLs.java
@@ -46,7 +46,7 @@
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -321,7 +321,7 @@ private void verifyEnemyAccess() throws Exception {
     try {
       enemyRmClient.forceKillApplication(finishAppRequest);
       Assert.fail("App killing by the enemy should fail!!");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       LOG.info("Got exception while killing app as the enemy", e);
       Assert
          .assertTrue(e.getMessage().contains(
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java
index b4f67e8..3eac332 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java
@@ -36,7 +36,7 @@
 import org.apache.hadoop.yarn.api.records.ContainerState;
 import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
@@ -68,7 +68,7 @@
     @Override
     public StartContainerResponse startContainer(StartContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       LOG.info("Container started by MyContainerManager: " + request);
       launched = true;
       Map env =
@@ -97,7 +97,7 @@
     @Override
     public StopContainerResponse stopContainer(StopContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       LOG.info("Container cleaned up by MyContainerManager");
       cleanedup = true;
       return null;
@@ -105,7 +105,7 @@ public StopContainerResponse stopContainer(StopContainerRequest request)
     @Override
     public GetContainerStatusResponse getContainerStatus(
-        GetContainerStatusRequest request) throws YarnRemoteException {
+        GetContainerStatusRequest request) throws YarnException {
       return null;
     }
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
index 661fbc2..d79d38f 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMService.java
@@ -59,7 +59,7 @@
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -147,7 +147,7 @@ protected ClientRMService createClientRMService() {
   }
   @Test
-  public void testGetApplicationReport() throws YarnRemoteException {
+  public void testGetApplicationReport() throws YarnException {
     RMContext rmContext = mock(RMContext.class);
     when(rmContext.getRMApps()).thenReturn(
         new ConcurrentHashMap());
@@ -209,7 +209,7 @@ public Void run() throws Exception {
         try {
           checkTokenRenewal(owner, other);
           return null;
-        } catch (YarnRemoteException ex) {
+        } catch (YarnException ex) {
           Assert.assertTrue(ex.getMessage().contains(
              "Client " + owner.getUserName() +
              " tries to renew a token with renewer specified as " +
@@ -237,7 +237,7 @@ public Void run() throws Exception {
   }
   private void checkTokenRenewal(UserGroupInformation owner,
-      UserGroupInformation renewer) throws IOException, YarnRemoteException {
+      UserGroupInformation renewer) throws IOException, YarnException {
     RMDelegationTokenIdentifier tokenIdentifier = new RMDelegationTokenIdentifier(
         new Text(owner.getUserName()), new Text(renewer.getUserName()), null);
@@ -279,7 +279,7 @@ public void handle(Event event) {}
         appId1, null, null);
     try {
       rmService.submitApplication(submitRequest1);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       Assert.fail("Exception is not expected.");
     }
     RMApp app1 = rmContext.getRMApps().get(appId1);
@@ -297,7 +297,7 @@ public void handle(Event event) {}
         appId2, name, queue);
     try {
       rmService.submitApplication(submitRequest2);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       Assert.fail("Exception is not expected.");
     }
     RMApp app2 = rmContext.getRMApps().get(appId2);
@@ -309,7 +309,7 @@ public void handle(Event event) {}
     try {
       rmService.submitApplication(submitRequest2);
       Assert.fail("Exception is expected.");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       Assert.assertTrue("The thrown exception is not expected.",
          e.getMessage().contains("Cannot add a duplicate!"));
     }
@@ -318,7 +318,7 @@ public void handle(Event event) {}
   @Test(timeout=4000)
   public void testConcurrentAppSubmit()
       throws IOException, InterruptedException, BrokenBarrierException,
-      YarnRemoteException {
+      YarnException {
     YarnScheduler yarnScheduler = mockYarnScheduler();
     RMContext rmContext = mock(RMContext.class);
     mockRMContext(yarnScheduler, rmContext);
@@ -368,7 +368,7 @@ public void handle(Event rawEvent) {
       public void run() {
         try {
           rmService.submitApplication(submitRequest1);
-        } catch (YarnRemoteException e) {}
+        } catch (YarnException e) {}
       }
     };
     t.start();
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
index fa19b65..209a3ee 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestClientRMTokens.java
@@ -58,7 +58,7 @@
 import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore;
@@ -134,7 +134,7 @@ public void testDelegationToken() throws IOException, InterruptedException {
       clientRMWithDT.getNewApplication(request);
     } catch (IOException e) {
       fail("Unexpected exception" + e);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       fail("Unexpected exception" + e);
     }
@@ -159,7 +159,7 @@ public void testDelegationToken() throws IOException, InterruptedException {
       clientRMWithDT.getNewApplication(request);
     } catch (IOException e) {
       fail("Unexpected exception" + e);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       fail("Unexpected exception" + e);
     }
@@ -199,7 +199,7 @@ public void testDelegationToken() throws IOException, InterruptedException {
       clientRMWithDT.getNewApplication(request);
     } catch (IOException e) {
       fail("Unexpected exception" + e);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       fail("Unexpected exception" + e);
     }
     cancelDelegationToken(loggedInUser, clientRMService, token);
@@ -217,7 +217,7 @@ public void testDelegationToken() throws IOException, InterruptedException {
       clientRMWithDT.getNewApplication(request);
       fail("Should not have succeeded with a cancelled delegation token");
     } catch (IOException e) {
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
     }
@@ -357,7 +357,7 @@ public Server getServer(Class protocol, Object instance,
         .doAs(new PrivilegedExceptionAction() {
           @Override
           public org.apache.hadoop.yarn.api.records.Token run()
-              throws YarnRemoteException, IOException {
+              throws YarnException, IOException {
             GetDelegationTokenRequest request = Records
                 .newRecord(GetDelegationTokenRequest.class);
             request.setRenewer(renewerString);
@@ -374,7 +374,7 @@ private long renewDelegationToken(final UserGroupInformation loggedInUser,
       throws IOException, InterruptedException {
     long nextExpTime = loggedInUser.doAs(new PrivilegedExceptionAction() {
       @Override
-      public Long run() throws YarnRemoteException, IOException {
+      public Long run() throws YarnException, IOException {
         RenewDelegationTokenRequest request = Records
            .newRecord(RenewDelegationTokenRequest.class);
         request.setDelegationToken(dToken);
@@ -391,7 +391,7 @@ private void cancelDelegationToken(final UserGroupInformation loggedInUser,
       throws IOException, InterruptedException {
     loggedInUser.doAs(new PrivilegedExceptionAction() {
       @Override
-      public Void run() throws YarnRemoteException, IOException {
+      public Void run() throws YarnException, IOException {
         CancelDelegationTokenRequest request = Records
            .newRecord(CancelDelegationTokenRequest.class);
         request.setDelegationToken(dToken);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java
index a4dfbf7..ffe8c81 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestResourceManager.java
@@ -29,13 +29,13 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetworkTopology;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
 import org.apache.hadoop.yarn.api.records.Priority;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
 import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
 import org.junit.After;
@@ -62,7 +62,7 @@ public void tearDown() throws Exception {
   private org.apache.hadoop.yarn.server.resourcemanager.NodeManager
       registerNode(String hostName, int containerManagerPort, int httpPort,
           String rackName, Resource capability) throws IOException,
-          YarnRemoteException {
+          YarnException {
     return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager(
         hostName, containerManagerPort, httpPort, rackName, capability,
         resourceManager.getResourceTrackerService(), resourceManager
@@ -71,7 +71,7 @@ public void tearDown() throws Exception {
   // @Test
   public void testResourceAllocation() throws IOException,
-      YarnRemoteException {
+      YarnException {
     LOG.info("--- START: testResourceAllocation ---");
     final int memory = 4 * 1024;
@@ -199,7 +199,7 @@ public void testResourceManagerInitConfigValidation() throws Exception {
       resourceManager.init(conf);
       fail("Exception is expected because the global max attempts" +
           " is negative.");
-    } catch (YarnException e) {
+    } catch (YarnRuntimeException e) {
       // Exception is expected.
       assertTrue("The thrown exception is not the expected one.",
          e.getMessage().startsWith(
@@ -213,7 +213,7 @@ public void testResourceManagerInitConfigValidation() throws Exception {
       resourceManager.init(conf);
       fail("Exception is expected because the min memory allocation is" +
           " larger than the max memory allocation.");
-    } catch (YarnException e) {
+    } catch (YarnRuntimeException e) {
      // Exception is expected.
      assertTrue("The thrown exception is not the expected one.",
         e.getMessage().startsWith(
@@ -227,7 +227,7 @@ public void testResourceManagerInitConfigValidation() throws Exception {
       resourceManager.init(conf);
       fail("Exception is expected because the min vcores allocation is" +
           " larger than the max vcores allocation.");
-    } catch (YarnException e) {
+    } catch (YarnRuntimeException e) {
       // Exception is expected.
       assertTrue("The thrown exception is not the expected one.",
          e.getMessage().startsWith(
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java
index 04c24fc..75815cd 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/resourcetracker/TestRMNMRPCResponseId.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.event.InlineDispatcher;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
@@ -88,7 +88,7 @@ public void tearDown() {
   }
   @Test
-  public void testRPCResponseId() throws IOException, YarnRemoteException {
+  public void testRPCResponseId() throws IOException, YarnException {
     String node = "localhost";
     Resource capability = BuilderUtils.newResource(1024, 1);
     RegisterNodeManagerRequest request = recordFactory.newRecordInstance(RegisterNodeManagerRequest.class);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java
index 6e37df4..29fae23 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestCapacityScheduler.java
@@ -39,7 +39,7 @@
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.resourcemanager.Application;
 import org.apache.hadoop.yarn.server.resourcemanager.MockNodes;
 import org.apache.hadoop.yarn.server.resourcemanager.RMContextImpl;
@@ -101,7 +101,7 @@ public void tearDown() throws Exception {
   private org.apache.hadoop.yarn.server.resourcemanager.NodeManager
       registerNode(String hostName, int containerManagerPort, int httpPort,
          String rackName, Resource capability)
-          throws IOException, YarnRemoteException {
+          throws IOException, YarnException {
     return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager(
         hostName, containerManagerPort, httpPort, rackName, capability,
         resourceManager.getResourceTrackerService(), resourceManager
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
index d252421..4022ab9 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fifo/TestFifoScheduler.java
@@ -38,7 +38,7 @@
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.InlineDispatcher;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.server.resourcemanager.Application;
@@ -87,7 +87,7 @@ public void tearDown() throws Exception {
   private org.apache.hadoop.yarn.server.resourcemanager.NodeManager
       registerNode(String hostName, int containerManagerPort, int nmHttpPort,
          String rackName, Resource capability) throws IOException,
-          YarnRemoteException {
+          YarnException {
     return new org.apache.hadoop.yarn.server.resourcemanager.NodeManager(
         hostName, containerManagerPort, nmHttpPort, rackName, capability,
         resourceManager.getResourceTrackerService(), resourceManager
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java
index e90b8bd..c38c106 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientTokens.java
@@ -44,7 +44,7 @@
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenInfo;
 import org.apache.hadoop.security.token.TokenSelector;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ContainerManager;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -59,7 +59,7 @@
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.DrainDispatcher;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.security.client.ClientToAMTokenSecretManager;
 import org.apache.hadoop.yarn.security.client.ClientTokenIdentifier;
 import org.apache.hadoop.yarn.security.client.ClientTokenSelector;
@@ -80,7 +80,7 @@
     @SuppressWarnings("unused")
     public static final long versionID = 1L;
-    public void ping() throws YarnRemoteException, IOException;
+    public void ping() throws YarnException, IOException;
   }
   private static class CustomSecurityInfo extends SecurityInfo {
@@ -123,7 +123,7 @@ public CustomAM(ApplicationAttemptId appId, String secretKeyStr) {
     }
     @Override
-    public void ping() throws YarnRemoteException, IOException {
+    public void ping() throws YarnException, IOException {
       this.pinged = true;
     }
@@ -141,7 +141,7 @@ public synchronized void start() {
             .setNumHandlers(1).setSecretManager(secretManager)
             .setInstance(this).build();
       } catch (Exception e) {
-        throw new YarnException(e);
+        throw new YarnRuntimeException(e);
       }
       server.start();
       this.address = NetUtils.getConnectAddress(server);
@@ -155,7 +155,7 @@ public synchronized void start() {
     @Override
     public StartContainerResponse startContainer(StartContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       this.clientTokensSecret =
           request.getContainerLaunchContext().getEnvironment()
              .get(ApplicationConstants.APPLICATION_CLIENT_SECRET_ENV_NAME);
@@ -164,13 +164,13 @@ public StartContainerResponse startContainer(StartContainerRequest request)
     @Override
     public StopContainerResponse stopContainer(StopContainerRequest request)
-        throws YarnRemoteException {
+        throws YarnException {
       return null;
     }
     @Override
     public GetContainerStatusResponse getContainerStatus(
-        GetContainerStatusRequest request) throws YarnRemoteException {
+        GetContainerStatusRequest request) throws YarnException {
       return null;
     }
@@ -283,7 +283,7 @@ public Void run() throws Exception {
           fail("Connection initiation with illegally modified " +
               "tokens is expected to fail.");
           return null;
-        } catch (YarnRemoteException ex) {
+        } catch (YarnException ex) {
           fail("Cannot get a YARN remote exception as " +
               "it will indicate RPC success");
           throw ex;
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 979d1c3..7d4bbefb 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -31,10 +31,10 @@
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.Dispatcher;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -92,7 +92,7 @@ public MiniYARNCluster(String testName, int noOfNodeManagers,
           new Path(targetWorkDir.getAbsolutePath()), true);
     } catch (Exception e) {
       LOG.warn("COULD NOT CLEANUP", e);
-      throw new YarnException("could not cleanup test dir", e);
+      throw new YarnRuntimeException("could not cleanup test dir", e);
     }
     if (Shell.WINDOWS) {
@@ -109,7 +109,7 @@ public MiniYARNCluster(String testName, int noOfNodeManagers,
       try {
         FileContext.getLocalFSFileContext().delete(new Path(linkPath), true);
       } catch (IOException e) {
-        throw new YarnException("could not cleanup symlink: " + linkPath, e);
+        throw new YarnRuntimeException("could not cleanup symlink: " + linkPath, e);
       }
       // Guarantee target exists before creating symlink.
@@ -120,7 +120,7 @@ public MiniYARNCluster(String testName, int noOfNodeManagers,
       try {
         shexec.execute();
       } catch (IOException e) {
-        throw new YarnException(String.format(
+        throw new YarnRuntimeException(String.format(
             "failed to create symlink from %s to %s, shell output: %s",
             linkPath, targetPath, shexec.getOutput()), e);
       }
@@ -216,7 +216,7 @@ public void run() {
         }
         super.start();
       } catch (Throwable t) {
-        throw new YarnException(t);
+        throw new YarnRuntimeException(t);
       }
       LOG.info("MiniYARN ResourceManager address: " +
           getConfig().get(YarnConfiguration.RM_ADDRESS));
@@ -321,7 +321,7 @@ public void run() {
         }
         super.start();
       } catch (Throwable t) {
-        throw new YarnException(t);
+        throw new YarnRuntimeException(t);
       }
     }
@@ -357,13 +357,13 @@ protected ResourceTracker getRMClient() {
       @Override
       public NodeHeartbeatResponse nodeHeartbeat(
-          NodeHeartbeatRequest request) throws YarnRemoteException,
+          NodeHeartbeatRequest request) throws YarnException,
           IOException {
         NodeHeartbeatResponse response = recordFactory.newRecordInstance(
             NodeHeartbeatResponse.class);
         try {
           response = rt.nodeHeartbeat(request);
-        } catch (YarnRemoteException ioe) {
+        } catch (YarnException ioe) {
           LOG.info("Exception in heartbeat from node " +
               request.getNodeStatus().getNodeId(), ioe);
           throw RPCUtil.getRemoteException(ioe);
@@ -374,12 +374,12 @@ public NodeHeartbeatResponse nodeHeartbeat(
       @Override
       public RegisterNodeManagerResponse registerNodeManager(
           RegisterNodeManagerRequest request)
-          throws YarnRemoteException, IOException {
+          throws YarnException, IOException {
         RegisterNodeManagerResponse response = recordFactory.
             newRecordInstance(RegisterNodeManagerResponse.class);
         try {
           response = rt.registerNodeManager(request);
-        } catch (YarnRemoteException ioe) {
+        } catch (YarnException ioe) {
           LOG.info("Exception in node registration from "
              + request.getNodeId().toString(), ioe);
           throw RPCUtil.getRemoteException(ioe);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
index c83ba18..6a5c396 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestContainerManagerSecurity.java
@@ -63,7 +63,7 @@
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -129,7 +129,7 @@ private void testContainerManager() throws Exception {
   }
   private void testAuthenticatedUser() throws IOException,
-      InterruptedException, YarnRemoteException {
+      InterruptedException, YarnException {
     LOG.info("Running test for authenticated user");
@@ -188,10 +188,10 @@ public Void run() throws Exception {
    *
    * @throws IOException
    * @throws InterruptedException
-   * @throws YarnRemoteException
+   * @throws YarnException
    */
   private void testMaliceUser() throws IOException, InterruptedException,
-      YarnRemoteException {
+      YarnException {
     LOG.info("Running test for malice user");
@@ -286,7 +286,7 @@ public Void run() {
       client.startContainer(request);
       fail("Connection initiation with illegally modified "
          + "tokens is expected to fail.");
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       LOG.error("Got exception", e);
       fail("Cannot get a YARN remote exception as "
          + "it will indicate RPC success");
@@ -306,7 +306,7 @@ public Void run() {
   }
   private void testExpiredTokens() throws IOException, InterruptedException,
-      YarnRemoteException {
+      YarnException {
     LOG.info("\n\nRunning test for malice user");
@@ -406,7 +406,7 @@ public Void run() {
   private AMRMProtocol submitAndRegisterApplication(
       ResourceManager resourceManager, final YarnRPC yarnRPC,
       ApplicationId appID) throws IOException,
-      UnsupportedFileSystemException, YarnRemoteException,
+      UnsupportedFileSystemException, YarnException,
       InterruptedException {
     // Use ping to simulate sleep on Windows.
@@ -491,7 +491,7 @@ public AMRMProtocol run() {
   }
   private Container requestAndGetContainer(AMRMProtocol scheduler,
-      ApplicationId appID) throws YarnRemoteException, InterruptedException,
+      ApplicationId appID) throws YarnException, InterruptedException,
       IOException {
     // Request a container allocation.
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java
index 120cf72..a5aad90 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/AppReportFetcher.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
@@ -78,11 +78,11 @@ public AppReportFetcher(Configuration conf, ClientRMProtocol applicationsManager
    * Get a report for the specified app.
    * @param appId the id of the application to get.
    * @return the ApplicationReport for that app.
-   * @throws YarnRemoteException on any error.
+   * @throws YarnException on any error.
    * @throws IOException
    */
   public ApplicationReport getApplicationReport(ApplicationId appId)
-      throws YarnRemoteException, IOException {
+      throws YarnException, IOException {
     GetApplicationReportRequest request = recordFactory
         .newRecordInstance(GetApplicationReportRequest.class);
     request.setApplicationId(appId);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
index cec30d1..4a9077b 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxy.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.service.AbstractService;
@@ -69,7 +69,7 @@ public void init(Configuration conf) {
     fetcher = new AppReportFetcher(conf);
     bindAddress = conf.get(YarnConfiguration.PROXY_ADDRESS);
     if(bindAddress == null || bindAddress.isEmpty()) {
-      throw new YarnException(YarnConfiguration.PROXY_ADDRESS +
+      throw new YarnRuntimeException(YarnConfiguration.PROXY_ADDRESS +
          " is not set so the proxy will not run.");
     }
     LOG.info("Instantiating Proxy at " + bindAddress);
@@ -97,7 +97,7 @@ public void start() {
       proxyServer.start();
     } catch (IOException e) {
       LOG.fatal("Could not start proxy web server",e);
-      throw new YarnException("Could not start proxy web server",e);
+      throw new YarnRuntimeException("Could not start proxy web server",e);
     }
     super.start();
   }
@@ -109,7 +109,7 @@ public void stop() {
         proxyServer.stop();
       } catch (Exception e) {
         LOG.fatal("Error stopping proxy web server", e);
-        throw new YarnException("Error stopping proxy web server",e);
+        throw new YarnRuntimeException("Error stopping proxy web server",e);
       }
     }
     super.stop();
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java
index c824cfb..b0ba8a1 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServer.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.YarnRuntimeException;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.service.CompositeService;
@@ -56,7 +56,7 @@ public synchronized void init(Configuration conf) {
     try {
       doSecureLogin(conf);
     } catch(IOException ie) {
-      throw new YarnException("Proxy Server Failed to login", ie);
+      throw new YarnRuntimeException("Proxy Server Failed to login", ie);
     }
     proxy = new WebAppProxy();
     addService(proxy);
diff --git hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
index a44d084..5fd426c 100644
--- hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
+++ hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/src/main/java/org/apache/hadoop/yarn/server/webproxy/WebAppProxyServlet.java
@@ -50,7 +50,7 @@
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.StringHelper;
 import org.apache.hadoop.yarn.util.TrackingUriPlugin;
@@ -217,7 +217,7 @@ private boolean isSecurityEnabled() {
   }
   private ApplicationReport getApplicationReport(ApplicationId id)
-      throws IOException, YarnRemoteException {
+      throws IOException, YarnException {
     return ((AppReportFetcher) getServletContext()
        .getAttribute(WebAppProxy.FETCHER_ATTRIBUTE)).getApplicationReport(id);
   }
@@ -335,7 +335,7 @@ protected void doGet(HttpServletRequest req, HttpServletResponse resp)
     } catch(URISyntaxException e) {
       throw new IOException(e);
-    } catch (YarnRemoteException e) {
+    } catch (YarnException e) {
       throw new IOException(e);
     }
   }
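
Note for reviewers: the hunks above follow two distinct patterns, and the minimal sketch below (not part of the patch; ExampleService and callProtocol are hypothetical names used only for illustration) shows how calling code looks after the rename. Protocol-facing methods now declare and catch the checked org.apache.hadoop.yarn.exceptions.YarnException (formerly YarnRemoteException), while service lifecycle code wraps unexpected failures in the unchecked org.apache.hadoop.yarn.YarnRuntimeException (formerly org.apache.hadoop.yarn.YarnException), matching the renames applied throughout this patch.

    // Illustrative sketch only -- not part of this patch.
    import java.io.IOException;

    import org.apache.hadoop.yarn.YarnRuntimeException;      // unchecked: service/runtime failures
    import org.apache.hadoop.yarn.exceptions.YarnException;  // checked: declared by protocol methods

    public class ExampleService {

      // Protocol-style method: declares the checked YarnException alongside
      // IOException, as the renamed test and proxy code above now does.
      public void callProtocol() throws YarnException, IOException {
        // e.g. client.submitApplication(request);
      }

      // Lifecycle-style method: wraps checked failures in the unchecked
      // YarnRuntimeException, mirroring the init()/start() changes above.
      public void start() {
        try {
          callProtocol();
        } catch (YarnException e) {
          throw new YarnRuntimeException("failed to start example service", e);
        } catch (IOException e) {
          throw new YarnRuntimeException("failed to start example service", e);
        }
      }
    }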