Search in sources :

Example 6 with RunningAppContext

Use of org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext in the Apache Hadoop project.

From the class TestStagingCleanup, the method testDeletionofStagingOnUnregistrationFailure:

@SuppressWarnings("resource")
private void testDeletionofStagingOnUnregistrationFailure(int maxAttempts, boolean shouldHaveDeleted) throws IOException {
    conf.set(MRJobConfig.MAPREDUCE_JOB_DIR, stagingJobDir);
    fs = mock(FileSystem.class);
    when(fs.delete(any(Path.class), anyBoolean())).thenReturn(true);
    //Staging Dir exists
    String user = UserGroupInformation.getCurrentUser().getShortUserName();
    Path stagingDir = MRApps.getStagingAreaDir(conf, user);
    when(fs.exists(stagingDir)).thenReturn(true);
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    JobId jobid = recordFactory.newRecordInstance(JobId.class);
    jobid.setAppId(appId);
    TestMRApp appMaster = new TestMRApp(attemptId, null, JobStateInternal.RUNNING, maxAttempts);
    appMaster.crushUnregistration = true;
    appMaster.init(conf);
    appMaster.start();
    appMaster.shutDownJob();
    ((RunningAppContext) appMaster.getContext()).resetIsLastAMRetry();
    if (shouldHaveDeleted) {
        Assert.assertEquals(new Boolean(true), appMaster.isLastAMRetry());
        verify(fs).delete(stagingJobPath, true);
    } else {
        Assert.assertEquals(new Boolean(false), appMaster.isLastAMRetry());
        verify(fs, never()).delete(stagingJobPath, true);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) RunningAppContext(org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext) FileSystem(org.apache.hadoop.fs.FileSystem) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Matchers.anyBoolean(org.mockito.Matchers.anyBoolean) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId)

Example 7 with RunningAppContext

Use of org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext in the Apache Hadoop project.

From the class TestKillAMPreemptionPolicy, the method testKillAMPreemptPolicy:

@SuppressWarnings("unchecked")
@Test
public void testKillAMPreemptPolicy() {
    ApplicationId appId = ApplicationId.newInstance(123456789, 1);
    ContainerId container = ContainerId.newContainerId(ApplicationAttemptId.newInstance(appId, 1), 1);
    AMPreemptionPolicy.Context mPctxt = mock(AMPreemptionPolicy.Context.class);
    when(mPctxt.getTaskAttempt(any(ContainerId.class))).thenReturn(MRBuilderUtils.newTaskAttemptId(MRBuilderUtils.newTaskId(MRBuilderUtils.newJobId(appId, 1), 1, TaskType.MAP), 0));
    List<Container> p = new ArrayList<Container>();
    p.add(Container.newInstance(container, null, null, null, null, null));
    when(mPctxt.getContainers(any(TaskType.class))).thenReturn(p);
    KillAMPreemptionPolicy policy = new KillAMPreemptionPolicy();
    // strictContract is null & contract is null
    RunningAppContext mActxt = getRunningAppContext();
    policy.init(mActxt);
    PreemptionMessage pM = getPreemptionMessage(false, false, container);
    policy.preempt(mPctxt, pM);
    verify(mActxt.getEventHandler(), times(0)).handle(any(TaskAttemptEvent.class));
    verify(mActxt.getEventHandler(), times(0)).handle(any(JobCounterUpdateEvent.class));
    // strictContract is not null & contract is null
    mActxt = getRunningAppContext();
    policy.init(mActxt);
    pM = getPreemptionMessage(true, false, container);
    policy.preempt(mPctxt, pM);
    verify(mActxt.getEventHandler(), times(2)).handle(any(TaskAttemptEvent.class));
    verify(mActxt.getEventHandler(), times(2)).handle(any(JobCounterUpdateEvent.class));
    // strictContract is null & contract is not null
    mActxt = getRunningAppContext();
    policy.init(mActxt);
    pM = getPreemptionMessage(false, true, container);
    policy.preempt(mPctxt, pM);
    verify(mActxt.getEventHandler(), times(2)).handle(any(TaskAttemptEvent.class));
    verify(mActxt.getEventHandler(), times(2)).handle(any(JobCounterUpdateEvent.class));
    // strictContract is not null & contract is not null
    mActxt = getRunningAppContext();
    policy.init(mActxt);
    pM = getPreemptionMessage(true, true, container);
    policy.preempt(mPctxt, pM);
    verify(mActxt.getEventHandler(), times(4)).handle(any(TaskAttemptEvent.class));
    verify(mActxt.getEventHandler(), times(4)).handle(any(JobCounterUpdateEvent.class));
}
Also used : PreemptionMessage(org.apache.hadoop.yarn.api.records.PreemptionMessage) RunningAppContext(org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext) ArrayList(java.util.ArrayList) KillAMPreemptionPolicy(org.apache.hadoop.mapreduce.v2.app.rm.preemption.KillAMPreemptionPolicy) TaskAttemptEvent(org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent) JobCounterUpdateEvent(org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent) PreemptionContainer(org.apache.hadoop.yarn.api.records.PreemptionContainer) Container(org.apache.hadoop.yarn.api.records.Container) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) TaskType(org.apache.hadoop.mapreduce.v2.api.records.TaskType) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) KillAMPreemptionPolicy(org.apache.hadoop.mapreduce.v2.app.rm.preemption.KillAMPreemptionPolicy) AMPreemptionPolicy(org.apache.hadoop.mapreduce.v2.app.rm.preemption.AMPreemptionPolicy) Test(org.junit.Test)

Aggregations

RunningAppContext (org.apache.hadoop.mapreduce.v2.app.MRAppMaster.RunningAppContext)5 FileSystem (org.apache.hadoop.fs.FileSystem)2 Path (org.apache.hadoop.fs.Path)2 JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)2 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)2 JobCounterUpdateEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent)2 TaskAttemptEvent (org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent)2 JobImpl (org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl)2 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)2 VisibleForTesting (com.google.common.annotations.VisibleForTesting)1 IOException (java.io.IOException)1 ArrayList (java.util.ArrayList)1 Configuration (org.apache.hadoop.conf.Configuration)1 LocalContainerLauncher (org.apache.hadoop.mapred.LocalContainerLauncher)1 JobHistoryCopyService (org.apache.hadoop.mapreduce.jobhistory.JobHistoryCopyService)1 JobHistoryEvent (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent)1 TaskType (org.apache.hadoop.mapreduce.v2.api.records.TaskType)1 AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext)1 JobFinishEvent (org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent)1 ContainerLauncher (org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher)1