Example 1 with TaskCompletionEvent

Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in the Apache Hadoop project.

From class TestUberAM, method testFailingMapper.

@Override
@Test
public void testFailingMapper() throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("\n\n\nStarting uberized testFailingMapper().");
    if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
        return;
    }
    Job job = runFailingMapperJob();
    // should be able to get diags for single task attempt...
    TaskID taskID = new TaskID(job.getJobID(), TaskType.MAP, 0);
    TaskAttemptID aId = new TaskAttemptID(taskID, 0);
    System.out.println("Diagnostics for " + aId + " :");
    for (String diag : job.getTaskDiagnostics(aId)) {
        System.out.println(diag);
    }
    // ...but not for second (shouldn't exist:  uber-AM overrode max attempts)
    boolean secondTaskAttemptExists = true;
    try {
        aId = new TaskAttemptID(taskID, 1);
        System.out.println("Diagnostics for " + aId + " :");
        for (String diag : job.getTaskDiagnostics(aId)) {
            System.out.println(diag);
        }
    } catch (Exception e) {
        secondTaskAttemptExists = false;
    }
    Assert.assertFalse(secondTaskAttemptExists);
    TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 2);
    Assert.assertEquals(1, events.length);
    // TIPFAILED if it comes from the AM, FAILED if it comes from the JHS
    TaskCompletionEvent.Status status = events[0].getStatus();
    Assert.assertTrue(status == TaskCompletionEvent.Status.FAILED || status == TaskCompletionEvent.Status.TIPFAILED);
    Assert.assertEquals(JobStatus.State.FAILED, job.getJobState());
    // Disabled until the uber AM honors MRJobConfig.MAP_MAX_ATTEMPTS:
    // verifyFailingMapperCounters(job);
    // TODO later: add explicit "isUber()" checks of some sort
}
Also used: TaskID (org.apache.hadoop.mapreduce.TaskID), TaskCompletionEvent (org.apache.hadoop.mapreduce.TaskCompletionEvent), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), Job (org.apache.hadoop.mapreduce.Job), File (java.io.File), IOException (java.io.IOException), Test (org.junit.Test)
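
The assertion near the end accepts either FAILED or TIPFAILED because, as the test's own comment notes, the same event is reported differently depending on which service answers: the AM reports TIPFAILED, the JobHistoryServer reports FAILED. A minimal sketch of a helper that folds the two into one check; the class and method names here are ours, not part of the Hadoop API:

import org.apache.hadoop.mapreduce.TaskCompletionEvent;

public class EventStatusUtil {

    // Treats both failure statuses as a terminal task failure. The AM
    // reports TIPFAILED while the JobHistoryServer reports FAILED for the
    // same failed task, so clients should accept either.
    public static boolean isTerminalFailure(TaskCompletionEvent event) {
        TaskCompletionEvent.Status status = event.getStatus();
        return status == TaskCompletionEvent.Status.FAILED
            || status == TaskCompletionEvent.Status.TIPFAILED;
    }
}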

Example 2 with TaskCompletionEvent

Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in the Apache Hadoop project.

From class CLI, method listEvents.

/**
   * List the task completion events for the given job.
   * @param job the job whose events to list
   * @param fromEventId the event id to start listing from
   * @param numEvents the maximum number of events to list
   * @throws IOException
   * @throws InterruptedException
   */
private void listEvents(Job job, int fromEventId, int numEvents) throws IOException, InterruptedException {
    TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEventId, numEvents);
    System.out.println("Task completion events for " + job.getJobID());
    System.out.println("Number of events (from " + fromEventId + ") are: " + events.length);
    for (TaskCompletionEvent event : events) {
        System.out.println(event.getStatus() + " " + event.getTaskAttemptId() + " " + getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
    }
}
Also used: TaskCompletionEvent (org.apache.hadoop.mapreduce.TaskCompletionEvent)
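
listEvents prints only the single page it was asked for. A caller that wants every event can keep advancing fromEventId until a short page comes back; the following sketch uses the same Job.getTaskCompletionEvents API, while the EventLister class and the listAllEvents name are ours:

import java.io.IOException;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;

public class EventLister {

    // Pages through all task completion events for a job. A page shorter
    // than the requested size signals that no more events are available.
    public static void listAllEvents(Job job, int pageSize) throws IOException, InterruptedException {
        int fromEventId = 0;
        while (true) {
            TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEventId, pageSize);
            for (TaskCompletionEvent event : events) {
                System.out.println(event.getStatus() + " " + event.getTaskAttemptId());
            }
            if (events.length < pageSize) {
                break;
            }
            fromEventId += events.length;
        }
    }
}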

Example 3 with TaskCompletionEvent

Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in the Apache Hadoop project.

From class TestMRJobs, method testFailingMapper.

@Test(timeout = 60000)
public void testFailingMapper() throws IOException, InterruptedException, ClassNotFoundException {
    LOG.info("\n\n\nStarting testFailingMapper().");
    if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
        LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
        return;
    }
    Job job = runFailingMapperJob();
    TaskID taskID = new TaskID(job.getJobID(), TaskType.MAP, 0);
    TaskAttemptID aId = new TaskAttemptID(taskID, 0);
    System.out.println("Diagnostics for " + aId + " :");
    for (String diag : job.getTaskDiagnostics(aId)) {
        System.out.println(diag);
    }
    aId = new TaskAttemptID(taskID, 1);
    System.out.println("Diagnostics for " + aId + " :");
    for (String diag : job.getTaskDiagnostics(aId)) {
        System.out.println(diag);
    }
    TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 2);
    Assert.assertEquals(TaskCompletionEvent.Status.FAILED, events[0].getStatus());
    Assert.assertEquals(TaskCompletionEvent.Status.TIPFAILED, events[1].getStatus());
    Assert.assertEquals(JobStatus.State.FAILED, job.getJobState());
    verifyFailingMapperCounters(job);
    // TODO later: add explicit "isUber()" checks of some sort
}
Also used: TaskID (org.apache.hadoop.mapreduce.TaskID), TaskCompletionEvent (org.apache.hadoop.mapreduce.TaskCompletionEvent), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), RunningJob (org.apache.hadoop.mapred.RunningJob), Job (org.apache.hadoop.mapreduce.Job), RandomTextWriterJob (org.apache.hadoop.RandomTextWriterJob), SleepJob (org.apache.hadoop.mapreduce.SleepJob), File (java.io.File), Test (org.junit.Test)
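
Both failing-mapper tests repeat the same diagnostics print loop for each attempt, and the pattern factors naturally into a small helper. A sketch only: the DiagnosticsPrinter class is ours, and it assumes the queried attempt exists (Example 1 shows that asking for a nonexistent attempt throws):

import java.io.IOException;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;

public class DiagnosticsPrinter {

    // Prints the diagnostic strings recorded for one attempt of a task.
    // Assumes the attempt exists; querying a nonexistent attempt makes
    // the client throw, as the uber-AM test above exploits.
    public static void printDiagnostics(Job job, TaskID taskID, int attempt)
            throws IOException, InterruptedException {
        TaskAttemptID attemptId = new TaskAttemptID(taskID, attempt);
        System.out.println("Diagnostics for " + attemptId + " :");
        for (String diag : job.getTaskDiagnostics(attemptId)) {
            System.out.println(diag);
        }
    }
}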

Aggregations

TaskCompletionEvent (org.apache.hadoop.mapreduce.TaskCompletionEvent): 3 usages
File (java.io.File): 2 usages
Job (org.apache.hadoop.mapreduce.Job): 2 usages
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID): 2 usages
TaskID (org.apache.hadoop.mapreduce.TaskID): 2 usages
Test (org.junit.Test): 2 usages
IOException (java.io.IOException): 1 usage
RandomTextWriterJob (org.apache.hadoop.RandomTextWriterJob): 1 usage
RunningJob (org.apache.hadoop.mapred.RunningJob): 1 usage
SleepJob (org.apache.hadoop.mapreduce.SleepJob): 1 usage