Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in project hadoop by apache.
From the class TestUberAM, method testFailingMapper:
@Override
@Test
public void testFailingMapper() throws IOException, InterruptedException,
    ClassNotFoundException {
  LOG.info("\n\n\nStarting uberized testFailingMapper().");
  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
    return;
  }
  Job job = runFailingMapperJob();
  // should be able to get diags for single task attempt...
  TaskID taskID = new TaskID(job.getJobID(), TaskType.MAP, 0);
  TaskAttemptID aId = new TaskAttemptID(taskID, 0);
  System.out.println("Diagnostics for " + aId + " :");
  for (String diag : job.getTaskDiagnostics(aId)) {
    System.out.println(diag);
  }
  // ...but not for second (shouldn't exist: uber-AM overrode max attempts)
  boolean secondTaskAttemptExists = true;
  try {
    aId = new TaskAttemptID(taskID, 1);
    System.out.println("Diagnostics for " + aId + " :");
    for (String diag : job.getTaskDiagnostics(aId)) {
      System.out.println(diag);
    }
  } catch (Exception e) {
    secondTaskAttemptExists = false;
  }
  Assert.assertFalse(secondTaskAttemptExists);
  TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 2);
  Assert.assertEquals(1, events.length);
  // TIPFAILED if it comes from the AM, FAILED if it comes from the JHS
  TaskCompletionEvent.Status status = events[0].getStatus();
  Assert.assertTrue(status == TaskCompletionEvent.Status.FAILED
      || status == TaskCompletionEvent.Status.TIPFAILED);
  Assert.assertEquals(JobStatus.State.FAILED, job.getJobState());
  // Disabling till UberAM honors MRJobConfig.MAP_MAX_ATTEMPTS
  // verifyFailingMapperCounters(job);
  // TODO later: add explicit "isUber()" checks of some sort
}
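The same diagnostics lookup can be done outside the test harness. A minimal sketch, assuming a submitted Job handle is already available; the method name printAttemptDiagnostics and its parameters are illustrative, but the TaskID and TaskAttemptID constructors and Job.getTaskDiagnostics are the same calls the test exercises:

// A minimal sketch (not part of the test): print the diagnostics of one
// attempt of one map task. 'job' is assumed to be a submitted Job that
// has completed or failed; the method name is illustrative.
private void printAttemptDiagnostics(Job job, int task, int attempt)
    throws IOException, InterruptedException {
  TaskID taskID = new TaskID(job.getJobID(), TaskType.MAP, task);
  TaskAttemptID aId = new TaskAttemptID(taskID, attempt);
  System.out.println("Diagnostics for " + aId + " :");
  for (String diag : job.getTaskDiagnostics(aId)) {
    System.out.println(diag);
  }
}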
Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in project hadoop by apache.
From the class CLI, method listEvents:
/**
 * List the task completion events for the given job.
 * @param job the job whose events to list
 * @param fromEventId the event id to start listing from
 * @param numEvents the number of events to list
 * @throws IOException
 * @throws InterruptedException
 */
private void listEvents(Job job, int fromEventId, int numEvents)
    throws IOException, InterruptedException {
  TaskCompletionEvent[] events = job.getTaskCompletionEvents(fromEventId, numEvents);
  System.out.println("Task completion events for " + job.getJobID());
  System.out.println("Number of events (from " + fromEventId + ") are: " + events.length);
  for (TaskCompletionEvent event : events) {
    System.out.println(event.getStatus() + " " + event.getTaskAttemptId() + " "
        + getTaskLogURL(event.getTaskAttemptId(), event.getTaskTrackerHttp()));
  }
}
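The fromEventId/numEvents pair supports paging, so all events of a long-running job can be drained in fixed-size windows. A hedged sketch of that loop; the page size of 10, the method name, and the printed fields are illustrative, but Job.getTaskCompletionEvents is the same API the CLI uses above:

// A hedged sketch: pages through all task completion events of a job,
// ten at a time. 'job' is assumed to be a submitted mapreduce Job;
// the method name is illustrative.
private void listAllEvents(Job job) throws IOException, InterruptedException {
  int from = 0;
  TaskCompletionEvent[] page;
  do {
    page = job.getTaskCompletionEvents(from, 10); // next window of events
    for (TaskCompletionEvent event : page) {
      System.out.println(event.getStatus() + " " + event.getTaskAttemptId());
    }
    from += page.length; // advance past the events already printed
  } while (page.length > 0);
}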
Use of org.apache.hadoop.mapreduce.TaskCompletionEvent in project hadoop by apache.
From the class TestMRJobs, method testFailingMapper:
@Test(timeout = 60000)
public void testFailingMapper() throws IOException, InterruptedException,
    ClassNotFoundException {
  LOG.info("\n\n\nStarting testFailingMapper().");
  if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
    LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test.");
    return;
  }
  Job job = runFailingMapperJob();
  TaskID taskID = new TaskID(job.getJobID(), TaskType.MAP, 0);
  TaskAttemptID aId = new TaskAttemptID(taskID, 0);
  System.out.println("Diagnostics for " + aId + " :");
  for (String diag : job.getTaskDiagnostics(aId)) {
    System.out.println(diag);
  }
  aId = new TaskAttemptID(taskID, 1);
  System.out.println("Diagnostics for " + aId + " :");
  for (String diag : job.getTaskDiagnostics(aId)) {
    System.out.println(diag);
  }
  TaskCompletionEvent[] events = job.getTaskCompletionEvents(0, 2);
  Assert.assertEquals(TaskCompletionEvent.Status.FAILED, events[0].getStatus());
  Assert.assertEquals(TaskCompletionEvent.Status.TIPFAILED, events[1].getStatus());
  Assert.assertEquals(JobStatus.State.FAILED, job.getJobState());
  verifyFailingMapperCounters(job);
  // TODO later: add explicit "isUber()" checks of some sort
}
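The two tests above check the same failing job against both statuses: FAILED marks a failed task attempt, while TIPFAILED marks the whole task (TIP, task in progress) giving up. A short hedged sketch of separating the two kinds of events; the helper name summarizeFailures is illustrative and not part of Hadoop:

// A hedged sketch: split completion events into attempt-level failures
// (FAILED) and whole-task failures (TIPFAILED). 'events' is assumed to
// come from job.getTaskCompletionEvents(...).
private void summarizeFailures(TaskCompletionEvent[] events) {
  int attemptFailures = 0;
  int taskFailures = 0;
  for (TaskCompletionEvent event : events) {
    if (event.getStatus() == TaskCompletionEvent.Status.FAILED) {
      attemptFailures++;
    } else if (event.getStatus() == TaskCompletionEvent.Status.TIPFAILED) {
      taskFailures++;
    }
  }
  System.out.println(attemptFailures + " failed attempts, "
      + taskFailures + " failed tasks");
}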