Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser in project Hadoop by Apache.
From the class TestJobHistoryEntities, method testCompletedJobWithDiagnostics:
@Test(timeout = 30000)
public void testCompletedJobWithDiagnostics() throws Exception {
  final String jobError = "Job Diagnostics";
  JobInfo jobInfo = spy(new JobInfo());
  when(jobInfo.getErrorInfo()).thenReturn(jobError);
  when(jobInfo.getJobStatus()).thenReturn(JobState.FAILED.toString());
  when(jobInfo.getAMInfos()).thenReturn(Collections.<JobHistoryParser.AMInfo>emptyList());
  final JobHistoryParser mockParser = mock(JobHistoryParser.class);
  when(mockParser.parse()).thenReturn(jobInfo);
  HistoryFileInfo info = mock(HistoryFileInfo.class);
  when(info.getConfFile()).thenReturn(fullConfPath);
  when(info.getHistoryFile()).thenReturn(fullHistoryPath);
  CompletedJob job = new CompletedJob(conf, jobId, fullHistoryPath, loadTasks,
      "user", info, jobAclsManager) {
    @Override
    protected JobHistoryParser createJobHistoryParser(Path historyFileAbsolute) throws IOException {
      return mockParser;
    }
  };
  assertEquals(jobError, job.getReport().getDiagnostics());
}
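A note on the design: the anonymous CompletedJob subclass overrides the protected createJobHistoryParser factory method, letting the test hand back a Mockito mock instead of opening a real history file; fullConfPath, fullHistoryPath, conf, jobId, loadTasks, and jobAclsManager are fixtures defined elsewhere in TestJobHistoryEntities.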
Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser in project Hadoop by Apache.
From the class JobHistoryFileParser, method parseHistoryFile:
public JobInfo parseHistoryFile(Path path) throws IOException {
  LOG.info("parsing job history file " + path);
  JobHistoryParser parser = new JobHistoryParser(fs, path);
  return parser.parse();
}
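For readers who want to try the parser outside the Hadoop tree, here is a minimal standalone sketch of the same pattern; the class name HistorySummary and the command-line .jhist path are illustrative, not part of Hadoop:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

public class HistorySummary {
  public static void main(String[] args) throws Exception {
    // args[0] is a path to a .jhist file, e.g. one copied from the done directory.
    Path historyFile = new Path(args[0]);
    Configuration conf = new Configuration();
    FileSystem fs = historyFile.getFileSystem(conf);
    JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
    JobInfo jobInfo = parser.parse();
    // parse() tolerates truncated files; check for a partial parse afterwards,
    // as the MRAppMaster recovery code below does.
    if (parser.getParseException() != null) {
      System.err.println("Partial parse: " + parser.getParseException());
    }
    System.out.println("job=" + jobInfo.getJobname()
        + " status=" + jobInfo.getJobStatus()
        + " maps=" + jobInfo.getTotalMaps()
        + " reduces=" + jobInfo.getTotalReduces());
  }
}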
Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser in project Hadoop by Apache.
From the class MRAppMaster, method parsePreviousJobHistory:
private void parsePreviousJobHistory() throws IOException {
  FSDataInputStream in = getPreviousJobHistoryStream(getConfig(), appAttemptID);
  JobHistoryParser parser = new JobHistoryParser(in);
  JobInfo jobInfo = parser.parse();
  Exception parseException = parser.getParseException();
  if (parseException != null) {
    LOG.info("Got an error parsing job-history file" + ", ignoring incomplete events.", parseException);
  }
  Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo.getAllTasks();
  for (TaskInfo taskInfo : taskInfos.values()) {
    if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
      Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator =
          taskInfo.getAllTaskAttempts().entrySet().iterator();
      while (taskAttemptIterator.hasNext()) {
        Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
        if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
          taskAttemptIterator.remove();
        }
      }
      completedTasksFromPreviousRun.put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
      LOG.info("Read from history task " + TypeConverter.toYarn(taskInfo.getTaskId()));
    }
  }
  LOG.info("Read completed tasks from history " + completedTasksFromPreviousRun.size());
  recoveredJobStartTime = jobInfo.getLaunchTime();
  // recover AMInfos
  List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
  if (jhAmInfoList != null) {
    for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
      AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(), jhAmInfo.getStartTime(),
          jhAmInfo.getContainerId(), jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(),
          jhAmInfo.getNodeManagerHttpPort());
      amInfos.add(amInfo);
    }
  }
}
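The attempt-pruning loop above relies on Iterator.remove() to drop map entries mid-iteration, which is the only safe way to mutate a map while walking its entry set. Here is a self-contained sketch of just that pattern, with plain string keys standing in for the real TaskAttemptID/TaskAttemptInfo types:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

public class PruneIncomplete {
  public static void main(String[] args) {
    Map<String, String> attempts = new HashMap<>();
    attempts.put("attempt_1", "SUCCEEDED");
    attempts.put("attempt_2", "RUNNING");
    Set<String> completed = new HashSet<>();
    completed.add("attempt_1");
    // Drop attempts with no completion event, mirroring the check against
    // getAllCompletedTaskAttempts() in the recovery code above.
    Iterator<Map.Entry<String, String>> it = attempts.entrySet().iterator();
    while (it.hasNext()) {
      if (!completed.contains(it.next().getKey())) {
        it.remove();
      }
    }
    System.out.println(attempts); // prints {attempt_1=SUCCEEDED}
  }
}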
Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser in project Hadoop by Apache.
From the class TestJobHistoryParsing, method testDiagnosticsForKilledJob:
@Test(timeout = 60000)
public void testDiagnosticsForKilledJob() throws Exception {
  LOG.info("STARTING testDiagnosticsForKilledJob");
  try {
    final Configuration conf = new Configuration();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistoryWithJobKilled(2, 1, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    app.waitForState(job, JobState.KILLED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobHistoryParser parser;
    JobInfo jobInfo;
    synchronized (fileInfo) {
      Path historyFilePath = fileInfo.getHistoryFile();
      FSDataInputStream in = null;
      FileContext fc = null;
      try {
        fc = FileContext.getFileContext(conf);
        in = fc.open(fc.makeQualified(historyFilePath));
      } catch (IOException ioe) {
        LOG.info("Cannot open history file: " + historyFilePath, ioe);
        throw new Exception("Cannot open history file");
      }
      parser = new JobHistoryParser(in);
      jobInfo = parser.parse();
    }
    Exception parseException = parser.getParseException();
    assertNull("Caught an unexpected exception " + parseException, parseException);
    final List<String> originalDiagnostics = job.getDiagnostics();
    final String historyError = jobInfo.getErrorInfo();
    assertTrue("No original diagnostics for a killed job", originalDiagnostics != null && !originalDiagnostics.isEmpty());
    assertNotNull("No history error info for a killed job", historyError);
    for (String diagString : originalDiagnostics) {
      assertTrue(historyError.contains(diagString));
    }
    assertTrue("No killed message in diagnostics", historyError.contains(JobImpl.JOB_KILLED_DIAG));
  } finally {
    LOG.info("FINISHED testDiagnosticsForKilledJob");
  }
}
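Note that the parse happens inside synchronized (fileInfo): the history server's HistoryFileInfo can move a job's file from the intermediate directory to the done directory in the background, so holding its lock keeps the path returned by getHistoryFile() stable while the stream is open. The same pattern appears in the next example.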
Use of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser in project Hadoop by Apache.
From the class TestJobHistoryParsing, method testCountersForFailedTask:
@Test(timeout = 60000)
public void testCountersForFailedTask() throws Exception {
  LOG.info("STARTING testCountersForFailedTask");
  try {
    Configuration conf = new Configuration();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app = new MRAppWithHistoryWithFailedTask(2, 1, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    app.waitForState(job, JobState.FAILED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobHistoryParser parser;
    JobInfo jobInfo;
    synchronized (fileInfo) {
      Path historyFilePath = fileInfo.getHistoryFile();
      FSDataInputStream in = null;
      FileContext fc = null;
      try {
        fc = FileContext.getFileContext(conf);
        in = fc.open(fc.makeQualified(historyFilePath));
      } catch (IOException ioe) {
        LOG.info("Cannot open history file: " + historyFilePath, ioe);
        throw new Exception("Cannot open history file");
      }
      parser = new JobHistoryParser(in);
      jobInfo = parser.parse();
    }
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException, parseException);
    for (Map.Entry<TaskID, TaskInfo> entry : jobInfo.getAllTasks().entrySet()) {
      TaskId yarnTaskID = TypeConverter.toYarn(entry.getKey());
      CompletedTask ct = new CompletedTask(yarnTaskID, entry.getValue());
      Assert.assertNotNull("completed task report has null counters", ct.getReport().getCounters());
    }
    final List<String> originalDiagnostics = job.getDiagnostics();
    final String historyError = jobInfo.getErrorInfo();
    assertTrue("No original diagnostics for a failed job", originalDiagnostics != null && !originalDiagnostics.isEmpty());
    assertNotNull("No history error info for a failed job", historyError);
    for (String diagString : originalDiagnostics) {
      assertTrue(historyError.contains(diagString));
    }
  } finally {
    LOG.info("FINISHED testCountersForFailedTask");
  }
}
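For comparison, a hedged sketch of reading counters directly off a parsed JobInfo without wrapping tasks in CompletedTask; a raw TaskInfo may return null counters for a task that never completed, which is the case the assertion above protects report consumers from. The helper class name is illustrative:

import java.util.Map;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

final class TaskCounterDump {
  // Prints each task's status and counters from an already-parsed JobInfo.
  static void dump(JobInfo jobInfo) {
    for (Map.Entry<TaskID, TaskInfo> e : jobInfo.getAllTasks().entrySet()) {
      Counters counters = e.getValue().getCounters();
      System.out.println(e.getKey()
          + " status=" + e.getValue().getTaskStatus()
          + " counters=" + (counters == null ? "<none>" : counters.toString()));
    }
  }
}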