Use of org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo in project hadoop by apache.
The class TestJobIdHistoryFileInfoMap, method testWithSingleElement.
/**
 * Trivial test case that verifies basic functionality of
 * {@link JobIdHistoryFileInfoMap}.
 */
@Test(timeout = 2000)
public void testWithSingleElement() throws InterruptedException {
  JobIdHistoryFileInfoMap mapWithSize = new JobIdHistoryFileInfoMap();
  JobId jobId = MRBuilderUtils.newJobId(1, 1, 1);
  HistoryFileInfo fileInfo1 = Mockito.mock(HistoryFileInfo.class);
  Mockito.when(fileInfo1.getJobId()).thenReturn(jobId);

  // Add it twice: the first call inserts and returns null, the second
  // returns the mapping that already exists.
  assertEquals("Incorrect return on putIfAbsent()",
      null, mapWithSize.putIfAbsent(jobId, fileInfo1));
  assertEquals("Incorrect return on putIfAbsent()",
      fileInfo1, mapWithSize.putIfAbsent(jobId, fileInfo1));

  // check get()
  assertEquals("Incorrect get()", fileInfo1, mapWithSize.get(jobId));
  assertTrue("Incorrect size()", checkSize(mapWithSize, 1));

  // check navigableKeySet()
  NavigableSet<JobId> set = mapWithSize.navigableKeySet();
  assertEquals("Incorrect navigableKeySet()", 1, set.size());
  assertTrue("Incorrect navigableKeySet()", set.contains(jobId));

  // check values()
  Collection<HistoryFileInfo> values = mapWithSize.values();
  assertEquals("Incorrect values()", 1, values.size());
  assertTrue("Incorrect values()", values.contains(fileInfo1));
}
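The null-then-existing return pattern asserted above is the standard java.util.concurrent putIfAbsent contract. For reference, here is a minimal sketch of a map with the same surface, assuming delegation to a ConcurrentSkipListMap; the class name and the delegation are illustrative, not the verbatim Hadoop implementation:

  import java.util.Collection;
  import java.util.NavigableSet;
  import java.util.concurrent.ConcurrentSkipListMap;

  // Illustrative sketch, not the actual Hadoop class: the same surface the
  // test exercises, backed by a ConcurrentSkipListMap (an assumption).
  class JobIdHistoryFileInfoMapSketch<K extends Comparable<K>, V> {
    private final ConcurrentSkipListMap<K, V> cache = new ConcurrentSkipListMap<>();

    // Returns null on first insert; returns the existing value afterwards.
    V putIfAbsent(K key, V value) {
      return cache.putIfAbsent(key, value);
    }

    V get(K key) {
      return cache.get(key);
    }

    // Sorted key view; possible because the skip list keeps keys ordered.
    NavigableSet<K> navigableKeySet() {
      return cache.navigableKeySet();
    }

    Collection<V> values() {
      return cache.values();
    }
  }

Backing the map with a sorted concurrent structure is what makes navigableKeySet() available at all, which the test checks alongside the plain get() and values() views.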
Use of org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo in project hadoop by apache.
The class TestJobListCache, method testEviction.
@Test(timeout = 5000)
public void testEviction() throws InterruptedException {
  int maxSize = 2;
  JobListCache cache = new JobListCache(maxSize, 1000);

  JobId jobId1 = MRBuilderUtils.newJobId(1, 1, 1);
  HistoryFileInfo fileInfo1 = Mockito.mock(HistoryFileInfo.class);
  Mockito.when(fileInfo1.getJobId()).thenReturn(jobId1);

  JobId jobId2 = MRBuilderUtils.newJobId(2, 2, 2);
  HistoryFileInfo fileInfo2 = Mockito.mock(HistoryFileInfo.class);
  Mockito.when(fileInfo2.getJobId()).thenReturn(jobId2);

  JobId jobId3 = MRBuilderUtils.newJobId(3, 3, 3);
  HistoryFileInfo fileInfo3 = Mockito.mock(HistoryFileInfo.class);
  Mockito.when(fileInfo3.getJobId()).thenReturn(jobId3);

  // Adding a third entry to a cache of capacity two should trigger
  // eviction of the oldest entry.
  cache.addIfAbsent(fileInfo1);
  cache.addIfAbsent(fileInfo2);
  cache.addIfAbsent(fileInfo3);

  // Eviction may lag behind the insert, so poll for up to ~900 ms.
  Collection<HistoryFileInfo> values;
  for (int i = 0; i < 9; i++) {
    values = cache.values();
    if (values.size() > maxSize) {
      Thread.sleep(100);
    } else {
      assertFalse("fileInfo1 should have been evicted",
          values.contains(fileInfo1));
      return;
    }
  }
  fail("JobListCache didn't delete the extra entry");
}
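The polling loop exists because eviction need not complete synchronously inside addIfAbsent(). For intuition only, here is a hedged sketch of a size-bounded cache that evicts from the low end of the key order; this is an assumption about the behavior under test, not the actual JobListCache source:

  import java.util.concurrent.ConcurrentSkipListMap;

  // Assumed eviction policy: when the cache grows past maxSize, drop
  // entries from the lowest keys, i.e. the oldest JobId. Illustrative only.
  class BoundedJobCacheSketch<K extends Comparable<K>, V> {
    private final ConcurrentSkipListMap<K, V> cache = new ConcurrentSkipListMap<>();
    private final int maxSize;

    BoundedJobCacheSketch(int maxSize) {
      this.maxSize = maxSize;
    }

    void addIfAbsent(K key, V value) {
      cache.putIfAbsent(key, value);
      // Evict lowest keys until the cache is back within bounds.
      while (cache.size() > maxSize) {
        cache.pollFirstEntry();
      }
    }
  }

The real cache may defer removal (for example while an entry's files are still being moved), which is why the test retries instead of asserting immediately after the third insert.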
Use of org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo in project hadoop by apache.
The class TestHistoryFileManager, method testHistoryFileInfoSummaryFileNotExist.
@Test
public void testHistoryFileInfoSummaryFileNotExist() throws Exception {
  HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
  String job = "job_1410889000000_123456";
  Path summaryFile = new Path(job + ".summary");
  JobIndexInfo jobIndexInfo = new JobIndexInfo();
  jobIndexInfo.setJobId(TypeConverter.toYarn(JobID.forName(job)));

  Configuration conf = dfsCluster.getConfiguration(0);
  conf.set(JHAdminConfig.MR_HISTORY_DONE_DIR, "/" + UUID.randomUUID());
  conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR,
      "/" + UUID.randomUUID());
  hmTest.serviceInit(conf);

  // The summary file was never created, so moveToDone() must tolerate
  // its absence rather than report a failed move.
  HistoryFileInfo info =
      hmTest.getHistoryFileInfo(null, null, summaryFile, jobIndexInfo, false);
  info.moveToDone();
  Assert.assertFalse(info.didMoveFail());
}
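The final assertion pins down that a missing .summary file is not treated as a move failure. A hypothetical guard illustrating that tolerance; the helper name and signature are invented for illustration, and only FileSystem.exists() and rename() are real Hadoop APIs:

  import java.io.IOException;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  class SummaryFileGuardSketch {
    // Hypothetical helper: skip summary processing when the file is absent
    // instead of propagating an error. Not the verbatim HistoryFileInfo code.
    static void moveSummaryIfPresent(FileSystem fs, Path summaryFile, Path doneDir)
        throws IOException {
      if (summaryFile == null || !fs.exists(summaryFile)) {
        return; // a missing summary file is not a move failure
      }
      fs.rename(summaryFile, new Path(doneDir, summaryFile.getName()));
    }
  }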
Use of org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo in project hadoop by apache.
The class TestHistoryFileManager, method testHistoryFileInfoShouldReturnCompletedJobIfMaxNotConfiged.
@Test
public void testHistoryFileInfoShouldReturnCompletedJobIfMaxNotConfiged()
    throws Exception {
  HistoryFileManagerTest hmTest = new HistoryFileManagerTest();
  Configuration conf = dfsCluster.getConfiguration(0);
  // A negative value disables the loaded-tasks cap.
  conf.setInt(JHAdminConfig.MR_HS_LOADED_JOBS_TASKS_MAX, -1);
  hmTest.init(conf);

  final String jobId = "job_1416424547277_0002";
  JobIndexInfo jobIndexInfo = new JobIndexInfo();
  jobIndexInfo.setJobId(TypeConverter.toYarn(JobID.forName(jobId)));
  jobIndexInfo.setNumMaps(100);
  jobIndexInfo.setNumReduces(100);

  final String historyFile = getClass().getClassLoader()
      .getResource("job_2.0.3-alpha-FAILED.jhist").getFile();
  final Path historyFilePath =
      FileSystem.getLocal(conf).makeQualified(new Path(historyFile));

  HistoryFileInfo info =
      hmTest.getHistoryFileInfo(historyFilePath, null, null, jobIndexInfo, false);
  Job job = info.loadJob();
  Assert.assertTrue("Should return an instance of CompletedJob as a result"
      + " of parsing the job history file of the job",
      job instanceof CompletedJob);
}
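Setting MR_HS_LOADED_JOBS_TASKS_MAX to -1 removes the cap on how many tasks a fully loaded job may carry, so the 200-task job above must still come back as a CompletedJob. A minimal sketch of the implied check (assumed logic, not the verbatim HistoryFileManager source):

  class LoadedTasksCapSketch {
    // Assumed decision logic: a negative configured maximum means "no
    // limit", so any job, including this 200-task one, qualifies for full
    // parsing into a CompletedJob.
    static boolean canLoadFully(int numMaps, int numReduces, int loadedTasksMax) {
      if (loadedTasksMax < 0) {
        return true; // cap not configured
      }
      return numMaps + numReduces <= loadedTasksMax;
    }
  }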
Use of org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo in project hadoop by apache.
The class HistoryFileManager, method clean.
/**
 * Clean up older history files.
 *
 * @throws IOException on any error trying to remove the entries.
 */
@SuppressWarnings("unchecked")
void clean() throws IOException {
  long cutoff = System.currentTimeMillis() - maxHistoryAge;
  boolean halted = false;
  List<FileStatus> serialDirList = getHistoryDirsForCleaning(cutoff);
  // Sort in ascending order. Relies on YYYY/MM/DD/Serial.
  Collections.sort(serialDirList);
  for (FileStatus serialDir : serialDirList) {
    List<FileStatus> historyFileList =
        scanDirectoryForHistoryFiles(serialDir.getPath(), doneDirFc);
    for (FileStatus historyFile : historyFileList) {
      JobIndexInfo jobIndexInfo =
          FileNameIndexUtils.getIndexInfo(historyFile.getPath().getName());
      long effectiveTimestamp =
          getEffectiveTimestamp(jobIndexInfo.getFinishTime(), historyFile);
      if (effectiveTimestamp <= cutoff) {
        HistoryFileInfo fileInfo =
            this.jobListCache.get(jobIndexInfo.getJobId());
        if (fileInfo == null) {
          String confFileName =
              JobHistoryUtils.getIntermediateConfFileName(jobIndexInfo.getJobId());
          fileInfo = createHistoryFileInfo(historyFile.getPath(),
              new Path(historyFile.getPath().getParent(), confFileName),
              null, jobIndexInfo, true);
        }
        deleteJobFromDone(fileInfo);
      } else {
        halted = true;
        break;
      }
    }
    if (!halted) {
      deleteDir(serialDir);
      removeDirectoryFromSerialNumberIndex(serialDir.getPath());
      existingDoneSubdirs.remove(serialDir.getPath());
    } else {
      // Don't scan any more directories.
      break;
    }
  }
}
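Because the directory list is sorted ascending, the first file newer than the cutoff sets halted and stops the scan: everything after it is newer still. The per-file age comparison hinges on getEffectiveTimestamp(), which keeps an entry with no recorded finish time from living forever. A hedged sketch of that fallback (assumed behavior, not the verbatim source):

  import org.apache.hadoop.fs.FileStatus;

  class EffectiveTimestampSketch {
    // Assumed fallback: when the index carries no finish time (0), use the
    // file's modification time so the entry still ages out under clean().
    static long effectiveTimestamp(long finishTimeFromIndex, FileStatus status) {
      return finishTimeFromIndex == 0
          ? status.getModificationTime()
          : finishTimeFromIndex;
    }
  }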