Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In class TestFileNameIndexUtils, method testTrimJobName.
@Test
public void testTrimJobName() throws IOException {
  int jobNameTrimLength = 5;
  JobIndexInfo info = new JobIndexInfo();
  JobID oldJobId = JobID.forName(JOB_ID);
  JobId jobId = TypeConverter.toYarn(oldJobId);
  info.setJobId(jobId);
  info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
  info.setUser(USER_NAME);
  info.setJobName(JOB_NAME);
  info.setFinishTime(Long.parseLong(FINISH_TIME));
  info.setNumMaps(Integer.parseInt(NUM_MAPS));
  info.setNumReduces(Integer.parseInt(NUM_REDUCES));
  info.setJobStatus(JOB_STATUS);
  info.setQueueName(QUEUE_NAME);
  info.setJobStartTime(Long.parseLong(JOB_START_TIME));
  String jobHistoryFile =
      FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength);
  JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
  Assert.assertEquals("Job name did not get trimmed correctly",
      info.getJobName().substring(0, jobNameTrimLength),
      parsedInfo.getJobName());
}
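These tests reference class-level fixture constants (JOB_ID, SUBMIT_TIME, USER_NAME, and so on) declared elsewhere in TestFileNameIndexUtils. A minimal sketch of such a fixture follows; the concrete values are illustrative assumptions, not the actual constants from the Hadoop source.

// Hypothetical fixture values; the real constants live in
// TestFileNameIndexUtils and may differ.
private static final String JOB_ID = "job_1317928501754_0001";
private static final String SUBMIT_TIME = "1317928742025";
private static final String USER_NAME = "username";
private static final String JOB_NAME = "mapreduce";
private static final String FINISH_TIME = "1317928754958";
private static final String NUM_MAPS = "1";
private static final String NUM_REDUCES = "1";
private static final String JOB_STATUS = "SUCCEEDED";
private static final String QUEUE_NAME = "mapreduce";
private static final String JOB_START_TIME = "1317928742060";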
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In class TestFileNameIndexUtils, method testJobHistoryFileNameBackwardsCompatible.
@Test
public void testJobHistoryFileNameBackwardsCompatible() throws IOException {
  JobID oldJobId = JobID.forName(JOB_ID);
  JobId jobId = TypeConverter.toYarn(oldJobId);
  long submitTime = Long.parseLong(SUBMIT_TIME);
  long finishTime = Long.parseLong(FINISH_TIME);
  int numMaps = Integer.parseInt(NUM_MAPS);
  int numReduces = Integer.parseInt(NUM_REDUCES);
  String jobHistoryFile = String.format(OLD_JOB_HISTORY_FILE_FORMATTER,
      JOB_ID, SUBMIT_TIME, USER_NAME, JOB_NAME, FINISH_TIME,
      NUM_MAPS, NUM_REDUCES, JOB_STATUS);
  JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
  Assert.assertEquals("Job id incorrect after decoding old history file",
      jobId, info.getJobId());
  Assert.assertEquals("Submit time incorrect after decoding old history file",
      submitTime, info.getSubmitTime());
  Assert.assertEquals("User incorrect after decoding old history file",
      USER_NAME, info.getUser());
  Assert.assertEquals("Job name incorrect after decoding old history file",
      JOB_NAME, info.getJobName());
  Assert.assertEquals("Finish time incorrect after decoding old history file",
      finishTime, info.getFinishTime());
  Assert.assertEquals("Num maps incorrect after decoding old history file",
      numMaps, info.getNumMaps());
  Assert.assertEquals("Num reduces incorrect after decoding old history file",
      numReduces, info.getNumReduces());
  Assert.assertEquals("Job status incorrect after decoding old history file",
      JOB_STATUS, info.getJobStatus());
  Assert.assertNull("Queue name incorrect after decoding old history file",
      info.getQueueName());
}
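OLD_JOB_HISTORY_FILE_FORMATTER reconstructs the legacy file-name layout: eight delimiter-separated fields with no queue name or job start time, which is why the test expects getQueueName() to be null. A sketch of what such a name might look like, assuming a "-" delimiter and a ".jhist" extension (both assumptions here, not quoted from the Hadoop source):

// Assumed legacy layout: id-submit-user-name-finish-maps-reduces-status.jhist
// The "-" delimiter and ".jhist" extension are assumptions for illustration.
String oldFormat = "%s-%s-%s-%s-%s-%s-%s-%s.jhist";
String example = String.format(oldFormat,
    "job_1317928501754_0001", "1317928742025", "username", "wordcount",
    "1317928754958", "1", "1", "SUCCEEDED");
// -> job_1317928501754_0001-1317928742025-username-wordcount-1317928754958-1-1-SUCCEEDED.jhist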
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In class TestFileNameIndexUtils, method testQueueNamePercentEncoding.
@Test
public void testQueueNamePercentEncoding() throws IOException {
  JobIndexInfo info = new JobIndexInfo();
  JobID oldJobId = JobID.forName(JOB_ID);
  JobId jobId = TypeConverter.toYarn(oldJobId);
  info.setJobId(jobId);
  info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
  info.setUser(USER_NAME);
  info.setJobName(JOB_NAME);
  info.setFinishTime(Long.parseLong(FINISH_TIME));
  info.setNumMaps(Integer.parseInt(NUM_MAPS));
  info.setNumReduces(Integer.parseInt(NUM_REDUCES));
  info.setJobStatus(JOB_STATUS);
  info.setQueueName(QUEUE_NAME_WITH_DELIMITER);
  info.setJobStartTime(Long.parseLong(JOB_START_TIME));
  String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
  Assert.assertTrue("Queue name not encoded correctly into job history file",
      jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE));
}
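QUEUE_NAME_WITH_DELIMITER and QUEUE_NAME_WITH_DELIMITER_ESCAPE are further fixture constants: a queue name containing the file-name field delimiter, and its percent-encoded form. A minimal sketch of the escaping the test expects, assuming a "-" delimiter (an assumption; the actual delimiter is defined in FileNameIndexUtils):

// Sketch of delimiter escaping; escapeDelimiter is a hypothetical helper,
// and the '-' delimiter is an assumption, not quoted from Hadoop.
static String escapeDelimiter(String value) {
  StringBuilder sb = new StringBuilder();
  for (char c : value.toCharArray()) {
    if (c == '-' || c == '%') {
      // percent-encode the delimiter and the escape character itself
      sb.append('%').append(String.format("%02X", (int) c)); // '-' -> "%2D"
    } else {
      sb.append(c);
    }
  }
  return sb.toString();
}
// escapeDelimiter("root-queue") -> "root%2Dqueue"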
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In class TestFileNameIndexUtils, method testEncodingDecodingEquivalence.
@Test
public void testEncodingDecodingEquivalence() throws IOException {
  JobIndexInfo info = new JobIndexInfo();
  JobID oldJobId = JobID.forName(JOB_ID);
  JobId jobId = TypeConverter.toYarn(oldJobId);
  info.setJobId(jobId);
  info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
  info.setUser(USER_NAME);
  info.setJobName(JOB_NAME);
  info.setFinishTime(Long.parseLong(FINISH_TIME));
  info.setNumMaps(Integer.parseInt(NUM_MAPS));
  info.setNumReduces(Integer.parseInt(NUM_REDUCES));
  info.setJobStatus(JOB_STATUS);
  info.setQueueName(QUEUE_NAME);
  info.setJobStartTime(Long.parseLong(JOB_START_TIME));
  String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
  JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
  Assert.assertEquals("Job id different after encoding and decoding",
      info.getJobId(), parsedInfo.getJobId());
  Assert.assertEquals("Submit time different after encoding and decoding",
      info.getSubmitTime(), parsedInfo.getSubmitTime());
  Assert.assertEquals("User different after encoding and decoding",
      info.getUser(), parsedInfo.getUser());
  Assert.assertEquals("Job name different after encoding and decoding",
      info.getJobName(), parsedInfo.getJobName());
  Assert.assertEquals("Finish time different after encoding and decoding",
      info.getFinishTime(), parsedInfo.getFinishTime());
  Assert.assertEquals("Num maps different after encoding and decoding",
      info.getNumMaps(), parsedInfo.getNumMaps());
  Assert.assertEquals("Num reduces different after encoding and decoding",
      info.getNumReduces(), parsedInfo.getNumReduces());
  Assert.assertEquals("Job status different after encoding and decoding",
      info.getJobStatus(), parsedInfo.getJobStatus());
  Assert.assertEquals("Queue name different after encoding and decoding",
      info.getQueueName(), parsedInfo.getQueueName());
  Assert.assertEquals("Job start time different after encoding and decoding",
      info.getJobStartTime(), parsedInfo.getJobStartTime());
}
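Taken together, these assertions verify a round-trip property: every indexed field survives getDoneFileName followed by getIndexInfo. The same check could be factored into a reusable helper, sketched below; assertRoundTrip is a hypothetical name, not part of TestFileNameIndexUtils.

// Sketch: the round-trip property as a helper, using only the APIs the
// tests above already call.
static void assertRoundTrip(JobIndexInfo original) throws IOException {
  String fileName = FileNameIndexUtils.getDoneFileName(original);
  JobIndexInfo decoded = FileNameIndexUtils.getIndexInfo(fileName);
  Assert.assertEquals(original.getJobId(), decoded.getJobId());
  Assert.assertEquals(original.getSubmitTime(), decoded.getSubmitTime());
  Assert.assertEquals(original.getUser(), decoded.getUser());
  Assert.assertEquals(original.getJobName(), decoded.getJobName());
  Assert.assertEquals(original.getFinishTime(), decoded.getFinishTime());
  Assert.assertEquals(original.getNumMaps(), decoded.getNumMaps());
  Assert.assertEquals(original.getNumReduces(), decoded.getNumReduces());
  Assert.assertEquals(original.getJobStatus(), decoded.getJobStatus());
  Assert.assertEquals(original.getQueueName(), decoded.getQueueName());
  Assert.assertEquals(original.getJobStartTime(), decoded.getJobStartTime());
}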
Use of org.apache.hadoop.mapreduce.v2.api.records.JobId in project hadoop by apache.
In class TestJobHistoryParsing, method testScanningOldDirs.
@Test(timeout = 50000)
public void testScanningOldDirs() throws Exception {
  LOG.info("STARTING testScanningOldDirs");
  try {
    Configuration conf = new Configuration();
    conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
        MyResolver.class, DNSToSwitchMapping.class);
    RackResolver.init(conf);
    MRApp app =
        new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
    app.submit(conf);
    Job job = app.getContext().getAllJobs().values().iterator().next();
    JobId jobId = job.getID();
    LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
    app.waitForState(job, JobState.SUCCEEDED);
    // make sure all events are flushed
    app.waitForState(Service.STATE.STOPPED);
    HistoryFileManagerForTest hfm = new HistoryFileManagerForTest();
    hfm.init(conf);
    HistoryFileInfo fileInfo = hfm.getFileInfo(jobId);
    Assert.assertNotNull("Unable to locate job history", fileInfo);
    // force the manager to "forget" the job
    hfm.deleteJobFromJobListCache(fileInfo);
    final int msecPerSleep = 10;
    int msecToSleep = 10 * 1000;
    while (fileInfo.isMovePending() && msecToSleep > 0) {
      Assert.assertFalse(fileInfo.didMoveFail());
      msecToSleep -= msecPerSleep;
      Thread.sleep(msecPerSleep);
    }
    Assert.assertTrue("Timeout waiting for history move", msecToSleep > 0);
    fileInfo = hfm.getFileInfo(jobId);
    hfm.stop();
    Assert.assertNotNull("Unable to locate old job history", fileInfo);
    Assert.assertTrue("HistoryFileManager not shutdown properly",
        hfm.moveToDoneExecutor.isTerminated());
  } finally {
    LOG.info("FINISHED testScanningOldDirs");
  }
}
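The while loop above is a bounded-polling pattern: re-check a condition at a fixed interval until it clears or the time budget runs out, then assert on the outcome. A generic sketch of the same pattern follows; waitFor is a hypothetical helper, not part of the Hadoop test code.

// Hypothetical helper capturing the bounded-polling pattern used above.
static boolean waitFor(java.util.function.BooleanSupplier done,
    long timeoutMsec, long intervalMsec) throws InterruptedException {
  long remaining = timeoutMsec;
  while (!done.getAsBoolean() && remaining > 0) {
    Thread.sleep(intervalMsec);
    remaining -= intervalMsec;
  }
  return done.getAsBoolean();
}

// Usage, mirroring the test:
//   boolean moved = waitFor(() -> !fileInfo.isMovePending(), 10_000, 10);
//   Assert.assertTrue("Timeout waiting for history move", moved);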