Search in sources:

Example 36 with JobId

Use of `org.apache.hadoop.mapreduce.v2.api.records.JobId` in the Apache Hadoop project.

From the class `TestFileNameIndexUtils`, method `testTrimJobName`:

@Test
public void testTrimJobName() throws IOException {
    final int trimLength = 5;
    // Build an index-info record from the shared fixture constants.
    JobID legacyJobId = JobID.forName(JOB_ID);
    JobId yarnJobId = TypeConverter.toYarn(legacyJobId);
    JobIndexInfo indexInfo = new JobIndexInfo();
    indexInfo.setJobId(yarnJobId);
    indexInfo.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    indexInfo.setJobStartTime(Long.parseLong(JOB_START_TIME));
    indexInfo.setFinishTime(Long.parseLong(FINISH_TIME));
    indexInfo.setUser(USER_NAME);
    indexInfo.setJobName(JOB_NAME);
    indexInfo.setJobStatus(JOB_STATUS);
    indexInfo.setQueueName(QUEUE_NAME);
    indexInfo.setNumMaps(Integer.parseInt(NUM_MAPS));
    indexInfo.setNumReduces(Integer.parseInt(NUM_REDUCES));
    // Encode with a trim limit, decode, and verify only the first
    // trimLength characters of the job name survived the round trip.
    String historyFileName =
        FileNameIndexUtils.getDoneFileName(indexInfo, trimLength);
    JobIndexInfo decoded = FileNameIndexUtils.getIndexInfo(historyFileName);
    Assert.assertEquals("Job name did not get trimmed correctly",
        indexInfo.getJobName().substring(0, trimLength), decoded.getJobName());
}
Also used : JobID(org.apache.hadoop.mapreduce.JobID) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)

Example 37 with JobId

Use of `org.apache.hadoop.mapreduce.v2.api.records.JobId` in the Apache Hadoop project.

From the class `TestFileNameIndexUtils`, method `testJobHistoryFileNameBackwardsCompatible`:

@Test
public void testJobHistoryFileNameBackwardsCompatible() throws IOException {
    // Expected values derived from the shared fixture constants.
    JobID legacyJobId = JobID.forName(JOB_ID);
    JobId expectedJobId = TypeConverter.toYarn(legacyJobId);
    long expectedSubmitTime = Long.parseLong(SUBMIT_TIME);
    long expectedFinishTime = Long.parseLong(FINISH_TIME);
    int expectedNumMaps = Integer.parseInt(NUM_MAPS);
    int expectedNumReduces = Integer.parseInt(NUM_REDUCES);
    // Render a history file name in the legacy (pre-queue-name) format
    // and check that the parser still understands every field.
    String legacyFileName = String.format(OLD_JOB_HISTORY_FILE_FORMATTER,
        JOB_ID, SUBMIT_TIME, USER_NAME, JOB_NAME, FINISH_TIME, NUM_MAPS,
        NUM_REDUCES, JOB_STATUS);
    JobIndexInfo decoded = FileNameIndexUtils.getIndexInfo(legacyFileName);
    Assert.assertEquals("Job id incorrect after decoding old history file",
        expectedJobId, decoded.getJobId());
    Assert.assertEquals("Submit time incorrect after decoding old history file",
        expectedSubmitTime, decoded.getSubmitTime());
    Assert.assertEquals("User incorrect after decoding old history file",
        USER_NAME, decoded.getUser());
    Assert.assertEquals("Job name incorrect after decoding old history file",
        JOB_NAME, decoded.getJobName());
    Assert.assertEquals("Finish time incorrect after decoding old history file",
        expectedFinishTime, decoded.getFinishTime());
    Assert.assertEquals("Num maps incorrect after decoding old history file",
        expectedNumMaps, decoded.getNumMaps());
    Assert.assertEquals("Num reduces incorrect after decoding old history file",
        expectedNumReduces, decoded.getNumReduces());
    Assert.assertEquals("Job status incorrect after decoding old history file",
        JOB_STATUS, decoded.getJobStatus());
    // The legacy format carried no queue name, so the parser must yield null.
    Assert.assertNull("Queue name incorrect after decoding old history file",
        decoded.getQueueName());
}
Also used : JobID(org.apache.hadoop.mapreduce.JobID) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)

Example 38 with JobId

Use of `org.apache.hadoop.mapreduce.v2.api.records.JobId` in the Apache Hadoop project.

From the class `TestFileNameIndexUtils`, method `testQueueNamePercentEncoding`:

@Test
public void testQueueNamePercentEncoding() throws IOException {
    // Populate an index record whose queue name contains the file-name
    // delimiter character, forcing the encoder to escape it.
    JobID legacyJobId = JobID.forName(JOB_ID);
    JobId yarnJobId = TypeConverter.toYarn(legacyJobId);
    JobIndexInfo indexInfo = new JobIndexInfo();
    indexInfo.setJobId(yarnJobId);
    indexInfo.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    indexInfo.setJobStartTime(Long.parseLong(JOB_START_TIME));
    indexInfo.setFinishTime(Long.parseLong(FINISH_TIME));
    indexInfo.setUser(USER_NAME);
    indexInfo.setJobName(JOB_NAME);
    indexInfo.setJobStatus(JOB_STATUS);
    indexInfo.setQueueName(QUEUE_NAME_WITH_DELIMITER);
    indexInfo.setNumMaps(Integer.parseInt(NUM_MAPS));
    indexInfo.setNumReduces(Integer.parseInt(NUM_REDUCES));
    String historyFileName = FileNameIndexUtils.getDoneFileName(indexInfo);
    // The encoded file name must carry the escaped form of the delimiter.
    Assert.assertTrue("Queue name not encoded correctly into job history file",
        historyFileName.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE));
}
Also used : JobID(org.apache.hadoop.mapreduce.JobID) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)

Example 39 with JobId

Use of `org.apache.hadoop.mapreduce.v2.api.records.JobId` in the Apache Hadoop project.

From the class `TestFileNameIndexUtils`, method `testEncodingDecodingEquivalence`:

@Test
public void testEncodingDecodingEquivalence() throws IOException {
    // Fully populate an index record from the shared fixture constants.
    JobID legacyJobId = JobID.forName(JOB_ID);
    JobId yarnJobId = TypeConverter.toYarn(legacyJobId);
    JobIndexInfo original = new JobIndexInfo();
    original.setJobId(yarnJobId);
    original.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    original.setJobStartTime(Long.parseLong(JOB_START_TIME));
    original.setFinishTime(Long.parseLong(FINISH_TIME));
    original.setUser(USER_NAME);
    original.setJobName(JOB_NAME);
    original.setJobStatus(JOB_STATUS);
    original.setQueueName(QUEUE_NAME);
    original.setNumMaps(Integer.parseInt(NUM_MAPS));
    original.setNumReduces(Integer.parseInt(NUM_REDUCES));
    // Encode to a done-file name, decode it back, and require every field
    // to survive the round trip unchanged.
    String historyFileName = FileNameIndexUtils.getDoneFileName(original);
    JobIndexInfo roundTripped = FileNameIndexUtils.getIndexInfo(historyFileName);
    Assert.assertEquals("Job id different after encoding and decoding",
        original.getJobId(), roundTripped.getJobId());
    Assert.assertEquals("Submit time different after encoding and decoding",
        original.getSubmitTime(), roundTripped.getSubmitTime());
    Assert.assertEquals("User different after encoding and decoding",
        original.getUser(), roundTripped.getUser());
    Assert.assertEquals("Job name different after encoding and decoding",
        original.getJobName(), roundTripped.getJobName());
    Assert.assertEquals("Finish time different after encoding and decoding",
        original.getFinishTime(), roundTripped.getFinishTime());
    Assert.assertEquals("Num maps different after encoding and decoding",
        original.getNumMaps(), roundTripped.getNumMaps());
    Assert.assertEquals("Num reduces different after encoding and decoding",
        original.getNumReduces(), roundTripped.getNumReduces());
    Assert.assertEquals("Job status different after encoding and decoding",
        original.getJobStatus(), roundTripped.getJobStatus());
    Assert.assertEquals("Queue name different after encoding and decoding",
        original.getQueueName(), roundTripped.getQueueName());
    Assert.assertEquals("Job start time different after encoding and decoding",
        original.getJobStartTime(), roundTripped.getJobStartTime());
}
Also used : JobID(org.apache.hadoop.mapreduce.JobID) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) Test(org.junit.Test)

Example 40 with JobId

Use of `org.apache.hadoop.mapreduce.v2.api.records.JobId` in the Apache Hadoop project.

From the class `TestJobHistoryParsing`, method `testScanningOldDirs`:

@Test(timeout = 50000)
public void testScanningOldDirs() throws Exception {
    LOG.info("STARTING testScanningOldDirs");
    try {
        Configuration conf = new Configuration();
        conf.setClass(NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
            MyResolver.class, DNSToSwitchMapping.class);
        RackResolver.init(conf);
        // Run a tiny 1-map/1-reduce job to completion so it writes history.
        MRApp app =
            new MRAppWithHistory(1, 1, true, this.getClass().getName(), true);
        app.submit(conf);
        Job job = app.getContext().getAllJobs().values().iterator().next();
        JobId jobId = job.getID();
        LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
        app.waitForState(job, JobState.SUCCEEDED);
        // Make sure all events are flushed before the manager scans.
        app.waitForState(Service.STATE.STOPPED);
        HistoryFileManagerForTest hfm = new HistoryFileManagerForTest();
        hfm.init(conf);
        HistoryFileInfo fileInfo = hfm.getFileInfo(jobId);
        Assert.assertNotNull("Unable to locate job history", fileInfo);
        // Force the manager to "forget" the job so the next lookup must
        // rediscover it by scanning the old (done) directories.
        hfm.deleteJobFromJobListCache(fileInfo);
        // Poll until the background move to the done directory completes,
        // failing fast if the move is reported as failed; bounded at ~10s.
        final int msecPerSleep = 10;
        int msecToSleep = 10 * 1000;
        while (fileInfo.isMovePending() && msecToSleep > 0) {
            Assert.assertFalse("History file move failed",
                fileInfo.didMoveFail());
            msecToSleep -= msecPerSleep;
            Thread.sleep(msecPerSleep);
        }
        Assert.assertTrue("Timeout waiting for history move", msecToSleep > 0);
        fileInfo = hfm.getFileInfo(jobId);
        hfm.stop();
        Assert.assertNotNull("Unable to locate old job history", fileInfo);
        Assert.assertTrue("HistoryFileManager not shutdown properly",
            hfm.moveToDoneExecutor.isTerminated());
    } finally {
        LOG.info("FINISHED testScanningOldDirs");
    }
}
Also used : MRAppWithHistory(org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory) HistoryFileInfo(org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo) Configuration(org.apache.hadoop.conf.Configuration) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) MRApp(org.apache.hadoop.mapreduce.v2.app.MRApp) Test(org.junit.Test)

Aggregations

JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId)276 Test (org.junit.Test)238 Job (org.apache.hadoop.mapreduce.v2.app.job.Job)212 ClientResponse (com.sun.jersey.api.client.ClientResponse)103 WebResource (com.sun.jersey.api.client.WebResource)103 Task (org.apache.hadoop.mapreduce.v2.app.job.Task)88 JSONObject (org.codehaus.jettison.json.JSONObject)81 Configuration (org.apache.hadoop.conf.Configuration)77 TaskId (org.apache.hadoop.mapreduce.v2.api.records.TaskId)61 ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId)55 Path (org.apache.hadoop.fs.Path)52 TaskAttemptId (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId)51 ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)47 TaskAttempt (org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt)41 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)36 AppContext (org.apache.hadoop.mapreduce.v2.app.AppContext)35 IOException (java.io.IOException)32 JobConf (org.apache.hadoop.mapred.JobConf)28 HistoryFileInfo (org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo)25 JobID (org.apache.hadoop.mapreduce.JobID)23