
Example 46 with JobID

Use of org.apache.hadoop.mapreduce.JobID in project hadoop by apache.

Class TestFileNameIndexUtils, method testTrimJobNameEqualsLimitLength.

@Test
public void testTrimJobNameEqualsLimitLength() throws IOException {
    int jobNameTrimLength = 9;
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME);
    info.setJobName(JOB_NAME);
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));
    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength);
    JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
    Assert.assertEquals("Job name did not get trimmed correctly", info.getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName());
}
Also used: JobID (org.apache.hadoop.mapreduce.JobID), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
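The JOB_ID, SUBMIT_TIME, USER_NAME and related constants used above are defined elsewhere in TestFileNameIndexUtils and are not shown here. A minimal standalone sketch of the same old-API-to-YARN conversion, assuming a hypothetical job ID string in the standard job_<clusterTimestamp>_<sequenceNumber> form:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;

public class JobIdConversionSketch {
    public static void main(String[] args) {
        // Hypothetical job ID string; the real JOB_ID constant lives in the test class.
        String jobIdString = "job_1317928501754_0001";

        // Parse the classic mapreduce JobID from its string form.
        JobID oldJobId = JobID.forName(jobIdString);

        // Convert to the YARN-side record type expected by JobIndexInfo.setJobId().
        JobId jobId = TypeConverter.toYarn(oldJobId);

        System.out.println("jtIdentifier = " + oldJobId.getJtIdentifier()); // "1317928501754"
        System.out.println("id           = " + oldJobId.getId());           // 1
        System.out.println("yarn JobId   = " + jobId);                      // same job, v2 record
    }
}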

Example 47 with JobID

Use of org.apache.hadoop.mapreduce.JobID in project hadoop by apache.

Class TestFileNameIndexUtils, method testJobNameWithMultibyteChars.

/**
   * Verify that the job history file name does not exceed 255 bytes
   * even when the job name contains multibyte characters.
   */
@Test
public void testJobNameWithMultibyteChars() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME);
    StringBuilder sb = new StringBuilder();
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));
    // '%' is a 1-byte UTF-8 character,
    // which is encoded into 1 x 3 = 3 characters by URL encoding.
    for (int i = 0; i < 100; i++) {
        sb.append('%');
    }
    String longJobName = sb.toString();
    info.setJobName(longJobName);
    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 50);
    Assert.assertTrue(jobHistoryFile.length() <= 255);
    // 3 is the index of the job name in the file name
    String trimedJobName = jobHistoryFile.split(FileNameIndexUtils.DELIMITER)[3];
    // 3 x 16 < 50 < 3 x 17 so the length of trimedJobName should be 48
    Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length);
    // validate the trimmed job name survives a UTF-8 round trip unchanged
    byte[] trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    String reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    Assert.assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);
    // 'Ω' (large omega) is a 2-byte UTF-8 character,
    // which is encoded into 2 x 3 = 6 characters by URL encoding.
    for (int i = 0; i < 100; i++) {
        sb.append('Ω');
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);
    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 27);
    Assert.assertTrue(jobHistoryFile.length() <= 255);
    // 3 is the index of the job name in the file name
    trimedJobName = jobHistoryFile.split(FileNameIndexUtils.DELIMITER)[3];
    // 6 x 4 < 27 < 6 x 5 so the length of trimedJobName should be 24
    Assert.assertEquals(24, trimedJobName.getBytes(UTF_8).length);
    // validate the trimmed job name survives a UTF-8 round trip unchanged
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    Assert.assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);
    // '→' (rightwards arrow) is a 3-byte UTF-8 character,
    // which is encoded into 3 x 3 = 9 characters by URL encoding.
    for (int i = 0; i < 100; i++) {
        sb.append('→');
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);
    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 40);
    Assert.assertTrue(jobHistoryFile.length() <= 255);
    // 3 is the index of the job name in the file name
    trimedJobName = jobHistoryFile.split(FileNameIndexUtils.DELIMITER)[3];
    // 9 x 4 < 40 < 9 x 5 so the length of trimedJobName should be 36
    Assert.assertEquals(36, trimedJobName.getBytes(UTF_8).length);
    // validate the trimmed job name survives a UTF-8 round trip unchanged
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    Assert.assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);
    // "𩸽" (Mugil cephalus in Kanji) is a 4-byte UTF-8 character,
    // which is encoded into 4 x 3 = 12 characters by URL encoding.
    for (int i = 0; i < 100; i++) {
        sb.append("𩸽");
    }
    longJobName = sb.toString();
    info.setJobName(longJobName);
    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 49);
    Assert.assertTrue(jobHistoryFile.length() <= 255);
    // 3 is the index of the job name in the file name
    trimedJobName = jobHistoryFile.split(FileNameIndexUtils.DELIMITER)[3];
    // 12 x 4 < 49 < 12 x 5 so the length of trimedJobName should be 48
    Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length);
    // validate the trimmed job name survives a UTF-8 round trip unchanged
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    Assert.assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8));
    sb.setLength(0);
    // Test for the combination of 1 to 4 bytes UTF-8 characters
    sb.append('猫')   // cat in Kanji (encoded into 3 bytes x 3 characters)
      .append("[")    // (encoded into 1 byte x 3 characters)
      .append('λ')    // small lambda (encoded into 2 bytes x 3 characters)
      .append('/')    // (encoded into 1 byte x 3 characters)
      .append('A')    // not url-encoded (1 byte x 1 character)
      .append("𩹉")   // flying fish in Kanji (encoded into 4 bytes x 3 characters)
      .append('犬');  // dog in Kanji (encoded into 3 bytes x 3 characters)
    longJobName = sb.toString();
    info.setJobName(longJobName);
    jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 23);
    Assert.assertTrue(jobHistoryFile.length() <= 255);
    // 3 is the index of the job name in the file name
    trimedJobName = jobHistoryFile.split(FileNameIndexUtils.DELIMITER)[3];
    // total size of the first 5 characters = 22
    // 23 < total size of the first 6 characters
    Assert.assertEquals(22, trimedJobName.getBytes(UTF_8).length);
    // validate the trimmed job name survives a UTF-8 round trip unchanged
    trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
    reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
    Assert.assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8));
}
Also used: JobID (org.apache.hadoop.mapreduce.JobID), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
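The byte arithmetic in the comments above follows from percent-encoding: each UTF-8 byte of an escaped character expands into a three-character %XX sequence, so the trimmed job name length is always the largest multiple of (bytes x 3) that fits the limit. Below is a rough standalone sketch using the JDK's URLEncoder to illustrate that math; FileNameIndexUtils applies its own percent-encoding internally, so this only approximates the byte counts, not the class's actual code path:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class EncodedLengthSketch {
    public static void main(String[] args) throws Exception {
        // The same four characters the test appends: 1, 2, 3 and 4 bytes in UTF-8.
        String[] samples = { "%", "Ω", "→", "𩸽" };
        for (String s : samples) {
            int utf8Bytes = s.getBytes(StandardCharsets.UTF_8).length;
            String encoded = URLEncoder.encode(s, "UTF-8");
            // Every UTF-8 byte becomes one "%XX" triple after encoding.
            System.out.println(s + " -> " + encoded
                + " (" + utf8Bytes + " bytes -> " + encoded.length() + " chars)");
        }
        // With a 50-character budget, 16 fully-encoded '%' characters fit
        // (3 x 16 = 48 <= 50 < 51 = 3 x 17), which is why the test expects
        // a 48-byte trimmed job name in the first case.
    }
}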

Example 48 with JobID

Use of org.apache.hadoop.mapreduce.JobID in project hadoop by apache.

Class TestFileNameIndexUtils, method testJobNamePercentEncoding.

@Test
public void testJobNamePercentEncoding() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME);
    info.setJobName(JOB_NAME_WITH_DELIMITER);
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));
    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
    Assert.assertTrue("Job name not encoded correctly into job history file", jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE));
}
Also used: JobID (org.apache.hadoop.mapreduce.JobID), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)
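The JOB_NAME_WITH_DELIMITER and JOB_NAME_WITH_DELIMITER_ESCAPE constants are defined elsewhere in the test class. The point of the test is that a job name containing the history-file field delimiter must be percent-encoded, otherwise splitting the done file name by FileNameIndexUtils.DELIMITER (as the earlier examples do) would yield the wrong fields. A minimal sketch of that failure mode, using a hypothetical job name and only the public DELIMITER constant already referenced above:

import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;

public class DelimiterEscapeSketch {
    public static void main(String[] args) {
        // Hypothetical job name containing the field delimiter.
        String delimiter = FileNameIndexUtils.DELIMITER;
        String jobName = "wordcount" + delimiter + "part2";

        // If this name were embedded verbatim in the done file name, splitting
        // by the delimiter would produce extra fields, corrupting the index.
        String[] pieces = jobName.split(delimiter);
        System.out.println("unescaped name splits into " + pieces.length + " pieces");
    }
}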

Example 49 with JobID

Use of org.apache.hadoop.mapreduce.JobID in project hadoop by apache.

Class TestFileNameIndexUtils, method testUserNamePercentEncoding.

@Test
public void testUserNamePercentEncoding() throws IOException {
    JobIndexInfo info = new JobIndexInfo();
    JobID oldJobId = JobID.forName(JOB_ID);
    JobId jobId = TypeConverter.toYarn(oldJobId);
    info.setJobId(jobId);
    info.setSubmitTime(Long.parseLong(SUBMIT_TIME));
    info.setUser(USER_NAME_WITH_DELIMITER);
    info.setJobName(JOB_NAME);
    info.setFinishTime(Long.parseLong(FINISH_TIME));
    info.setNumMaps(Integer.parseInt(NUM_MAPS));
    info.setNumReduces(Integer.parseInt(NUM_REDUCES));
    info.setJobStatus(JOB_STATUS);
    info.setQueueName(QUEUE_NAME);
    info.setJobStartTime(Long.parseLong(JOB_START_TIME));
    String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
    Assert.assertTrue("User name not encoded correctly into job history file", jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE));
}
Also used: JobID (org.apache.hadoop.mapreduce.JobID), JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId), Test (org.junit.Test)

Example 50 with JobID

Use of org.apache.hadoop.mapreduce.JobID in project hadoop by apache.

Class TestJobInfo, method testTaskID.

@Test(timeout = 5000)
public void testTaskID() throws IOException, InterruptedException {
    JobID jobid = new JobID("1014873536921", 6);
    TaskID tid = new TaskID(jobid, TaskType.MAP, 0);
    org.apache.hadoop.mapred.TaskID tid1 = org.apache.hadoop.mapred.TaskID.downgrade(tid);
    org.apache.hadoop.mapred.TaskReport treport = new org.apache.hadoop.mapred.TaskReport(tid1, 0.0f, State.FAILED.toString(), null, TIPStatus.FAILED, 100, 100, new org.apache.hadoop.mapred.Counters());
    Assert.assertEquals(treport.getTaskId(), "task_1014873536921_0006_m_000000");
    Assert.assertEquals(treport.getTaskID().toString(), "task_1014873536921_0006_m_000000");
}
Also used: TaskID (org.apache.hadoop.mapreduce.TaskID), JobID (org.apache.hadoop.mapreduce.JobID), Test (org.junit.Test)
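The assertions above depend on the string layout of MapReduce IDs. A minimal sketch of how those strings are composed, using only the public JobID and TaskID constructors already exercised in the test:

import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskIdFormatSketch {
    public static void main(String[] args) {
        // JobID("1014873536921", 6) renders as job_1014873536921_0006:
        // the job number is zero-padded to at least 4 digits.
        JobID jobId = new JobID("1014873536921", 6);

        // TaskID(jobId, TaskType.MAP, 0) renders as task_1014873536921_0006_m_000000:
        // map tasks are tagged "m" (reduce tasks "r") and the task number
        // is zero-padded to 6 digits, matching the strings asserted above.
        TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);

        System.out.println(jobId);   // job_1014873536921_0006
        System.out.println(taskId);  // task_1014873536921_0006_m_000000
    }
}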

Aggregations

JobID (org.apache.hadoop.mapreduce.JobID): 61
Test (org.junit.Test): 33
JobId (org.apache.hadoop.mapreduce.v2.api.records.JobId): 17
IOException (java.io.IOException): 16
TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID): 16
TaskID (org.apache.hadoop.mapreduce.TaskID): 16
Configuration (org.apache.hadoop.conf.Configuration): 12
Job (org.apache.hadoop.mapreduce.Job): 8
ArrayList (java.util.ArrayList): 7
Path (org.apache.hadoop.fs.Path): 7
EventHandler (org.apache.hadoop.yarn.event.EventHandler): 7
HashMap (java.util.HashMap): 6
FileSystem (org.apache.hadoop.fs.FileSystem): 6
JobConf (org.apache.hadoop.mapred.JobConf): 6
TaskAttemptInfo (org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo): 6
OutputCommitter (org.apache.hadoop.mapreduce.OutputCommitter): 5
Event (org.apache.hadoop.mapreduce.jobhistory.Event): 5
EventType (org.apache.hadoop.mapreduce.jobhistory.EventType): 5
JobHistoryEvent (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent): 5
JobHistoryEventHandler (org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler): 5