
Example 1 with CLI

Use of org.apache.hadoop.mapreduce.tools.CLI in the Apache Hadoop project.

Class TestMRJobClient, method testListAttemptIds.

/**
   * Test the -list-attempt-ids option: prints the list of task attempt ids.
   */
private void testListAttemptIds(String jobId, Configuration conf) throws Exception {
    CLI jc = createJobClient();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    int exitCode = runTool(conf, jc, new String[] { "-list-attempt-ids" }, out);
    assertEquals("Exit code", -1, exitCode);
    exitCode = runTool(conf, jc, new String[] { "-list-attempt-ids", jobId, "MAP", "completed" }, out);
    assertEquals("Exit code", 0, exitCode);
    String line;
    BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(out.toByteArray())));
    int counter = 0;
    while ((line = br.readLine()) != null) {
        LOG.info("line = " + line);
        counter++;
    }
    assertEquals(1, counter);
}
Also used: CLI (org.apache.hadoop.mapreduce.tools.CLI), InputStreamReader (java.io.InputStreamReader), ByteArrayInputStream (java.io.ByteArrayInputStream), BufferedReader (java.io.BufferedReader), ByteArrayOutputStream (java.io.ByteArrayOutputStream)
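For context, the same -list-attempt-ids command can be issued outside the test harness through ToolRunner; the sketch below is an assumption-laden standalone equivalent (the job id is a placeholder, and runTool/createJobClient are helpers specific to TestMRJobClient):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class ListAttemptIdsSketch {
    public static void main(String[] args) throws Exception {
        // Equivalent to: mapred job -list-attempt-ids <jobId> <taskType> <taskState>
        // The job id below is a placeholder; substitute a real one.
        int exitCode = ToolRunner.run(new Configuration(), new CLI(),
                new String[] { "-list-attempt-ids", "job_1234567890123_0001", "MAP", "completed" });
        System.exit(exitCode);
    }
}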

Example 2 with CLI

Use of org.apache.hadoop.mapreduce.tools.CLI in the Apache Hadoop project.

Class TestMRJobClient, method testfailTask.

/**
   * Test the -fail-task option.
   */
private void testfailTask(Configuration conf) throws Exception {
    Job job = runJobInBackGround(conf);
    CLI jc = createJobClient();
    TaskID tid = new TaskID(job.getJobID(), TaskType.MAP, 0);
    TaskAttemptID taid = new TaskAttemptID(tid, 1);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // TaskAttemptId is not set
    int exitCode = runTool(conf, jc, new String[] { "-fail-task" }, out);
    assertEquals("Exit code", -1, exitCode);
    runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
    String answer = new String(out.toByteArray(), "UTF-8");
    assertTrue(answer.contains("Killed task " + taid + " by failing it"));
}
Also used: CLI (org.apache.hadoop.mapreduce.tools.CLI), ByteArrayOutputStream (java.io.ByteArrayOutputStream)
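The TaskAttemptID construction is the key piece above; a minimal standalone sketch of the same -fail-task invocation, assuming a placeholder job id rather than a job started in the background, could look like this:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class FailTaskSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder job id; in practice this comes from a running job.
        JobID jobId = JobID.forName("job_1234567890123_0001");
        // Attempt 1 of the first map task (task id 0), as in the test.
        TaskAttemptID taid = new TaskAttemptID(new TaskID(jobId, TaskType.MAP, 0), 1);
        // Equivalent to: mapred job -fail-task <task-attempt-id>
        int exitCode = ToolRunner.run(new Configuration(), new CLI(),
                new String[] { "-fail-task", taid.toString() });
        System.exit(exitCode);
    }
}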

Example 3 with CLI

Use of org.apache.hadoop.mapreduce.tools.CLI in the Apache Hadoop project.

Class TestMRJobClient, method testJobHistory.

/**
   * Test the -history option: prints job history from a .jhist file or a job id.
   */
private void testJobHistory(String jobId, Configuration conf) throws Exception {
    CLI jc = createJobClient();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // Find jhist file
    String historyFileUri = null;
    RemoteIterator<LocatedFileStatus> it = getFileSystem().listFiles(new Path("/"), true);
    while (it.hasNext() && historyFileUri == null) {
        LocatedFileStatus file = it.next();
        if (file.getPath().getName().endsWith(".jhist")) {
            historyFileUri = file.getPath().toUri().toString();
        }
    }
    assertNotNull("Could not find jhist file", historyFileUri);
    for (String historyFileOrJobId : new String[] { historyFileUri, jobId }) {
        // Try a bunch of different valid combinations of the command
        int exitCode = runTool(conf, jc, new String[] { "-history", "all", historyFileOrJobId }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryHumanOutput(jobId, out);
        File outFile = File.createTempFile("myout", ".txt");
        exitCode = runTool(conf, jc, new String[] { "-history", "all", historyFileOrJobId, "-outfile", outFile.getAbsolutePath() }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryHumanFileOutput(jobId, out, outFile);
        outFile = File.createTempFile("myout", ".txt");
        exitCode = runTool(conf, jc, new String[] { "-history", "all", historyFileOrJobId, "-outfile", outFile.getAbsolutePath(), "-format", "human" }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryHumanFileOutput(jobId, out, outFile);
        exitCode = runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "-format", "human" }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryHumanOutput(jobId, out);
        exitCode = runTool(conf, jc, new String[] { "-history", "all", historyFileOrJobId, "-format", "json" }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryJSONOutput(jobId, out);
        outFile = File.createTempFile("myout", ".txt");
        exitCode = runTool(conf, jc, new String[] { "-history", "all", historyFileOrJobId, "-outfile", outFile.getAbsolutePath(), "-format", "json" }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryJSONFileOutput(jobId, out, outFile);
        exitCode = runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "-format", "json" }, out);
        assertEquals("Exit code", 0, exitCode);
        checkHistoryJSONOutput(jobId, out);
        // Check some bad arguments
        exitCode = runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "foo" }, out);
        assertEquals("Exit code", -1, exitCode);
        exitCode = runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "-format" }, out);
        assertEquals("Exit code", -1, exitCode);
        exitCode = runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "-outfile" }, out);
        assertEquals("Exit code", -1, exitCode);
        try {
            runTool(conf, jc, new String[] { "-history", historyFileOrJobId, "-format", "foo" }, out);
            fail();
        } catch (IllegalArgumentException e) {
        // Expected
        }
    }
    try {
        runTool(conf, jc, new String[] { "-history", "not_a_valid_history_file_or_job_id" }, out);
        fail();
    } catch (IllegalArgumentException e) {
    // Expected
    }
}
Also used: Path (org.apache.hadoop.fs.Path), CLI (org.apache.hadoop.mapreduce.tools.CLI), LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus), ByteArrayOutputStream (java.io.ByteArrayOutputStream), File (java.io.File)
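Most of the variations exercised above boil down to the -history subcommand with optional -outfile and -format arguments. A minimal standalone sketch, assuming a placeholder job id and output path, might be:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class JobHistorySketch {
    public static void main(String[] args) throws Exception {
        // Placeholder: either a job id or the URI of a .jhist file works here.
        String historyFileOrJobId = "job_1234567890123_0001";
        // Equivalent to: mapred job -history all <jobId|jhist> -outfile <path> -format json
        int exitCode = ToolRunner.run(new Configuration(), new CLI(),
                new String[] { "-history", "all", historyFileOrJobId,
                        "-outfile", "/tmp/history.json", "-format", "json" });
        System.exit(exitCode);
    }
}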

Example 4 with CLI

Use of org.apache.hadoop.mapreduce.tools.CLI in the Apache Hadoop project.

Class TestMRJobClient, method testListTrackers.

/**
   * Test the -list-active-trackers option: prints the list of active trackers.
   */
private void testListTrackers(Configuration conf) throws Exception {
    CLI jc = createJobClient();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    int exitCode = runTool(conf, jc, new String[] { "-list-active-trackers", "second parameter" }, out);
    assertEquals("Exit code", -1, exitCode);
    exitCode = runTool(conf, jc, new String[] { "-list-active-trackers" }, out);
    assertEquals("Exit code", 0, exitCode);
    String line;
    BufferedReader br = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(out.toByteArray())));
    int counter = 0;
    while ((line = br.readLine()) != null) {
        LOG.info("line = " + line);
        counter++;
    }
    assertEquals(2, counter);
}
Also used: CLI (org.apache.hadoop.mapreduce.tools.CLI), InputStreamReader (java.io.InputStreamReader), ByteArrayInputStream (java.io.ByteArrayInputStream), BufferedReader (java.io.BufferedReader), ByteArrayOutputStream (java.io.ByteArrayOutputStream)
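The test captures the CLI's output through its runTool helper. Outside the harness, a similar effect can be approximated by temporarily redirecting System.out, as in this sketch (the redirection approach is an assumption for illustration, not how runTool itself is implemented):

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class ListTrackersSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintStream original = System.out;
        System.setOut(new PrintStream(out, true, "UTF-8"));
        try {
            // Equivalent to: mapred job -list-active-trackers
            ToolRunner.run(new Configuration(), new CLI(),
                    new String[] { "-list-active-trackers" });
        } finally {
            System.setOut(original);
        }
        // The CLI prints one line per active tracker.
        original.print(out.toString("UTF-8"));
    }
}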

Example 5 with CLI

Use of org.apache.hadoop.mapreduce.tools.CLI in the Apache Hadoop project.

Class TestMRJobClient, method testJobName.

/**
   * Test that the -list option displays the job name.
   * The name is capped to 20 characters for display.
   */
@Test
public void testJobName() throws Exception {
    Configuration conf = createJobConf();
    CLI jc = createJobClient();
    Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(), 1, 1, "short_name");
    job.setJobName("mapreduce");
    job.setPriority(JobPriority.NORMAL);
    job.waitForCompletion(true);
    String jobId = job.getJobID().toString();
    verifyJobName(jobId, "mapreduce", conf, jc);
    Job job2 = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(), 1, 1, "long_name");
    job2.setJobName("mapreduce_job_with_long_name");
    job2.setPriority(JobPriority.NORMAL);
    job2.waitForCompletion(true);
    jobId = job2.getJobID().toString();
    verifyJobName(jobId, "mapreduce_job_with_l", conf, jc);
}
Also used: CLI (org.apache.hadoop.mapreduce.tools.CLI), Configuration (org.apache.hadoop.conf.Configuration), Test (org.junit.Test)
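verifyJobName is a helper inside TestMRJobClient; the listing it checks comes from the CLI's -list option, which displays job names truncated to 20 characters. A minimal standalone sketch of that listing, with no test-specific helpers, could be:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class ListJobsSketch {
    public static void main(String[] args) throws Exception {
        // Equivalent to: mapred job -list all
        // Job names longer than 20 characters appear truncated in this listing.
        int exitCode = ToolRunner.run(new Configuration(), new CLI(),
                new String[] { "-list", "all" });
        System.exit(exitCode);
    }
}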

Aggregations

CLI (org.apache.hadoop.mapreduce.tools.CLI): 12 usages
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 11 usages
BufferedReader (java.io.BufferedReader): 6 usages
ByteArrayInputStream (java.io.ByteArrayInputStream): 5 usages
InputStreamReader (java.io.InputStreamReader): 5 usages
File (java.io.File): 3 usages
Path (org.apache.hadoop.fs.Path): 2 usages
FileOutputStream (java.io.FileOutputStream): 1 usage
FileReader (java.io.FileReader): 1 usage
Configuration (org.apache.hadoop.conf.Configuration): 1 usage
FileSystem (org.apache.hadoop.fs.FileSystem): 1 usage
LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus): 1 usage
Test (org.junit.Test): 1 usage