
Example 31 with PrintWriter

Use of java.io.PrintWriter in project hadoop by apache.

The class MiniDFSCluster, method addToFile.

private void addToFile(String p, String address) throws IOException {
    File f = new File(p);
    // No-op if the file already exists.
    f.createNewFile();
    // Open in append mode so existing entries are preserved.
    PrintWriter writer = new PrintWriter(new FileWriter(f, true));
    try {
        writer.println(address);
    } finally {
        writer.close();
    }
}
Also used: FileWriter(java.io.FileWriter) File(java.io.File) PrintWriter(java.io.PrintWriter)
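
The same helper can be written with try-with-resources (Java 7+), which closes the writer even if println throws. A minimal standalone sketch, not the Hadoop source; the class name, file name, and main method are only for illustration:

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class AppendLine {

    // Append one line to the file at path p, creating the file if needed.
    static void addToFile(String p, String address) throws IOException {
        File f = new File(p);
        // No-op if the file already exists.
        f.createNewFile();
        // FileWriter in append mode; try-with-resources closes the writer.
        try (PrintWriter writer = new PrintWriter(new FileWriter(f, true))) {
            writer.println(address);
        }
    }

    public static void main(String[] args) throws IOException {
        addToFile("hosts.txt", "127.0.0.1:8020");
    }
}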

Example 32 with PrintWriter

Use of java.io.PrintWriter in project hadoop by apache.

The class TestDNFencing, method doMetasave.

private void doMetasave(NameNode nn2) {
    nn2.getNamesystem().writeLock();
    try {
        // Dump block manager state to stderr while holding the write lock.
        PrintWriter pw = new PrintWriter(System.err);
        nn2.getNamesystem().getBlockManager().metaSave(pw);
        // PrintWriter buffers; flush so the dump actually reaches stderr.
        pw.flush();
    } finally {
        nn2.getNamesystem().writeUnlock();
    }
}
Also used: PrintWriter(java.io.PrintWriter)
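
When the dump needs to be asserted on rather than printed, the usual variation is a PrintWriter over a StringWriter. A minimal sketch; the metaSave stand-in below is hypothetical and only mimics the BlockManager.metaSave(PrintWriter) signature used above:

import java.io.PrintWriter;
import java.io.StringWriter;

public class CaptureDump {

    // Hypothetical stand-in for a component that writes diagnostics to a
    // PrintWriter, like BlockManager.metaSave(PrintWriter) above.
    static void metaSave(PrintWriter out) {
        out.println("Metasave: blocks waiting for replication: 0");
    }

    public static void main(String[] args) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        metaSave(pw);
        // PrintWriter buffers: flush before reading the captured text.
        pw.flush();
        System.out.println(sw.toString().contains("Metasave")); // true
    }
}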

Example 33 with PrintWriter

Use of java.io.PrintWriter in project hadoop by apache.

The class TestBlocks, method testAttemptsBlock.

/**
   * Test AttemptsBlock's rendering.
   */
@Test
public void testAttemptsBlock() {
    AppContext ctx = mock(AppContext.class);
    AppForTest app = new AppForTest(ctx);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(ApplicationIdPBImpl.newInstance(0, 1));
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.REDUCE);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.REDUCE);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    Map<TaskAttemptId, TaskAttempt> attempts = new HashMap<TaskAttemptId, TaskAttempt>();
    TaskAttempt attempt = mock(TaskAttempt.class);
    TaskAttemptId taId = new TaskAttemptIdPBImpl();
    taId.setId(0);
    taId.setTaskId(task.getID());
    when(attempt.getID()).thenReturn(taId);
    final TaskAttemptState taState = TaskAttemptState.SUCCEEDED;
    when(attempt.getState()).thenReturn(taState);
    TaskAttemptReport taReport = mock(TaskAttemptReport.class);
    when(taReport.getTaskAttemptState()).thenReturn(taState);
    when(attempt.getReport()).thenReturn(taReport);
    attempts.put(taId, attempt);
    tasks.put(taskId, task);
    when(task.getAttempts()).thenReturn(attempts);
    app.setTask(task);
    Job job = mock(Job.class);
    when(job.getTasks(TaskType.REDUCE)).thenReturn(tasks);
    app.setJob(job);
    AttemptsBlockForTest block = new AttemptsBlockForTest(app, new Configuration());
    block.addParameter(AMParams.TASK_TYPE, "r");
    block.addParameter(AMParams.ATTEMPT_STATE, "SUCCESSFUL");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    block.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("<a href='" + block.url("task", task.getID().toString()) + "'>" + "attempt_0_0001_r_000000_0</a>"));
}
Also used: Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskAttemptIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskAttemptIdPBImpl) Configuration(org.apache.hadoop.conf.Configuration) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) HashMap(java.util.HashMap) TaskAttemptId(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) TaskAttemptReport(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport) TaskAttemptState(org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) TaskAttempt(org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) Test(org.junit.Test)
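
The test funnels the rendered HTML into data (presumably a shared ByteArrayOutputStream field of TestBlocks) through a PrintWriter, which is why the flush() before the assertion matters. The capture idiom in isolation; the buffer contents and names below are illustrative only:

import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;

public class RenderCapture {
    public static void main(String[] args) {
        ByteArrayOutputStream data = new ByteArrayOutputStream();
        PrintWriter pWriter = new PrintWriter(data);
        pWriter.println("<a href='task'>attempt_0_0001_r_000000_0</a>");
        // Nothing reaches the byte buffer until the writer is flushed.
        pWriter.flush();
        System.out.println(
            data.toString().contains("attempt_0_0001_r_000000_0")); // true
    }
}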

Example 34 with PrintWriter

Use of java.io.PrintWriter in project hadoop by apache.

The class TestBlocks, method testConfigurationBlock.

/**
   * Test rendering for ConfBlock
   */
@Test
public void testConfigurationBlock() throws Exception {
    AppContext ctx = mock(AppContext.class);
    Job job = mock(Job.class);
    Path path = new Path("conf");
    Configuration configuration = new Configuration();
    configuration.set("Key for test", "Value for test");
    final String redactedProp = "Key for redaction";
    configuration.set(MRJobConfig.MR_JOB_REDACTED_PROPERTIES, redactedProp);
    when(job.getConfFile()).thenReturn(path);
    when(job.loadConfFile()).thenReturn(configuration);
    when(ctx.getJob(any(JobId.class))).thenReturn(job);
    ConfBlockForTest configurationBlock = new ConfBlockForTest(ctx);
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    configurationBlock.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("Sorry, can't do anything without a JobID"));
    configurationBlock.addParameter(AMParams.JOB_ID, "job_01_01");
    data.reset();
    configurationBlock.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("Key for test"));
    assertTrue(data.toString().contains("Value for test"));
    assertTrue(data.toString().contains(redactedProp));
    assertTrue(data.toString().contains(MRJobConfUtil.REDACTION_REPLACEMENT_VAL));
}
Also used: Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) Test(org.junit.Test)
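
The data.reset() between the two render passes is what keeps the assertions honest: the same PrintWriter keeps appending to one buffer, so without the reset the first pass's "Sorry, can't do anything without a JobID" output would survive into the second pass's checks. The reuse pattern in isolation (names are illustrative):

import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;

public class BufferReuse {
    public static void main(String[] args) {
        ByteArrayOutputStream data = new ByteArrayOutputStream();
        PrintWriter pw = new PrintWriter(data);

        pw.print("first render");
        pw.flush();
        System.out.println(data.toString()); // first render

        // Discard everything captured so far but keep the same writer.
        data.reset();
        pw.print("second render");
        pw.flush();
        System.out.println(data.toString()); // second render
    }
}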

Example 35 with PrintWriter

Use of java.io.PrintWriter in project hadoop by apache.

The class TestBlocks, method testTasksBlock.

/**
   * Test rendering for TasksBlock
   */
@Test
public void testTasksBlock() throws Exception {
    ApplicationId appId = ApplicationIdPBImpl.newInstance(0, 1);
    JobId jobId = new JobIdPBImpl();
    jobId.setId(0);
    jobId.setAppId(appId);
    TaskId taskId = new TaskIdPBImpl();
    taskId.setId(0);
    taskId.setTaskType(TaskType.MAP);
    taskId.setJobId(jobId);
    Task task = mock(Task.class);
    when(task.getID()).thenReturn(taskId);
    TaskReport report = mock(TaskReport.class);
    when(report.getProgress()).thenReturn(0.7f);
    when(report.getTaskState()).thenReturn(TaskState.SUCCEEDED);
    when(report.getStartTime()).thenReturn(100001L);
    when(report.getFinishTime()).thenReturn(100011L);
    when(report.getStatus()).thenReturn("Dummy Status \n*");
    when(task.getReport()).thenReturn(report);
    when(task.getType()).thenReturn(TaskType.MAP);
    Map<TaskId, Task> tasks = new HashMap<TaskId, Task>();
    tasks.put(taskId, task);
    AppContext ctx = mock(AppContext.class);
    Job job = mock(Job.class);
    when(job.getTasks()).thenReturn(tasks);
    App app = new App(ctx);
    app.setJob(job);
    TasksBlockForTest taskBlock = new TasksBlockForTest(app);
    taskBlock.addParameter(AMParams.TASK_TYPE, "m");
    PrintWriter pWriter = new PrintWriter(data);
    Block html = new BlockForTest(new HtmlBlockForTest(), pWriter, 0, false);
    taskBlock.render(html);
    pWriter.flush();
    assertTrue(data.toString().contains("task_0_0001_m_000000"));
    assertTrue(data.toString().contains("70.00"));
    assertTrue(data.toString().contains("SUCCEEDED"));
    assertTrue(data.toString().contains("100001"));
    assertTrue(data.toString().contains("100011"));
    assertFalse(data.toString().contains("Dummy Status \n*"));
    assertTrue(data.toString().contains("Dummy Status \\n*"));
}
Also used: Task(org.apache.hadoop.mapreduce.v2.app.job.Task) TaskId(org.apache.hadoop.mapreduce.v2.api.records.TaskId) TaskReport(org.apache.hadoop.mapreduce.v2.api.records.TaskReport) TaskIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.TaskIdPBImpl) HashMap(java.util.HashMap) AppContext(org.apache.hadoop.mapreduce.v2.app.AppContext) BlockForTest(org.apache.hadoop.yarn.webapp.view.BlockForTest) JobIdPBImpl(org.apache.hadoop.mapreduce.v2.api.records.impl.pb.JobIdPBImpl) HtmlBlock(org.apache.hadoop.yarn.webapp.view.HtmlBlock) Block(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block) FewAttemptsBlock(org.apache.hadoop.mapreduce.v2.app.webapp.AttemptsPage.FewAttemptsBlock) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Job(org.apache.hadoop.mapreduce.v2.app.job.Job) JobId(org.apache.hadoop.mapreduce.v2.api.records.JobId) PrintWriter(java.io.PrintWriter) Test(org.junit.Test)
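
The last two assertions check that the block escapes the status string before embedding it in the generated page: the raw newline must not appear, while the backslash-escaped form must. A hand-rolled sketch of that escaping contract; the escape helper below is hypothetical, not the method TasksBlock actually calls:

public class StatusEscape {

    // Hypothetical escape: a real renderer would use a proper
    // JavaScript/JSON escaper, but the contract tested above is the same.
    static String escape(String s) {
        return s.replace("\\", "\\\\").replace("\n", "\\n");
    }

    public static void main(String[] args) {
        String status = "Dummy Status \n*";
        String escaped = escape(status);
        System.out.println(escaped.contains("\n"));   // false: no raw newline
        System.out.println(escaped.contains("\\n"));  // true: escaped form
    }
}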

Aggregations

PrintWriter (java.io.PrintWriter): 3529
StringWriter (java.io.StringWriter): 1062
IOException (java.io.IOException): 653
File (java.io.File): 532
Test (org.junit.Test): 432
FileOutputStream (java.io.FileOutputStream): 293
FileWriter (java.io.FileWriter): 274
OutputStreamWriter (java.io.OutputStreamWriter): 255
BufferedReader (java.io.BufferedReader): 180
ArrayList (java.util.ArrayList): 171
HttpServletResponse (javax.servlet.http.HttpServletResponse): 141
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 139
FastPrintWriter (com.android.internal.util.FastPrintWriter): 124
InputStreamReader (java.io.InputStreamReader): 123
HttpServletRequest (javax.servlet.http.HttpServletRequest): 121
Date (java.util.Date): 120
HashMap (java.util.HashMap): 113
Map (java.util.Map): 106
BufferedWriter (java.io.BufferedWriter): 105
Writer (java.io.Writer): 87