Example 91 with PrintWriter

Use of java.io.PrintWriter in the Apache Hadoop project.

From class TestHSWebApp, method testLogsView1.

@Test
public void testLogsView1() throws IOException {
    LOG.info("HsLogsPage");
    // Render the aggregated-logs page against a mock app context with a
    // request that supplies no ContainerId, NodeId, or app owner.
    Injector injector = WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class, new MockAppContext(0, 1, 1, 1));
    // WebAppTests injects a spied PrintWriter as the page's output, so
    // the rendered error messages can be verified directly.
    PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
    verify(spyPw).write("Cannot get container logs without a ContainerId");
    verify(spyPw).write("Cannot get container logs without a NodeId");
    verify(spyPw).write("Cannot get container logs without an app owner");
}
Also used: MockAppContext (org.apache.hadoop.mapreduce.v2.app.MockAppContext), Injector (com.google.inject.Injector), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
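
The verification pattern above can be reproduced outside the Hadoop test harness: wrap a real PrintWriter in a Mockito spy and verify the exact strings written to it. A minimal self-contained sketch; SpyWriterSketch is a made-up name, and the spy wiring that WebAppTests normally performs is done by hand here.

import java.io.PrintWriter;
import java.io.StringWriter;

import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;

public class SpyWriterSketch {
    public static void main(String[] args) {
        // A spy delegates to the real writer while recording every call.
        StringWriter backing = new StringWriter();
        PrintWriter spyPw = spy(new PrintWriter(backing));
        // Code under test writes its error message...
        spyPw.write("Cannot get container logs without a ContainerId");
        spyPw.flush();
        // ...and the test verifies the exact string, as testLogsView1 does.
        verify(spyPw).write("Cannot get container logs without a ContainerId");
        System.out.println(backing);
    }
}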

Example 92 with PrintWriter

Use of java.io.PrintWriter in the Apache Hadoop project.

From class TestNewCombinerGrouping, method testCombiner.

@Test
public void testCombiner() throws Exception {
    if (!new File(TEST_ROOT_DIR).mkdirs()) {
        throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
    }
    File in = new File(TEST_ROOT_DIR, "input");
    if (!in.mkdirs()) {
        throw new RuntimeException("Could not create test dir: " + in);
    }
    File out = new File(TEST_ROOT_DIR, "output");
    // Input records look like "group|key,value"; the test's grouping
    // comparator groups map keys by the prefix before '|'.
    PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
    pw.println("A|a,1");
    pw.println("A|b,2");
    pw.println("B|a,3");
    pw.println("B|b,4");
    pw.println("B|c,5");
    pw.close();
    JobConf conf = new JobConf();
    conf.set("mapreduce.framework.name", "local");
    Job job = new Job(conf);
    TextInputFormat.setInputPaths(job, new Path(in.getPath()));
    TextOutputFormat.setOutputPath(job, new Path(out.getPath()));
    job.setMapperClass(Map.class);
    job.setReducerClass(Reduce.class);
    job.setInputFormatClass(TextInputFormat.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    job.setGroupingComparatorClass(GroupComparator.class);
    job.setCombinerKeyGroupingComparatorClass(GroupComparator.class);
    job.setCombinerClass(Combiner.class);
    job.getConfiguration().setInt("min.num.spills.for.combine", 0);
    job.submit();
    job.waitForCompletion(false);
    if (job.isSuccessful()) {
        Counters counters = job.getCounters();
        long combinerInputRecords = counters.findCounter("org.apache.hadoop.mapreduce.TaskCounter", "COMBINE_INPUT_RECORDS").getValue();
        long combinerOutputRecords = counters.findCounter("org.apache.hadoop.mapreduce.TaskCounter", "COMBINE_OUTPUT_RECORDS").getValue();
        // The combiner must have run and must have merged records:
        // fewer output records than input records.
        Assert.assertTrue(combinerInputRecords > 0);
        Assert.assertTrue(combinerInputRecords > combinerOutputRecords);
        BufferedReader br = new BufferedReader(new FileReader(new File(out, "part-r-00000")));
        Set<String> output = new HashSet<String>();
        String line = br.readLine();
        Assert.assertNotNull(line);
        // Keep the group letter (offset 0) and the value digit (offset 4),
        // e.g. a line like "A|b\t2" yields "A2".
        output.add(line.substring(0, 1) + line.substring(4, 5));
        line = br.readLine();
        Assert.assertNotNull(line);
        output.add(line.substring(0, 1) + line.substring(4, 5));
        line = br.readLine();
        Assert.assertNull(line);
        br.close();
        Set<String> expected = new HashSet<String>();
        expected.add("A2");
        expected.add("B5");
        Assert.assertEquals(expected, output);
    } else {
        Assert.fail("Job failed");
    }
}
Also used: Path (org.apache.hadoop.fs.Path), FileWriter (java.io.FileWriter), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), File (java.io.File), JobConf (org.apache.hadoop.mapred.JobConf), PrintWriter (java.io.PrintWriter), HashSet (java.util.HashSet), Test (org.junit.Test)
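
The explicit pw.close() above predates try-with-resources; since Java 7 the same input file can be written so the writer is flushed and closed even if a println throws. A minimal sketch of just that step, reusing the file names from the test (WriteTestInput is a made-up name):

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class WriteTestInput {
    public static void main(String[] args) throws IOException {
        File in = new File("input");
        if (!in.mkdirs() && !in.isDirectory()) {
            throw new IOException("Could not create test dir: " + in);
        }
        // try-with-resources closes (and therefore flushes) the writer.
        try (PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")))) {
            pw.println("A|a,1");
            pw.println("A|b,2");
            pw.println("B|c,5");
        }
    }
}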

Example 93 with PrintWriter

Use of java.io.PrintWriter in the Apache Hadoop project.

From class TestDistributedShell, method testDSShellWithShellScript.

@Test
public void testDSShellWithShellScript() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customShellScript = new File(tmpDir, "custom_script.sh");
    if (customShellScript.exists()) {
        customShellScript.delete();
    }
    if (!customShellScript.createNewFile()) {
        Assert.fail("Can not create custom shell script file.");
    }
    PrintWriter fileWriter = new PrintWriter(customShellScript);
    // the script just echoes a marker string that will appear in the
    // container logs
    fileWriter.write("echo testDSShellWithShellScript");
    fileWriter.close();
    System.out.println(customShellScript.getAbsolutePath());
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "1", "--shell_script", customShellScript.getAbsolutePath(), "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores", "1" };
    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    List<String> expectedContent = new ArrayList<String>();
    expectedContent.add("testDSShellWithShellScript");
    verifyContainerLog(1, expectedContent, false, "");
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), ArrayList (java.util.ArrayList), TestTimelineClient (org.apache.hadoop.yarn.client.api.impl.TestTimelineClient), YarnClient (org.apache.hadoop.yarn.client.api.YarnClient), File (java.io.File), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
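
One detail worth noting: new PrintWriter(File) creates or truncates the target file, so the delete()/createNewFile() sequence above mainly exists to fail fast with a clear message. A minimal sketch of the script-writing step alone (ScriptFileSketch is a made-up name):

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;

public class ScriptFileSketch {
    public static void main(String[] args) throws FileNotFoundException {
        File tmpDir = new File("target", "tmpDir");
        tmpDir.mkdirs();
        File script = new File(tmpDir, "custom_script.sh");
        // PrintWriter(File) creates or truncates the file; the script
        // body is a single echo used as a log marker.
        try (PrintWriter fileWriter = new PrintWriter(script)) {
            fileWriter.write("echo testDSShellWithShellScript");
        }
        System.out.println(script.getAbsolutePath());
    }
}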

Example 94 with PrintWriter

Use of java.io.PrintWriter in the Apache Hadoop project.

From class TestDistributedShell, method testDSShellWithCustomLogPropertyFile.

@Test
public void testDSShellWithCustomLogPropertyFile() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customLogProperty = new File(tmpDir, "custom_log4j.properties");
    if (customLogProperty.exists()) {
        customLogProperty.delete();
    }
    if (!customLogProperty.createNewFile()) {
        Assert.fail("Can not create custom log4j property file.");
    }
    PrintWriter fileWriter = new PrintWriter(customLogProperty);
    // set the output to DEBUG level
    fileWriter.write("log4j.rootLogger=debug,stdout");
    fileWriter.close();
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "3", "--shell_command", "echo", "--shell_args", "HADOOP", "--log_properties", customLogProperty.getAbsolutePath(), "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores", "1" };
    // Before the DS client runs, the default log level is INFO
    final Log LOG_Client = LogFactory.getLog(Client.class);
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertFalse(LOG_Client.isDebugEnabled());
    final Log LOG_AM = LogFactory.getLog(ApplicationMaster.class);
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertFalse(LOG_AM.isDebugEnabled());
    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(verifyContainerLog(3, null, true, "DEBUG") > 10);
    // After DS finishes, the log level should be DEBUG
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertTrue(LOG_Client.isDebugEnabled());
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertTrue(LOG_AM.isDebugEnabled());
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration), YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration), Log (org.apache.commons.logging.Log), TestTimelineClient (org.apache.hadoop.yarn.client.api.impl.TestTimelineClient), YarnClient (org.apache.hadoop.yarn.client.api.YarnClient), File (java.io.File), PrintWriter (java.io.PrintWriter), Test (org.junit.Test)
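
The generated properties file only takes effect because the distributed shell hands it to log4j on the container side. The same reconfiguration can be sketched locally, assuming log4j 1.x and commons-logging on the classpath (as in Hadoop of this vintage); LogLevelSketch is a made-up name:

import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.log4j.PropertyConfigurator;

public class LogLevelSketch {
    private static final Log LOG = LogFactory.getLog(LogLevelSketch.class);

    public static void main(String[] args) throws FileNotFoundException {
        File props = new File("custom_log4j.properties");
        try (PrintWriter fileWriter = new PrintWriter(props)) {
            // Root logger at DEBUG, writing to a console appender.
            fileWriter.println("log4j.rootLogger=DEBUG,stdout");
            fileWriter.println("log4j.appender.stdout=org.apache.log4j.ConsoleAppender");
            fileWriter.println("log4j.appender.stdout.layout=org.apache.log4j.SimpleLayout");
        }
        // Reconfigure the running log4j instance from the file; after
        // this, isDebugEnabled() flips to true, as the test asserts.
        PropertyConfigurator.configure(props.getAbsolutePath());
        LOG.debug("debug logging is now enabled");
    }
}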

Example 95 with PrintWriter

Use of java.io.PrintWriter in the Apache Hadoop project.

From class SerializedExceptionPBImpl, method init.

public void init(Throwable t) {
    maybeInitBuilder();
    if (t == null) {
        return;
    }
    // Recursively serialize the cause chain, if any.
    if (t.getCause() != null) {
        builder.setCause(new SerializedExceptionPBImpl(t.getCause()).getProto());
    }
    // Capture the stack trace by printing it into an in-memory writer;
    // StringWriter.toString() never returns null, so no check is needed.
    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    t.printStackTrace(pw);
    pw.close();
    builder.setTrace(sw.toString());
    if (t.getMessage() != null) {
        builder.setMessage(t.getMessage());
    }
    builder.setClassName(t.getClass().getCanonicalName());
}
Also used: StringWriter (java.io.StringWriter), PrintWriter (java.io.PrintWriter)
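
The StringWriter-backed PrintWriter here is the standard idiom for capturing a stack trace as a String. Extracted as a standalone helper (StackTraces is a made-up name):

import java.io.PrintWriter;
import java.io.StringWriter;

public final class StackTraces {

    // Print the throwable's stack trace into an in-memory buffer and
    // return it; closing a StringWriter-backed PrintWriter cannot fail.
    public static String toString(Throwable t) {
        StringWriter sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        t.printStackTrace(pw);
        pw.close();
        return sw.toString();
    }

    public static void main(String[] args) {
        System.out.println(toString(new IllegalStateException("boom")));
    }
}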

Aggregations

Types co-occurring with PrintWriter across the indexed examples, with occurrence counts:

PrintWriter (java.io.PrintWriter): 3529
StringWriter (java.io.StringWriter): 1062
IOException (java.io.IOException): 653
File (java.io.File): 532
Test (org.junit.Test): 432
FileOutputStream (java.io.FileOutputStream): 293
FileWriter (java.io.FileWriter): 274
OutputStreamWriter (java.io.OutputStreamWriter): 255
BufferedReader (java.io.BufferedReader): 180
ArrayList (java.util.ArrayList): 171
HttpServletResponse (javax.servlet.http.HttpServletResponse): 141
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 139
FastPrintWriter (com.android.internal.util.FastPrintWriter): 124
InputStreamReader (java.io.InputStreamReader): 123
HttpServletRequest (javax.servlet.http.HttpServletRequest): 121
Date (java.util.Date): 120
HashMap (java.util.HashMap): 113
Map (java.util.Map): 106
BufferedWriter (java.io.BufferedWriter): 105
Writer (java.io.Writer): 87