Example 46 with OutputStreamWriter

Use of java.io.OutputStreamWriter in project hadoop by apache.

Class ApplicationCLI, method listApplicationAttempts.

/**
   * Lists the application attempts matching the given application ID.
   * 
   * @param applicationId the ID of the application whose attempts are listed
   * @throws YarnException
   * @throws IOException
   */
private void listApplicationAttempts(String applicationId) throws YarnException, IOException {
    PrintWriter writer = new PrintWriter(new OutputStreamWriter(sysout, Charset.forName("UTF-8")));
    List<ApplicationAttemptReport> appAttemptsReport = client.getApplicationAttempts(ApplicationId.fromString(applicationId));
    writer.println("Total number of application attempts " + ":" + appAttemptsReport.size());
    writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id", "State", "AM-Container-Id", "Tracking-URL");
    for (ApplicationAttemptReport appAttemptReport : appAttemptsReport) {
        writer.printf(APPLICATION_ATTEMPTS_PATTERN,
            appAttemptReport.getApplicationAttemptId(),
            appAttemptReport.getYarnApplicationAttemptState(),
            appAttemptReport.getAMContainerId() == null ? "N/A"
                : appAttemptReport.getAMContainerId().toString(),
            appAttemptReport.getTrackingUrl());
    }
    writer.flush();
}
Also used : ApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptReport) OutputStreamWriter(java.io.OutputStreamWriter) PrintWriter(java.io.PrintWriter)
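The wrapping pattern above can be exercised on its own. The sketch below is not part of the Hadoop code (class name and sample attempt IDs are made up); it wraps System.out in a PrintWriter via an explicit UTF-8 OutputStreamWriter and flushes without closing, mirroring how listApplicationAttempts treats its sysout stream.

import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

public class AttemptListSketch {
    public static void main(String[] args) {
        // Wrap the raw stream in an explicit UTF-8 writer, then in a PrintWriter,
        // as the CLI does with its sysout stream.
        PrintWriter writer = new PrintWriter(
                new OutputStreamWriter(System.out, StandardCharsets.UTF_8));
        List<String> attempts = Arrays.asList(
                "appattempt_1499000000000_0001_000001",
                "appattempt_1499000000000_0001_000002");
        writer.println("Total number of application attempts :" + attempts.size());
        for (String id : attempts) {
            writer.printf("%-40s%n", id);
        }
        // Flush instead of close so the underlying stream stays open for reuse.
        writer.flush();
    }
}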

Example 47 with OutputStreamWriter

Use of java.io.OutputStreamWriter in project hadoop by apache.

Class ApplicationCLI, method printApplicationAttemptReport.

/**
   * Prints the application attempt report for an application attempt ID.
   * 
   * @param applicationAttemptId the application attempt to report on
   * @return exit code (0 on success, -1 if the attempt cannot be found)
   * @throws YarnException
   * @throws IOException
   */
private int printApplicationAttemptReport(String applicationAttemptId) throws YarnException, IOException {
    ApplicationAttemptReport appAttemptReport = null;
    try {
        appAttemptReport = client.getApplicationAttemptReport(ApplicationAttemptId.fromString(applicationAttemptId));
    } catch (ApplicationNotFoundException e) {
        sysout.println("Application for AppAttempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server.");
        return -1;
    } catch (ApplicationAttemptNotFoundException e) {
        sysout.println("Application Attempt with id '" + applicationAttemptId + "' doesn't exist in RM or Timeline Server.");
        return -1;
    }
    // Use PrintWriter.println, which uses correct platform line ending.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintWriter appAttemptReportStr = new PrintWriter(new OutputStreamWriter(baos, Charset.forName("UTF-8")));
    if (appAttemptReport != null) {
        appAttemptReportStr.println("Application Attempt Report : ");
        appAttemptReportStr.print("\tApplicationAttempt-Id : ");
        appAttemptReportStr.println(appAttemptReport.getApplicationAttemptId());
        appAttemptReportStr.print("\tState : ");
        appAttemptReportStr.println(appAttemptReport.getYarnApplicationAttemptState());
        appAttemptReportStr.print("\tAMContainer : ");
        appAttemptReportStr.println(appAttemptReport.getAMContainerId() == null ? "N/A"
            : appAttemptReport.getAMContainerId().toString());
        appAttemptReportStr.print("\tTracking-URL : ");
        appAttemptReportStr.println(appAttemptReport.getTrackingUrl());
        appAttemptReportStr.print("\tRPC Port : ");
        appAttemptReportStr.println(appAttemptReport.getRpcPort());
        appAttemptReportStr.print("\tAM Host : ");
        appAttemptReportStr.println(appAttemptReport.getHost());
        appAttemptReportStr.print("\tDiagnostics : ");
        appAttemptReportStr.print(appAttemptReport.getDiagnostics());
    } else {
        appAttemptReportStr.print("Application Attempt with id '" + applicationAttemptId + "' doesn't exist in Timeline Server.");
        appAttemptReportStr.close();
        sysout.println(baos.toString("UTF-8"));
        return -1;
    }
    appAttemptReportStr.close();
    sysout.println(baos.toString("UTF-8"));
    return 0;
}
Also used : ApplicationAttemptReport(org.apache.hadoop.yarn.api.records.ApplicationAttemptReport) ApplicationNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException) OutputStreamWriter(java.io.OutputStreamWriter) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ApplicationAttemptNotFoundException(org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException) PrintWriter(java.io.PrintWriter)
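The buffering idiom used here is easy to isolate. Below is a minimal sketch (hypothetical class name and invented report values, not Hadoop code) that builds a report in a ByteArrayOutputStream through a UTF-8 PrintWriter, relying on println for the platform line separator, then emits it as a single string.

import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;

public class ReportBufferSketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
        // Buffer the report in memory; PrintWriter.println adds the
        // platform-specific line ending, as noted in the CLI comment.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintWriter report = new PrintWriter(
                new OutputStreamWriter(baos, StandardCharsets.UTF_8));
        report.println("Application Attempt Report : ");
        report.print("\tApplicationAttempt-Id : ");
        report.println("appattempt_1499000000000_0001_000001");
        report.print("\tState : ");
        report.println("FINISHED");
        // Closing the writer flushes any buffered characters into baos.
        report.close();
        System.out.println(baos.toString("UTF-8"));
    }
}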

Example 48 with OutputStreamWriter

Use of java.io.OutputStreamWriter in project hadoop by apache.

Class TestAggregatedLogFormat, method writeSrcFileAndALog.

private void writeSrcFileAndALog(Path srcFilePath, String fileName, final long length, Path remoteAppLogFile, Path srcFileRoot, ContainerId testContainerId) throws Exception {
    File dir = new File(srcFilePath.toString());
    if (!dir.exists()) {
        if (!dir.mkdirs()) {
            throw new IOException("Unable to create directory : " + dir);
        }
    }
    File outputFile = new File(new File(srcFilePath.toString()), fileName);
    FileOutputStream os = new FileOutputStream(outputFile);
    final OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
    final int ch = filler;
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    LogWriter logWriter = new LogWriter(new Configuration(), remoteAppLogFile, ugi);
    LogKey logKey = new LogKey(testContainerId);
    LogValue logValue = spy(new LogValue(Collections.singletonList(srcFileRoot.toString()), testContainerId, ugi.getShortUserName()));
    final CountDownLatch latch = new CountDownLatch(1);
    Thread t = new Thread() {

        public void run() {
            try {
                for (int i = 0; i < length / 3; i++) {
                    osw.write(ch);
                }
                latch.countDown();
                for (int i = 0; i < (2 * length) / 3; i++) {
                    osw.write(ch);
                }
                osw.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    };
    t.start();
    // Wait until the osw is partially written;
    // aggregation starts once the osw has completed 1/3 of its work.
    latch.await();
    // Aggregate the logs.
    logWriter.append(logKey, logValue);
    logWriter.close();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) LogValue(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue) LogWriter(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter) FileOutputStream(java.io.FileOutputStream) LogKey(org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey) OutputStreamWriter(java.io.OutputStreamWriter) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) File(java.io.File) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)
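The latch handshake in this test can be reduced to a small standalone sketch. Everything below (class name, temp file, fill character) is invented for illustration: a writer thread emits a third of the data, releases a CountDownLatch, and keeps writing while the main thread proceeds against the partially written file.

import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.CountDownLatch;

public class PartialWriteSketch {
    public static void main(String[] args) throws Exception {
        final Path src = Files.createTempFile("src", ".txt");
        final long length = 300;
        final CountDownLatch latch = new CountDownLatch(1);
        final OutputStreamWriter osw = new OutputStreamWriter(
                Files.newOutputStream(src), StandardCharsets.UTF_8);
        Thread t = new Thread() {
            public void run() {
                try {
                    // Write the first third, then release the latch so the
                    // consumer can start against a partially written file.
                    for (int i = 0; i < length / 3; i++) {
                        osw.write('x');
                    }
                    // Flush so the partial content is visible on disk
                    // (the Hadoop test relies on the writer's progress instead).
                    osw.flush();
                    latch.countDown();
                    for (int i = 0; i < (2 * length) / 3; i++) {
                        osw.write('x');
                    }
                    osw.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        };
        t.start();
        latch.await();
        // The real test starts log aggregation here while the writer keeps going.
        System.out.println("partial size: " + Files.size(src) + " bytes");
        t.join();
    }
}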

Example 49 with OutputStreamWriter

Use of java.io.OutputStreamWriter in project hadoop by apache.

Class TestAggregatedLogFormat, method getOutputStreamWriter.

private OutputStreamWriter getOutputStreamWriter(Path srcFilePath, String fileName) throws IOException, FileNotFoundException, UnsupportedEncodingException {
    File dir = new File(srcFilePath.toString());
    if (!dir.exists()) {
        if (!dir.mkdirs()) {
            throw new IOException("Unable to create directory : " + dir);
        }
    }
    File outputFile = new File(new File(srcFilePath.toString()), fileName);
    FileOutputStream os = new FileOutputStream(outputFile);
    OutputStreamWriter osw = new OutputStreamWriter(os, "UTF8");
    return osw;
}
Also used : FileOutputStream(java.io.FileOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) IOException(java.io.IOException) File(java.io.File)
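For reference, a hedged sketch of how such a helper might be exercised in a test. The open method below is a hypothetical stand-in for getOutputStreamWriter, and the file name and content are invented: it creates the source directory on demand, writes a line through the UTF-8 writer, and reads the file back.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class HelperUsageSketch {
    // Hypothetical stand-in for the test helper: ensure the directory exists
    // and return a UTF-8 writer onto a file inside it.
    static OutputStreamWriter open(File dir, String fileName) throws IOException {
        if (!dir.exists() && !dir.mkdirs()) {
            throw new IOException("Unable to create directory : " + dir);
        }
        return new OutputStreamWriter(
                new FileOutputStream(new File(dir, fileName)), StandardCharsets.UTF_8);
    }

    public static void main(String[] args) throws IOException {
        File dir = Files.createTempDirectory("srcFiles").toFile();
        try (OutputStreamWriter osw = open(dir, "stdout")) {
            osw.write("sample container log line\n");
        }
        byte[] bytes = Files.readAllBytes(new File(dir, "stdout").toPath());
        System.out.print(new String(bytes, StandardCharsets.UTF_8));
    }
}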

Example 50 with OutputStreamWriter

Use of java.io.OutputStreamWriter in project hadoop by apache.

Class TestClusterMapReduceTestCase, method _testMapReduce.

public void _testMapReduce(boolean restart) throws Exception {
    OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
    Writer wr = new OutputStreamWriter(os);
    wr.write("hello1\n");
    wr.write("hello2\n");
    wr.write("hello3\n");
    wr.write("hello4\n");
    wr.close();
    if (restart) {
        stopCluster();
        startCluster(false, null);
    }
    JobConf conf = createJobConf();
    conf.setJobName("mr");
    conf.setInputFormat(TextInputFormat.class);
    conf.setMapOutputKeyClass(LongWritable.class);
    conf.setMapOutputValueClass(Text.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);
    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
    FileInputFormat.setInputPaths(conf, getInputDir());
    FileOutputFormat.setOutputPath(conf, getOutputDir());
    JobClient.runJob(conf);
    Path[] outputFiles = FileUtil.stat2Paths(getFileSystem().listStatus(getOutputDir(), new Utils.OutputFileUtils.OutputFilesFilter()));
    if (outputFiles.length > 0) {
        InputStream is = getFileSystem().open(outputFiles[0]);
        BufferedReader reader = new BufferedReader(new InputStreamReader(is));
        String line = reader.readLine();
        int counter = 0;
        while (line != null) {
            counter++;
            assertTrue(line.contains("hello"));
            line = reader.readLine();
        }
        reader.close();
        assertEquals(4, counter);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) InputStreamReader(java.io.InputStreamReader) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) BufferedReader(java.io.BufferedReader) OutputStreamWriter(java.io.OutputStreamWriter) Writer(java.io.Writer)
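The file round trip at the heart of this test can be checked without a mini-cluster. The sketch below uses a local temp file and an invented class name; it writes the same four hello lines through an OutputStreamWriter, here with an explicit UTF-8 charset rather than the platform default used above, and re-reads them with the same assertions.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class RoundTripSketch {
    public static void main(String[] args) throws Exception {
        Path txt = Files.createTempFile("text", ".txt");
        // Write the same four input lines the MapReduce test feeds to the job.
        try (BufferedWriter wr = new BufferedWriter(new OutputStreamWriter(
                Files.newOutputStream(txt), StandardCharsets.UTF_8))) {
            for (int i = 1; i <= 4; i++) {
                wr.write("hello" + i + "\n");
            }
        }
        // Read the file back and apply the same checks as the test.
        int counter = 0;
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                Files.newInputStream(txt), StandardCharsets.UTF_8))) {
            String line = reader.readLine();
            while (line != null) {
                counter++;
                if (!line.contains("hello")) {
                    throw new AssertionError("unexpected line: " + line);
                }
                line = reader.readLine();
            }
        }
        if (counter != 4) {
            throw new AssertionError("expected 4 lines, got " + counter);
        }
        System.out.println("verified " + counter + " lines");
    }
}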

Aggregations

OutputStreamWriter (java.io.OutputStreamWriter): 1644
IOException (java.io.IOException): 625
BufferedWriter (java.io.BufferedWriter): 596
FileOutputStream (java.io.FileOutputStream): 594
Writer (java.io.Writer): 443
File (java.io.File): 365
PrintWriter (java.io.PrintWriter): 272
InputStreamReader (java.io.InputStreamReader): 222
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 202
OutputStream (java.io.OutputStream): 194
BufferedReader (java.io.BufferedReader): 189
Test (org.junit.Test): 123
InputStream (java.io.InputStream): 92
ArrayList (java.util.ArrayList): 90
FileNotFoundException (java.io.FileNotFoundException): 88
Path (org.apache.hadoop.fs.Path): 86
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 78
URL (java.net.URL): 72
Socket (java.net.Socket): 70
HttpURLConnection (java.net.HttpURLConnection): 65