
Example 96 with FileOutputStream

Use of java.io.FileOutputStream in project hadoop by apache.

Class Util, method createWriter:

/** Create a writer of a local file. */
public static PrintWriter createWriter(File dir, String prefix) throws IOException {
    checkDirectory(dir);
    SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
    // Retry with a fresh timestamp until an unused file name is found.
    for (;;) {
        final File f = new File(dir, prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
        if (!f.exists())
            return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
            // ignore and retry; the next timestamp will differ
        }
    }
}
Also used : FileOutputStream(java.io.FileOutputStream) OutputStreamWriter(java.io.OutputStreamWriter) SimpleDateFormat(java.text.SimpleDateFormat) File(java.io.File) Date(java.util.Date) PrintWriter(java.io.PrintWriter)
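
For reference, a minimal standalone sketch of the same retry-on-collision pattern follows. It is not the Hadoop source: the class name, file prefix, and output directory are illustrative, and java.nio.charset.StandardCharsets.UTF_8 stands in for the Guava Charsets constant used above.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.Date;

/** Creates a uniquely named, timestamped text file and writes one line to it (illustrative only). */
public class TimestampedWriterDemo {
    public static void main(String[] args) throws IOException {
        File dir = new File(System.getProperty("java.io.tmpdir"));
        SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
        // Keep generating timestamped names until one does not collide with an existing file.
        while (true) {
            File f = new File(dir, "demo" + dateFormat.format(new Date()) + ".txt");
            if (!f.exists()) {
                try (PrintWriter out = new PrintWriter(
                        new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8))) {
                    out.println("hello");
                }
                System.out.println("Wrote " + f.getAbsolutePath());
                break;
            }
            try {
                Thread.sleep(10); // let the millisecond clock move on
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}

Because the generated name carries millisecond precision, a 10 ms pause is enough for the next attempt to produce a different file name.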

Example 97 with FileOutputStream

Use of java.io.FileOutputStream in project hadoop by apache.

Class GenSort, method main:

public static void main(String[] args) throws Exception {
    Unsigned16 startingRecord = new Unsigned16();
    Unsigned16 numberOfRecords;
    OutputStream out;
    boolean useAscii = false;
    Unsigned16 checksum = null;
    int i;
    for (i = 0; i < args.length; ++i) {
        String arg = args[i];
        int argLength = arg.length();
        if (argLength >= 1 && arg.charAt(0) == '-') {
            if (argLength < 2) {
                usage();
            }
            switch (arg.charAt(1)) {
                case 'a':
                    // generate human-readable ASCII records instead of binary ones
                    useAscii = true;
                    break;
                case 'b':
                    // starting record number, given in decimal right after the flag (e.g. -b10)
                    startingRecord = Unsigned16.fromDecimal(arg.substring(2));
                    break;
                case 'c':
                    // compute a checksum of the generated data and print it at the end
                    checksum = new Unsigned16();
                    break;
                default:
                    usage();
            }
        } else {
            break;
        }
    }
    if (args.length - i != 2) {
        usage();
    }
    numberOfRecords = Unsigned16.fromDecimal(args[i]);
    out = new FileOutputStream(args[i + 1]);
    outputRecords(out, useAscii, startingRecord, numberOfRecords, checksum);
    out.close();
    if (checksum != null) {
        System.out.println(checksum);
    }
}
Also used : OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream)
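
From the code, -a switches to ASCII records, -b<n> sets the starting record number, and -c requests a checksum that is printed after the data is written; the two positional arguments are the record count and the output file. The sketch below replays that parsing loop on a sample command line. It is not the Hadoop source: the class name is made up, the output path is a temporary file, and the Unsigned16/outputRecords pieces are left out.

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;

/** Walk-through of the flag-parsing loop above; class and variable names are hypothetical. */
public class GenSortArgsDemo {
    public static void main(String[] args) throws Exception {
        // Stand-in for a real command line such as: gensort -a -b10 -c 1000 <output file>
        String outPath = File.createTempFile("gensort-demo", ".txt").getPath();
        String[] sample = {"-a", "-b10", "-c", "1000", outPath};
        boolean useAscii = false;
        String startingRecord = "0";
        boolean checksum = false;
        int i;
        for (i = 0; i < sample.length; ++i) {
            String arg = sample[i];
            if (arg.length() >= 2 && arg.charAt(0) == '-') {
                switch (arg.charAt(1)) {
                    case 'a': useAscii = true; break;                   // ASCII records
                    case 'b': startingRecord = arg.substring(2); break; // starting record number
                    case 'c': checksum = true; break;                   // compute a checksum
                    default: throw new IllegalArgumentException(arg);
                }
            } else {
                break; // first positional argument reached
            }
        }
        System.out.println("ascii=" + useAscii + " start=" + startingRecord + " checksum=" + checksum
                + " records=" + sample[i] + " file=" + sample[i + 1]);
        // The real tool opens the destination with a plain FileOutputStream and streams records into it.
        try (OutputStream out = new FileOutputStream(sample[i + 1])) {
            // outputRecords(out, useAscii, startingRecord, numberOfRecords, checksum) would go here.
        }
    }
}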

Example 98 with FileOutputStream

Use of java.io.FileOutputStream in project hadoop by apache.

Class RumenToSLSConverter, method generateSLSLoadFile:

private static void generateSLSLoadFile(String inputFile, String outputFile) throws IOException {
    try (Reader input = new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
        try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
            ObjectMapper mapper = new ObjectMapper();
            ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
            Iterator<Map> i = mapper.readValues(new JsonFactory().createParser(input), Map.class);
            while (i.hasNext()) {
                Map m = i.next();
                output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
            }
        }
    }
}
Also used : InputStreamReader(java.io.InputStreamReader) FileOutputStream(java.io.FileOutputStream) JsonFactory(com.fasterxml.jackson.core.JsonFactory) Reader(java.io.Reader) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) OutputStreamWriter(java.io.OutputStreamWriter) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) FileInputStream(java.io.FileInputStream) Writer(java.io.Writer) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
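
The SLS-specific transformation happens in createSLSJob(), which is not shown here; the surrounding I/O pattern, reading a stream of top-level JSON objects with Jackson and re-emitting each one pretty-printed, can be sketched on its own. The class name below is made up and the records pass through unchanged.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
import java.util.Map;

/** Copies a stream of JSON objects from one file to another, pretty-printing each (illustrative only). */
public class JsonStreamCopy {
    public static void main(String[] args) throws Exception {
        String inputFile = args[0];
        String outputFile = args[1];
        ObjectMapper mapper = new ObjectMapper();
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        try (Reader input = new InputStreamReader(new FileInputStream(inputFile), StandardCharsets.UTF_8);
             Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
            // readValues() returns an iterator over consecutive top-level JSON objects in the stream.
            Iterator<Map> records = mapper.readValues(new JsonFactory().createParser(input), Map.class);
            while (records.hasNext()) {
                output.write(writer.writeValueAsString(records.next()) + System.lineSeparator());
            }
        }
    }
}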

Example 99 with FileOutputStream

Use of java.io.FileOutputStream in project hadoop by apache.

Class RumenToSLSConverter, method generateSLSNodeFile:

@SuppressWarnings("unchecked")
private static void generateSLSNodeFile(String outputFile) throws IOException {
    try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
        ObjectMapper mapper = new ObjectMapper();
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
            Map rack = new LinkedHashMap();
            rack.put("rack", entry.getKey());
            List nodes = new ArrayList();
            for (String name : entry.getValue()) {
                Map node = new LinkedHashMap();
                node.put("node", name);
                nodes.add(node);
            }
            rack.put("nodes", nodes);
            output.write(writer.writeValueAsString(rack) + EOL);
        }
    }
}
Also used : TreeSet(java.util.TreeSet) Set(java.util.Set) FileOutputStream(java.io.FileOutputStream) ArrayList(java.util.ArrayList) ObjectWriter(com.fasterxml.jackson.databind.ObjectWriter) OutputStreamWriter(java.io.OutputStreamWriter) List(java.util.List) LinkedHashMap(java.util.LinkedHashMap) Map(java.util.Map) TreeMap(java.util.TreeMap) Writer(java.io.Writer) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
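
A standalone sketch of the node-file step is below. The rack/node topology is invented and rackNodeMap is filled inline rather than from a Rumen trace, but the LinkedHashMap/ObjectWriter wiring mirrors the method above.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

/** Writes one pretty-printed JSON object per rack, listing its nodes (illustrative only). */
public class SlsNodeFileDemo {
    public static void main(String[] args) throws Exception {
        // Invented topology; the real converter builds this map while reading the trace.
        Map<String, Set<String>> rackNodeMap = new TreeMap<>();
        rackNodeMap.put("/rack1", new TreeSet<>(Arrays.asList("node1", "node2")));
        rackNodeMap.put("/rack2", new TreeSet<>(Arrays.asList("node3")));

        File outputFile = File.createTempFile("sls-nodes", ".json");
        ObjectMapper mapper = new ObjectMapper();
        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
        try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), StandardCharsets.UTF_8)) {
            for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
                Map<String, Object> rack = new LinkedHashMap<>();
                rack.put("rack", entry.getKey());
                List<Map<String, String>> nodes = new ArrayList<>();
                for (String name : entry.getValue()) {
                    Map<String, String> node = new LinkedHashMap<>();
                    node.put("node", name);
                    nodes.add(node);
                }
                rack.put("nodes", nodes);
                output.write(writer.writeValueAsString(rack) + System.lineSeparator());
            }
        }
        System.out.println("Wrote " + outputFile);
    }
}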

Example 100 with FileOutputStream

Use of java.io.FileOutputStream in project hadoop by apache.

Class TestFSDownload, method createJarFile:

static LocalResource createJarFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis) throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);
    File archiveFile = new File(p.toUri().getPath() + ".jar");
    archiveFile.createNewFile();
    JarOutputStream out = new JarOutputStream(new FileOutputStream(archiveFile));
    out.putNextEntry(new JarEntry(p.getName()));
    out.write(bytes);
    out.closeEntry();
    out.close();
    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".jar")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".jar")).getModificationTime());
    return ret;
}
Also used : Path(org.apache.hadoop.fs.Path) FileOutputStream(java.io.FileOutputStream) JarOutputStream(java.util.jar.JarOutputStream) JarEntry(java.util.jar.JarEntry) File(java.io.File) LocalResource(org.apache.hadoop.yarn.api.records.LocalResource)
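
The YARN half of this helper (recordFactory, LocalResource, FileContext) only makes sense inside the test harness, but the jar-building half stands alone. A minimal sketch with an invented entry name, using try-with-resources instead of the explicit close() above:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Random;
import java.util.jar.JarEntry;
import java.util.jar.JarOutputStream;

/** Builds a small jar containing one entry of random bytes (illustrative only). */
public class RandomJarDemo {
    public static void main(String[] args) throws IOException {
        byte[] bytes = new byte[1024];
        new Random().nextBytes(bytes);
        File archiveFile = File.createTempFile("payload", ".jar");
        // The stream is closed even if writing fails, so no partially open handle is leaked.
        try (JarOutputStream out = new JarOutputStream(new FileOutputStream(archiveFile))) {
            out.putNextEntry(new JarEntry("payload.bin"));
            out.write(bytes);
            out.closeEntry();
        }
        System.out.println("Wrote " + archiveFile + " (" + archiveFile.length() + " bytes)");
    }
}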

Aggregations

Usage counts for FileOutputStream and the classes that most often appear alongside it in the indexed sources:

FileOutputStream (java.io.FileOutputStream): 5940
File (java.io.File): 3316
IOException (java.io.IOException): 2581
FileInputStream (java.io.FileInputStream): 1060
OutputStream (java.io.OutputStream): 960
BufferedOutputStream (java.io.BufferedOutputStream): 793
InputStream (java.io.InputStream): 771
FileNotFoundException (java.io.FileNotFoundException): 634
OutputStreamWriter (java.io.OutputStreamWriter): 586
Test (org.junit.Test): 471
BufferedWriter (java.io.BufferedWriter): 291
PrintWriter (java.io.PrintWriter): 291
ZipEntry (java.util.zip.ZipEntry): 277
DataOutputStream (java.io.DataOutputStream): 252
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 249
ZipOutputStream (java.util.zip.ZipOutputStream): 240
BufferedInputStream (java.io.BufferedInputStream): 239
ArrayList (java.util.ArrayList): 216
Writer (java.io.Writer): 197
URL (java.net.URL): 184