Use of java.io.FileOutputStream in the Apache Hadoop project: class Util, method createWriter.
/**
 * Creates a {@link PrintWriter} over a new UTF-8 text file in {@code dir}.
 * The file name is {@code prefix} plus a millisecond timestamp plus ".txt";
 * if a file with that name already exists, the method sleeps briefly and
 * retries until the timestamp yields an unused name.
 *
 * @param dir existing directory to create the file in (validated by checkDirectory)
 * @param prefix file-name prefix
 * @return a writer over the newly created file
 * @throws IOException if the directory check or file creation fails
 */
public static PrintWriter createWriter(File dir, String prefix) throws IOException {
  checkDirectory(dir);
  final SimpleDateFormat dateFormat = new SimpleDateFormat("-yyyyMMdd-HHmmssSSS");
  for (;;) {
    final File f = new File(dir,
        prefix + dateFormat.format(new Date(System.currentTimeMillis())) + ".txt");
    if (!f.exists()) {
      return new PrintWriter(new OutputStreamWriter(new FileOutputStream(f), Charsets.UTF_8));
    }
    try {
      Thread.sleep(10);
    } catch (InterruptedException e) {
      // Never swallow an interrupt: restore the thread's interrupt status so
      // callers (and outer loops) can observe and react to the interruption.
      Thread.currentThread().interrupt();
    }
  }
}
Use of java.io.FileOutputStream in the Apache Hadoop project: class GenSort, method main.
/**
 * Command-line entry point for the GenSort record generator.
 *
 * Flags (must precede positional arguments):
 *   -a          emit ASCII records instead of binary
 *   -bNNN       starting record number (decimal, appended to the flag)
 *   -c          compute and print a checksum of the generated data
 * Positional arguments: &lt;number of records&gt; &lt;output file&gt;
 *
 * @param args command-line arguments as described above
 * @throws Exception if argument parsing or record generation fails
 */
public static void main(String[] args) throws Exception {
  Unsigned16 startingRecord = new Unsigned16();
  Unsigned16 numberOfRecords;
  boolean useAscii = false;
  Unsigned16 checksum = null;
  int i;
  // Consume leading '-' flags; stop at the first positional argument.
  for (i = 0; i < args.length; ++i) {
    String arg = args[i];
    int argLength = arg.length();
    if (argLength >= 1 && arg.charAt(0) == '-') {
      if (argLength < 2) {
        usage();
      }
      switch (arg.charAt(1)) {
        case 'a':
          useAscii = true;
          break;
        case 'b':
          // Rest of the flag is the decimal starting record number, e.g. -b100.
          startingRecord = Unsigned16.fromDecimal(arg.substring(2));
          break;
        case 'c':
          checksum = new Unsigned16();
          break;
        default:
          usage();
      }
    } else {
      break;
    }
  }
  // Exactly two positional arguments must remain: record count and output path.
  if (args.length - i != 2) {
    usage();
  }
  numberOfRecords = Unsigned16.fromDecimal(args[i]);
  // try-with-resources guarantees the stream is closed even if
  // outputRecords throws; the original leaked the stream on failure.
  try (OutputStream out = new FileOutputStream(args[i + 1])) {
    outputRecords(out, useAscii, startingRecord, numberOfRecords, checksum);
  }
  if (checksum != null) {
    System.out.println(checksum);
  }
}
Use of java.io.FileOutputStream in the Apache Hadoop project: class RumenToSLSConverter, method generateSLSLoadFile.
/**
 * Reads a Rumen job-trace JSON file and writes the equivalent SLS load file,
 * one pretty-printed SLS job object per trace entry, separated by EOL.
 *
 * @param inputFile path to the Rumen trace (UTF-8 JSON)
 * @param outputFile path of the SLS load file to create (UTF-8)
 * @throws IOException if reading, parsing, or writing fails
 */
private static void generateSLSLoadFile(String inputFile, String outputFile) throws IOException {
  // A single try-with-resources closes the writer first, then the reader —
  // the same order as the original nested form.
  try (Reader in = new InputStreamReader(new FileInputStream(inputFile), "UTF-8");
       Writer out = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectWriter prettyWriter = mapper.writerWithDefaultPrettyPrinter();
    Iterator<Map> jobs = mapper.readValues(new JsonFactory().createParser(in), Map.class);
    while (jobs.hasNext()) {
      out.write(prettyWriter.writeValueAsString(createSLSJob(jobs.next())) + EOL);
    }
  }
}
Use of java.io.FileOutputStream in the Apache Hadoop project: class RumenToSLSConverter, method generateSLSNodeFile.
/**
 * Writes the SLS node-topology file: one pretty-printed JSON object per rack
 * in {@code rackNodeMap}, each listing the rack name and its node names,
 * separated by EOL.
 *
 * @param outputFile path of the node file to create (UTF-8)
 * @throws IOException if the file cannot be written
 */
private static void generateSLSNodeFile(String outputFile) throws IOException {
  try (Writer output = new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
    ObjectMapper mapper = new ObjectMapper();
    ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
    for (Map.Entry<String, Set<String>> entry : rackNodeMap.entrySet()) {
      // LinkedHashMap preserves insertion order so "rack" precedes "nodes"
      // in the serialized JSON.
      Map<String, Object> rack = new LinkedHashMap<>();
      rack.put("rack", entry.getKey());
      List<Map<String, String>> nodes = new ArrayList<>();
      for (String name : entry.getValue()) {
        Map<String, String> node = new LinkedHashMap<>();
        node.put("node", name);
        nodes.add(node);
      }
      rack.put("nodes", nodes);
      output.write(writer.writeValueAsString(rack) + EOL);
    }
  }
}
Use of java.io.FileOutputStream in the Apache Hadoop project: class TestFSDownload, method createJarFile.
/**
 * Creates a jar file at {@code p + ".jar"} containing {@code len} random bytes
 * in a single entry named after {@code p}, and returns a LocalResource
 * describing it (ARCHIVE type, given visibility, size, and timestamp).
 *
 * @param files file context used to read the jar's modification time
 * @param p base path; the jar is written to p + ".jar"
 * @param len number of random payload bytes
 * @param r random source for the payload
 * @param vis visibility to record on the resource
 * @return a populated LocalResource for the created jar
 * @throws IOException if the jar cannot be written or stat'ed
 * @throws URISyntaxException if the resource URL cannot be formed
 */
static LocalResource createJarFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis) throws IOException, URISyntaxException {
  byte[] bytes = new byte[len];
  r.nextBytes(bytes);
  File archiveFile = new File(p.toUri().getPath() + ".jar");
  // Result deliberately ignored: FileOutputStream below creates/truncates
  // the file regardless of whether it already existed.
  archiveFile.createNewFile();
  // try-with-resources ensures the jar stream is closed even if a write
  // throws; the original leaked the stream on failure.
  try (JarOutputStream out = new JarOutputStream(new FileOutputStream(archiveFile))) {
    out.putNextEntry(new JarEntry(p.getName()));
    out.write(bytes);
    out.closeEntry();
  }
  // Build the jar path once instead of three separate times.
  Path jarPath = new Path(p.toString() + ".jar");
  LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
  ret.setResource(URL.fromPath(jarPath));
  ret.setSize(len);
  ret.setType(LocalResourceType.ARCHIVE);
  ret.setVisibility(vis);
  ret.setTimestamp(files.getFileStatus(jarPath).getModificationTime());
  return ret;
}
Aggregated usage examples of java.io.FileOutputStream in the Apache Hadoop project.