Example usage of org.apache.commons.io.output.FileWriterWithEncoding in the Apache Hadoop project:
the generateScript method of the HadoopArchiveLogs class.
/*
The generated script looks like this:
#!/bin/bash
set -e
set -x
if [ "$YARN_SHELL_ID" == "1" ]; then
appId="application_1440448768987_0001"
user="rkanter"
elif [ "$YARN_SHELL_ID" == "2" ]; then
appId="application_1440448768987_0002"
user="rkanter"
else
echo "Unknown Mapping!"
exit 1
fi
export HADOOP_CLIENT_OPTS="-Xmx1024m"
export HADOOP_CLASSPATH=/dist/share/hadoop/tools/lib/hadoop-archive-logs-2.8.0-SNAPSHOT.jar:/dist/share/hadoop/tools/lib/hadoop-archives-2.8.0-SNAPSHOT.jar
"$HADOOP_HOME"/bin/hadoop org.apache.hadoop.tools.HadoopArchiveLogsRunner -appId "$appId" -user "$user" -workingDir /tmp/logs/archive-logs-work -remoteRootLogDir /tmp/logs -suffix logs
*/
/**
 * Writes the bash script (see the sample in the comment above) that each
 * distributed-shell container executes. The script maps $YARN_SHELL_ID to an
 * application id/user pair via an if/elif chain, then invokes
 * {@link HadoopArchiveLogsRunner} with the archiving parameters.
 *
 * @param localScript      local file to write the script to
 * @param workingDir       working directory passed to the runner
 * @param remoteRootLogDir remote root log directory passed to the runner
 * @param suffix           log directory suffix passed to the runner
 * @throws IOException if the script cannot be written
 */
@VisibleForTesting
void generateScript(File localScript, Path workingDir, Path remoteRootLogDir, String suffix) throws IOException {
    if (verbose) {
        LOG.info("Generating script at: " + localScript.getAbsolutePath());
    }
    // Locate the jars containing the runner and the HadoopArchives tool so the
    // generated script can export them on HADOOP_CLASSPATH.
    String halrJarPath = HadoopArchiveLogsRunner.class.getProtectionDomain().getCodeSource().getLocation().getPath();
    String harJarPath = HadoopArchives.class.getProtectionDomain().getCodeSource().getLocation().getPath();
    String classpath = halrJarPath + File.pathSeparator + harJarPath;
    // try-with-resources replaces the manual null-checked finally block: the
    // writer is always closed, and a failed write is not masked by close().
    try (FileWriterWithEncoding fw = new FileWriterWithEncoding(localScript, "UTF-8")) {
        fw.write("#!/bin/bash\nset -e\nset -x\n");
        int containerCount = 1;
        // Each iteration emits an "if"/"elif" arm ending in a dangling "el",
        // which is completed either by the next arm's "if" or by the final
        // "se" below (forming "else"). NOTE(review): an empty
        // eligibleApplications would yield a malformed script ("se" with no
        // preceding "el"); callers appear to guarantee it is non-empty.
        for (AppInfo app : eligibleApplications) {
            fw.write("if [ \"$YARN_SHELL_ID\" == \"");
            fw.write(Integer.toString(containerCount));
            fw.write("\" ]; then\n\tappId=\"");
            fw.write(app.getAppId());
            fw.write("\"\n\tuser=\"");
            fw.write(app.getUser());
            fw.write("\"\nel");
            containerCount++;
        }
        fw.write("se\n\techo \"Unknown Mapping!\"\n\texit 1\nfi\n");
        fw.write("export HADOOP_CLIENT_OPTS=\"-Xmx");
        fw.write(Long.toString(memory));
        fw.write("m\"\n");
        fw.write("export HADOOP_CLASSPATH=");
        fw.write(classpath);
        fw.write("\n\"$HADOOP_HOME\"/bin/hadoop ");
        fw.write(HadoopArchiveLogsRunner.class.getName());
        fw.write(" -appId \"$appId\" -user \"$user\" -workingDir ");
        fw.write(workingDir.toString());
        fw.write(" -remoteRootLogDir ");
        fw.write(remoteRootLogDir.toString());
        fw.write(" -suffix ");
        fw.write(suffix);
        if (!proxy) {
            fw.write(" -noProxy\n");
        }
        fw.write("\n");
    }
}
Example usage of org.apache.commons.io.output.FileWriterWithEncoding in the dkpro-tc project (DKPro):
the execute method of the WekaOutcomeIDReport class.
/**
 * Loads the Weka predictions and evaluation results, converts them into
 * id-to-outcome {@link Properties} (multi-label or single-label path), and
 * stores them to the report's target output file as UTF-8.
 *
 * @throws Exception if reading the inputs or writing the output fails
 */
@Override
public void execute() throws Exception {
    init();
    File arff = WekaUtils.getFile(getContext(), "", FILENAME_PREDICTIONS, AccessMode.READONLY);
    mlResults = WekaUtils.getFile(getContext(), "", WekaTestTask.evaluationBin, AccessMode.READONLY);
    Instances predictions = WekaUtils.getInstances(arff, isMultiLabel);
    List<String> labels = getLabels(isMultiLabel, isRegression);
    Properties props;
    if (isMultiLabel) {
        MultilabelResult r = WekaUtils.readMlResultFromFile(mlResults);
        props = generateMlProperties(predictions, labels, r);
    } else {
        Map<Integer, String> documentIdMap = loadDocumentMap();
        props = generateSlProperties(predictions, isRegression, isUnit, documentIdMap, labels);
    }
    // try-with-resources instead of IOUtils.closeQuietly: a failed close() can
    // mean buffered output was never flushed, so it should not be swallowed.
    try (FileWriterWithEncoding fw = new FileWriterWithEncoding(getTargetOutputFile(), "utf-8")) {
        props.store(fw, generateHeader(labels));
    }
}
Example usage of org.apache.commons.io.output.FileWriterWithEncoding in the ANNIS project (korpling):
the resetCaches method of the AnnisRunner class.
/**
 * Resets OS-level caches before a benchmark run (Linux only): syncs dirty
 * pages, drops the kernel page/dentry/inode caches via
 * {@code /proc/sys/vm/drop_caches}, and restarts PostgreSQL through its
 * init script. On other operating systems a warning is logged.
 *
 * @param currentOS the operating system the runner is executing on
 */
private void resetCaches(AnnisRunner.OS currentOS) {
    switch (currentOS) {
        case linux:
            try {
                log.info("resetting caches");
                log.debug("syncing");
                // Flush dirty pages to disk before dropping the caches.
                // ProcessBuilder avoids Runtime.exec(String)'s naive
                // whitespace tokenization of command lines.
                new ProcessBuilder("sync").start().waitFor();
                File dropCaches = new File("/proc/sys/vm/drop_caches");
                if (dropCaches.canWrite()) {
                    log.debug("clearing file system cache");
                    try (Writer w = new FileWriterWithEncoding(dropCaches, "UTF-8")) {
                        // "3" = free page cache plus dentries and inodes.
                        w.write("3");
                    }
                } else {
                    log.warn("Cannot clear file system cache of the operating system");
                }
                File postgresScript = new File("/etc/init.d/postgresql");
                if (postgresScript.exists() && postgresScript.isFile()) {
                    log.debug("restarting postgresql");
                    new ProcessBuilder(postgresScript.getAbsolutePath(), "restart").start().waitFor();
                } else {
                    log.warn("Cannot restart postgresql");
                }
            } catch (IOException ex) {
                log.error(null, ex);
            } catch (InterruptedException ex) {
                // Restore the interrupt flag so callers can still observe
                // the interruption after we log it.
                Thread.currentThread().interrupt();
                log.error(null, ex);
            }
            break;
        default:
            log.warn("Cannot reset cache on this operating system");
    }
}
Example usage of org.apache.commons.io.output.FileWriterWithEncoding in the ANNIS project (korpling):
the writeDatabasePropertiesFile method of the CorpusAdministration class.
// /// Helper
/**
 * Writes the PostgreSQL datasource configuration to
 * {@code $annis.home/conf/database.properties} (UTF-8).
 *
 * @param host     database host
 * @param port     database port
 * @param database database name
 * @param user     datasource username
 * @param password datasource password (stored in plain text in the file)
 * @param useSSL   whether SSL should be enabled for the connection
 * @param schema   optional schema; omitted from the file when {@code null}
 * @throws FileAccessException if the properties file cannot be written
 */
protected void writeDatabasePropertiesFile(String host, String port, String database, String user, String password, boolean useSSL, String schema) {
    File file = new File(System.getProperty("annis.home") + "/conf", "database.properties");
    try (BufferedWriter writer = new BufferedWriter(new FileWriterWithEncoding(file, "UTF-8"))) {
        // Assemble the whole configuration first, then write it in one go.
        StringBuilder conf = new StringBuilder();
        conf.append("# database configuration\n");
        conf.append("datasource.driver=org.postgresql.Driver\n");
        conf.append("datasource.url=jdbc:postgresql://").append(host).append(":").append(port).append("/").append(database).append("\n");
        conf.append("datasource.username=").append(user).append("\n");
        conf.append("datasource.password=").append(password).append("\n");
        conf.append("datasource.ssl=").append(useSSL ? "true" : "false").append("\n");
        if (schema != null) {
            conf.append("datasource.schema=").append(schema).append("\n");
        }
        writer.write(conf.toString());
    } catch (IOException e) {
        log.error("Couldn't write database properties file", e);
        throw new FileAccessException(e);
    }
    log.info("Wrote database configuration to " + file.getAbsolutePath());
}
Example usage of org.apache.commons.io.output.FileWriterWithEncoding in the polymap4-core project (Polymap4):
the equipSvg method of the Graphic class.
/**
 * Rewrites an SVG file in place so that the paint attributes of all shape
 * elements (fill/stroke colors, opacities, stroke width) reference SLD
 * {@code param(...)} placeholders, then transcodes the modified document
 * back to the same file as UTF-8.
 *
 * @param f the SVG file to modify in place
 * @throws RuntimeException wrapping any parse, transform, or write failure
 */
public static void equipSvg(File f) {
    try {
        // Parse the SVG document from disk.
        String parserClassName = XMLResourceDescriptor.getXMLParserClassName();
        SAXSVGDocumentFactory documentFactory = new SAXSVGDocumentFactory(parserClassName);
        Document doc = documentFactory.createDocument(f.toURI().toURL().toString());
        // Replace the paint attributes on every recognized shape element.
        List<String> shapeNames = Lists.newArrayList("circle", "ellipse", "line", "mesh", "path", "polygon", "polyline", "rect");
        NodeList allElements = doc.getElementsByTagName("*");
        for (int idx = 0; idx < allElements.getLength(); idx++) {
            Node candidate = allElements.item(idx);
            if (!(candidate instanceof Element) || !shapeNames.contains(candidate.getNodeName())) {
                continue;
            }
            Element shape = (Element) candidate;
            shape.setAttribute("fill", "param(fill-color)");
            shape.setAttribute("fill-opacity", "param(fill-opacity)");
            shape.setAttribute("stroke", "param(stroke-color)");
            shape.setAttribute("stroke-opacity", "param(stroke-opacity)");
            shape.setAttribute("stroke-width", "param(stroke-width)");
        }
        // Serialize the modified document back over the original file.
        try (FileWriterWithEncoding out = new FileWriterWithEncoding(f, "UTF-8")) {
            SVGTranscoder t = new SVGTranscoder();
            t.transcode(new TranscoderInput(doc), new TranscoderOutput(out));
        }
        log.info("SVG: " + FileUtils.readFileToString(f, "UTF-8"));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
Aggregations