use of org.apache.commons.io.output.ByteArrayOutputStream in project gocd by gocd.
the class GoFileConfigDataSource method configAsXml.
public String configAsXml(CruiseConfig config, boolean skipPreprocessingAndValidation) throws Exception {
    LOGGER.debug("[Config Save] === Converting config to XML");
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    magicalGoConfigXmlWriter.write(config, outputStream, skipPreprocessingAndValidation);
    LOGGER.debug("[Config Save] === Done converting config to XML");
    return outputStream.toString();
}
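The same write-then-stringify pattern applies outside GoCD: serialize into an in-memory ByteArrayOutputStream, then decode the buffer with an explicit charset rather than the platform default. A minimal, self-contained sketch using the JDK's Properties XML writer (the class name InMemoryXmlExample and the sample property are illustrative, not part of GoCD):

import java.io.IOException;
import java.util.Properties;
import org.apache.commons.io.output.ByteArrayOutputStream;

public class InMemoryXmlExample {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        props.setProperty("pipeline", "build-linux");
        // Write the XML into memory instead of a file...
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        props.storeToXML(out, "sample config", "UTF-8");
        // ...then decode the buffered bytes with an explicit charset.
        String xml = out.toString("UTF-8");
        System.out.println(xml);
    }
}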
use of org.apache.commons.io.output.ByteArrayOutputStream in project gocd by gocd.
the class GoFileConfigDataSource method encryptPasswords.
private void encryptPasswords(File configFile) throws Exception {
    String currentContent = FileUtils.readFileToString(configFile, UTF_8);
    GoConfigHolder configHolder = magicalGoConfigXmlLoader.loadConfigHolder(currentContent);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    magicalGoConfigXmlWriter.write(configHolder.configForEdit, stream, true);
    String postEncryptContent = new String(stream.toByteArray(), UTF_8);
    if (!currentContent.equals(postEncryptContent)) {
        LOGGER.debug("[Encrypt] Writing config to file");
        FileUtils.writeStringToFile(configFile, postEncryptContent, UTF_8);
    }
}
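The guard above avoids rewriting the file when re-serialization produced identical content. A minimal sketch of that compare-before-write idiom, with an explicit charset on both the read and the write (the method name rewriteIfChanged is ours, not GoCD's):

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.FileUtils;

public class RewriteIfChanged {
    // Rewrite the file only when the transformed content differs from what is on disk,
    // avoiding a needless write (and mtime change) when nothing actually changed.
    static void rewriteIfChanged(File file, String newContent) throws IOException {
        String current = FileUtils.readFileToString(file, StandardCharsets.UTF_8);
        if (!current.equals(newContent)) {
            FileUtils.writeStringToFile(file, newContent, StandardCharsets.UTF_8);
        }
    }
}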
use of org.apache.commons.io.output.ByteArrayOutputStream in project opentheso by miledrousset.
the class ApacheFOP method test_sparna.
public void test_sparna() throws IOException, TransformerConfigurationException, TransformerException {
    String input = readFile("test_unesco.rdf");
    System.out.println(input);
    // init XSLT
    File initialFile = new File("skos-alpha.xsl");
    InputStream is = Files.asByteSource(initialFile).openStream();
    TransformerFactory factory = TransformerFactory.newInstance();
    Transformer t = factory.newTransformer(new StreamSource(is));
    // Apply XSLT, capturing the result in memory rather than in a file
    StreamSource xmlSource = new StreamSource(new ByteArrayInputStream(input.getBytes("UTF-8")));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    StreamResult xslResult = new StreamResult(new OutputStreamWriter(baos, "UTF-8"));
    t.setParameter("docId", "jeTestAvecRandomId");
    t.transform(xmlSource, xslResult);
    System.out.println("------------------------------------");
}
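Capturing a Transformer's output in a ByteArrayOutputStream works the same way without the skos-alpha.xsl stylesheet. A self-contained sketch using the JDK's identity transform (the XML literal and class name are illustrative, not from opentheso):

import java.io.StringReader;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.commons.io.output.ByteArrayOutputStream;

public class InMemoryXsltExample {
    public static void main(String[] args) throws Exception {
        String xml = "<greeting>hello</greeting>";
        // No stylesheet given, so this is the identity transform: input is copied to output.
        Transformer t = TransformerFactory.newInstance().newTransformer();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        t.transform(new StreamSource(new StringReader(xml)), new StreamResult(out));
        // The transformed document is now available in memory.
        System.out.println(out.toString("UTF-8"));
    }
}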
use of org.apache.commons.io.output.ByteArrayOutputStream in project vcell by virtualcell.
the class VCMongoDbDriver method getBLOB.
public byte[] getBLOB(ObjectId objectId) {
    try {
        if (m == null) {
            String mongoDbHost = PropertyLoader.getRequiredProperty(PropertyLoader.mongodbHostInternal);
            // default 27017
            int mongoDbPort = Integer.parseInt(PropertyLoader.getRequiredProperty(PropertyLoader.mongodbPortInternal));
            m = new MongoClient(mongoDbHost, mongoDbPort);
        }
        ByteArrayOutputStream streamToDownloadTo = new ByteArrayOutputStream();
        MongoDatabase db = m.getDatabase(mongoDbDatabaseName);
        GridFSBucket gridFSBucket = GridFSBuckets.create(db);
        gridFSBucket.downloadToStream(objectId, streamToDownloadTo);
        byte[] blob = streamToDownloadTo.toByteArray();
        return blob;
    } catch (Exception e) {
        e.printStackTrace(System.out);
        try {
            if (m != null) {
                m.close();
            }
        } catch (Exception e2) {
            e2.printStackTrace(System.out);
        } finally {
            m = null;
        }
        throw new RuntimeException("failed to retrieve BLOB with ObjectId " + objectId.toHexString() + ": " + e.getMessage(), e);
    }
}
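Outside VCell's driver, the same download-into-memory call works against any GridFS bucket. A minimal sketch with the MongoDB Java driver; the host, port, and database name are placeholders, and unlike the method above it does not cache or reset a shared client:

import com.mongodb.MongoClient;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import org.apache.commons.io.output.ByteArrayOutputStream;
import org.bson.types.ObjectId;

public class GridFsReadExample {
    // Placeholder connection details; substitute your own deployment.
    static byte[] readBlob(ObjectId id) {
        MongoClient client = new MongoClient("localhost", 27017);
        try {
            MongoDatabase db = client.getDatabase("test");
            GridFSBucket bucket = GridFSBuckets.create(db);
            // Stream the stored file's bytes straight into an in-memory buffer.
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            bucket.downloadToStream(id, out);
            return out.toByteArray();
        } finally {
            client.close();
        }
    }
}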
use of org.apache.commons.io.output.ByteArrayOutputStream in project Anserini by castorini.
the class IndexW2V method indexEmbeddings.
public void indexEmbeddings() throws IOException, InterruptedException {
    LOG.info("Starting indexer...");
    long startTime = System.currentTimeMillis();
    final WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
    final IndexWriterConfig config = new IndexWriterConfig(analyzer);
    final IndexWriter writer = new IndexWriter(directory, config);
    BufferedReader bRdr = new BufferedReader(new FileReader(args.input));
    String line = null;
    // skip the header line of the embeddings file
    bRdr.readLine();
    Document document = new Document();
    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
    int cnt = 0;
    while ((line = bRdr.readLine()) != null) {
        String[] termEmbedding = line.trim().split("\t");
        document.add(new StringField(LuceneDocumentGenerator.FIELD_ID, termEmbedding[0], Field.Store.NO));
        String[] parts = termEmbedding[1].split(" ");
        // pack each float component as 4 bytes into the reusable in-memory buffer
        for (int i = 0; i < parts.length; ++i) {
            byteStream.write(ByteBuffer.allocate(4).putFloat(Float.parseFloat(parts[i])).array());
        }
        document.add(new StoredField(FIELD_BODY, byteStream.toByteArray()));
        byteStream.flush();
        byteStream.reset();
        writer.addDocument(document);
        document.clear();
        cnt++;
        if (cnt % 100000 == 0) {
            LOG.info(cnt + " terms indexed");
        }
    }
    LOG.info(String.format("Total of %s terms added", cnt));
    try {
        writer.commit();
        writer.forceMerge(1);
    } finally {
        try {
            writer.close();
        } catch (IOException e) {
            LOG.error(e);
        }
    }
    LOG.info("Total elapsed time: " + (System.currentTimeMillis() - startTime) + "ms");
}
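The byte layout written above is simply four big-endian bytes per float, one vector per document. A standalone sketch of that packing and the matching unpacking (the sample vector and class name are ours, not Anserini's):

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.commons.io.output.ByteArrayOutputStream;

public class FloatPackingExample {
    public static void main(String[] args) throws IOException {
        float[] vector = {0.25f, -1.5f, 3.0f};
        // Pack each float as 4 big-endian bytes, accumulating in memory.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (float v : vector) {
            out.write(ByteBuffer.allocate(4).putFloat(v).array());
        }
        byte[] packed = out.toByteArray();
        // Unpack to verify the round trip.
        ByteBuffer reader = ByteBuffer.wrap(packed);
        for (int i = 0; i < vector.length; i++) {
            System.out.println(reader.getFloat()); // prints 0.25, -1.5, 3.0
        }
    }
}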