Example usage of au.com.bytecode.opencsv.CSVWriter in the graylog2-server project (by Graylog2): the writeTo method of the ScrollChunkWriter class.
/**
 * Streams a single {@link ScrollResult.ScrollChunk} to the HTTP response as CSV.
 * Writes the header row only for the first chunk, then one CSV line per message,
 * emitting fields in the same order as the header.
 *
 * @param scrollChunk  the chunk of scroll results to serialize
 * @param entityStream the response body stream; written as UTF-8
 * @throws IOException if writing to the stream fails
 */
@Override
public void writeTo(ScrollResult.ScrollChunk scrollChunk, Class<?> type, Type genericType,
                    Annotation[] annotations, MediaType mediaType,
                    MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream)
        throws IOException, WebApplicationException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("[{}] Writing chunk {}", Thread.currentThread().getId(), scrollChunk.getChunkNumber());
    }
    // try-with-resources guarantees the writer is flushed and closed even if a write fails
    try (final CSVWriter csvWriter = new CSVWriter(new OutputStreamWriter(entityStream, StandardCharsets.UTF_8))) {
        final List<String> fields = scrollChunk.getFields();
        final int numberOfFields = fields.size();
        if (scrollChunk.isFirstChunk()) {
            // write field headers only on first chunk
            csvWriter.writeNext(fields.toArray(new String[numberOfFields]));
        }
        // write result set in same order as the header row; the buffer is reused across
        // messages, but every slot is overwritten (or explicitly nulled) per message
        final String[] fieldValues = new String[numberOfFields];
        for (ResultMessage message : scrollChunk.getMessages()) {
            int idx = 0;
            // first collect all values from the current message
            for (String fieldName : fields) {
                final Object val = message.getMessage().getField(fieldName);
                if (val == null) {
                    // some fields might not be present in the message
                    fieldValues[idx] = null;
                } else {
                    // escape literal newlines so each record stays on one CSV line;
                    // String.replace does a literal substitution (no regex cost),
                    // equivalent to the previous replaceAll calls
                    fieldValues[idx] = val.toString().replace("\n", "\\n").replace("\r", "\\r");
                }
                idx++;
            }
            csvWriter.writeNext(fieldValues);
        }
        if (csvWriter.checkError()) {
            LOG.error("Encountered unspecified error when writing message result as CSV, result is likely malformed.");
        }
    }
}
Example usage of au.com.bytecode.opencsv.CSVWriter in the searchcode-server project (by boyter): the logIndexed method of the IndexBaseRepoJob class.
/**
 * Logs to the logs directory a formatted CSV of the supplied list of rows.
 * The output is first written to "&lt;repoName&gt;.csv.tmp" and then renamed to
 * "&lt;repoName&gt;.csv" so readers never observe a partially written file.
 *
 * @param repoName   repository name, used as the base name of the log file
 * @param reportList CSV rows, one String[] of column values per line
 */
private void logIndexed(String repoName, List<String[]> reportList) {
    try {
        final String tmpPath = Singleton.getHelpers().getLogPath() + repoName + ".csv.tmp";
        // try-with-resources closes the writer even if writeAll throws
        try (CSVWriter writer = new CSVWriter(new FileWriter(tmpPath))) {
            writer.writeAll(reportList);
            writer.flush();
        }
        Path source = Paths.get(tmpPath);
        Files.move(source, source.resolveSibling(repoName + ".csv"), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException ex) {
        // best-effort logging: failures are reported but never propagate to the indexing job
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " logIndexed for " + repoName + "\n with message: " + ex.getMessage());
    }
}
Example usage of au.com.bytecode.opencsv.CSVWriter in the gatk-protected project (by the Broad Institute): the writeMatrix method of the DecomposeSingularValues class.
/**
 * Writes the given matrix to a file as tab-separated, unquoted text, one row per line.
 *
 * @param m              matrix to serialize; each element rendered via Double.toString
 * @param outputFilename destination file (overwritten if it already exists)
 * @throws IOException if the file cannot be created or written
 */
private void writeMatrix(final RealMatrix m, final File outputFilename) throws IOException {
    final List<String[]> textTable = new ArrayList<>();
    for (int i = 0; i < m.getRowDimension(); i++) {
        textTable.add(Arrays.stream(m.getRow(i)).mapToObj(Double::toString).toArray(String[]::new));
    }
    // try-with-resources closes (and thereby flushes) both writers even if writeAll throws
    try (final FileWriter fw = new FileWriter(outputFilename);
         final CSVWriter csvWriter = new CSVWriter(fw, '\t', CSVWriter.NO_QUOTE_CHARACTER)) {
        csvWriter.writeAll(textTable);
    }
}
Example usage of au.com.bytecode.opencsv.CSVWriter in the Hive project (by Apache): the serialize method of the OpenCSVSerde class.
/**
 * Serializes one row object into a single CSV-formatted {@link Text} line using the
 * configured separator, quote, and escape characters.
 *
 * @param obj          the row to serialize
 * @param objInspector must be a {@link StructObjectInspector} whose fields are all strings
 * @return a {@link Text} containing the CSV-encoded row
 * @throws SerDeException if the field count does not match the table, or on write failure
 */
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    final StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
    final List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
    if (outputFieldRefs.size() != numCols) {
        throw new SerDeException("Cannot serialize the object because there are " + outputFieldRefs.size() + " fields but the table has " + numCols + " columns.");
    }
    // Get all data out.
    for (int c = 0; c < numCols; c++) {
        final Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(c));
        final ObjectInspector fieldOI = outputFieldRefs.get(c).getFieldObjectInspector();
        // The data must be of type String
        final StringObjectInspector fieldStringOI = (StringObjectInspector) fieldOI;
        // Convert the field to Java class String, because objects of String type
        // can be stored in String, Text, or some other classes.
        outputFields[c] = fieldStringOI.getPrimitiveJavaObject(field);
    }
    final StringWriter writer = new StringWriter();
    // try-with-resources closes the CSV writer even if writeNext fails;
    // close() also flushes any buffered output into the StringWriter
    try (final CSVWriter csv = newWriter(writer, separatorChar, quoteChar, escapeChar)) {
        csv.writeNext(outputFields);
    } catch (final IOException ioe) {
        // preserve the cause so callers can diagnose the underlying I/O failure
        throw new SerDeException(ioe);
    }
    return new Text(writer.toString());
}
Example usage of au.com.bytecode.opencsv.CSVWriter in the OpenRefine project: the export method of the CsvExporter class.
/**
 * Exports the project's rows as separator-delimited text to the supplied writer.
 * Options may be overridden via a JSON "options" property in {@code params}
 * (separator, lineSeparator, quoteAll); "printColumnHeader" controls the header row.
 *
 * @param project the project whose rows are exported
 * @param params  export parameters; may be null, in which case defaults apply
 * @param engine  filtering engine selecting which rows are exported
 * @param writer  destination writer; flushed and closed when the export finishes
 * @throws IOException if writing fails
 */
@Override
public void export(Project project, Properties params, Engine engine, final Writer writer) throws IOException {
    String optionsString = (params == null) ? null : params.getProperty("options");
    JSONObject options = null;
    if (optionsString != null) {
        try {
            options = ParsingUtilities.evaluateJsonStringToObject(optionsString);
        } catch (JSONException e) {
            // Ignore and keep options null.
        }
    }
    // each option falls back to the exporter's default when no options object was supplied
    final String separator = options == null ? Character.toString(this.separator)
            : JSONUtilities.getString(options, "separator", Character.toString(this.separator));
    final String lineSeparator = options == null ? CSVWriter.DEFAULT_LINE_END
            : JSONUtilities.getString(options, "lineSeparator", CSVWriter.DEFAULT_LINE_END);
    final boolean quoteAll = options == null ? false : JSONUtilities.getBoolean(options, "quoteAll", false);
    final boolean printColumnHeader = (params != null && params.getProperty("printColumnHeader") != null)
            ? Boolean.parseBoolean(params.getProperty("printColumnHeader"))
            : true;
    final CSVWriter csvWriter = new CSVWriter(writer, separator.charAt(0), CSVWriter.DEFAULT_QUOTE_CHARACTER, lineSeparator);
    try {
        TabularSerializer serializer = new TabularSerializer() {
            @Override
            public void startFile(JSONObject options) {
            }

            @Override
            public void endFile() {
            }

            @Override
            public void addRow(List<CellData> cells, boolean isHeader) {
                if (!isHeader || printColumnHeader) {
                    String[] strings = new String[cells.size()];
                    for (int i = 0; i < strings.length; i++) {
                        CellData cellData = cells.get(i);
                        // null cells and null text both serialize as the empty string
                        strings[i] = (cellData != null && cellData.text != null) ? cellData.text : "";
                    }
                    csvWriter.writeNext(strings, quoteAll);
                }
            }
        };
        CustomizableTabularExporterUtilities.exportRows(project, engine, params, serializer);
    } finally {
        // always close so buffered output is flushed even if exportRows throws
        csvWriter.close();
    }
}
Aggregations