use of au.com.bytecode.opencsv.CSVWriter in project mica2 by obiba.
the class CsvTaxonomyCoverageWriter method write.
public ByteArrayOutputStream write(MicaSearch.TaxonomiesCoverageDto coverage) throws IOException {
    ByteArrayOutputStream values = new ByteArrayOutputStream();
    CSVWriter writer = null;
    try {
        writer = new CSVWriter(new PrintWriter(values));
        List<String> bucketNames = writeHeader(writer, coverage);
        writeBody(writer, coverage, bucketNames);
    } finally {
        if (writer != null)
            writer.close();
    }
    return values;
}
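The pattern above buffers the CSV in memory so the caller can turn it into a byte array, for example for a download response. A minimal standalone sketch of the same idea, using made-up column names and data rather than the mica2 coverage DTO:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import au.com.bytecode.opencsv.CSVWriter;

final class InMemoryCsvExample {
    // Build the CSV entirely in memory and return the raw bytes.
    static byte[] toCsvBytes() throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        CSVWriter writer = new CSVWriter(new PrintWriter(buffer));
        try {
            writer.writeNext(new String[] { "taxonomy", "vocabulary", "coverage" }); // header row
            writer.writeNext(new String[] { "example-taxonomy", "example-term", "3" }); // sample data, not real output
        } finally {
            writer.close(); // also flushes the wrapped PrintWriter
        }
        return buffer.toByteArray();
    }
}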
use of au.com.bytecode.opencsv.CSVWriter in project mica2 by obiba.
the class CsvReportGeneratorImpl method write.
public void write(OutputStream outputStream) {
    try (CSVWriter writer = new CSVWriter(new PrintWriter(new OutputStreamWriter(outputStream, "UTF-8")))) {
        writeHeader(writer);
        writeEachLine(writer);
        outputStream.flush();
    } catch (IOException e) {
        log.error("CSV report extraction failed", e);
        throw new UncheckedIOException(e);
    } catch (Exception e) {
        log.error("CSV report extraction failed", e);
    }
}
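A hypothetical caller for a generator like this (the wiring and the package import for CsvReportGeneratorImpl are assumed, since they are not shown in the snippet) could stream the report straight into a file:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

final class ReportExportExample {
    // Hypothetical usage: the generator writes header and lines and flushes the stream.
    static void exportToFile(CsvReportGeneratorImpl reportGenerator, String path) throws IOException {
        try (OutputStream out = new FileOutputStream(path)) {
            reportGenerator.write(out);
        }
    }
}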
use of au.com.bytecode.opencsv.CSVWriter in project hive by apache.
the class OpenCSVSerde method serialize.
@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    final StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
    final List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
    if (outputFieldRefs.size() != numCols) {
        throw new SerDeException("Cannot serialize the object because there are " + outputFieldRefs.size()
                + " fields but the table has " + numCols + " columns.");
    }
    // Get all data out.
    for (int c = 0; c < numCols; c++) {
        final Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(c));
        final ObjectInspector fieldOI = outputFieldRefs.get(c).getFieldObjectInspector();
        // The data must be of type String
        final StringObjectInspector fieldStringOI = (StringObjectInspector) fieldOI;
        // Convert the field to Java class String, because objects of String type
        // can be stored in String, Text, or some other classes.
        outputFields[c] = fieldStringOI.getPrimitiveJavaObject(field);
    }
    final StringWriter writer = new StringWriter();
    final CSVWriter csv = newWriter(writer, separatorChar, quoteChar, escapeChar);
    try {
        csv.writeNext(outputFields);
        csv.close();
        return new Text(writer.toString());
    } catch (final IOException ioe) {
        throw new SerDeException(ioe);
    }
}
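The snippet delegates writer construction to a newWriter helper. One plausible shape for such a factory, based only on the constructors au.com.bytecode.opencsv.CSVWriter actually provides (an assumption, not the Hive source), picks a constructor depending on whether an escape character is configured and uses an empty line end so the serialized row carries no trailing newline inside the returned Text:

import java.io.Writer;
import au.com.bytecode.opencsv.CSVWriter;

final class CsvWriterFactory {
    // Sketch of a newWriter-style factory: with the default escape character, fall
    // back to the constructor without an escape argument; the empty line end keeps
    // writeNext() from appending a newline after the row.
    static CSVWriter newWriter(Writer writer, char separator, char quote, char escape) {
        if (escape == CSVWriter.DEFAULT_ESCAPE_CHARACTER) {
            return new CSVWriter(writer, separator, quote, "");
        }
        return new CSVWriter(writer, separator, quote, escape, "");
    }
}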
use of au.com.bytecode.opencsv.CSVWriter in project graylog2-server by Graylog2.
the class SearchResponseCsvWriter method writeTo.
@Override
public void writeTo(SearchResponse searchResponse, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
    final CSVWriter csvWriter = new CSVWriter(new OutputStreamWriter(entityStream, StandardCharsets.UTF_8));
    final ImmutableSortedSet<String> sortedFields = ImmutableSortedSet.copyOf(Iterables.concat(searchResponse.fields(), Lists.newArrayList("source", "timestamp")));
    // write field headers
    csvWriter.writeNext(sortedFields.toArray(new String[sortedFields.size()]));
    // write result set in same order as the header row
    final String[] fieldValues = new String[sortedFields.size()];
    for (ResultMessageSummary message : searchResponse.messages()) {
        int idx = 0;
        // first collect all values from the current message
        for (String fieldName : sortedFields) {
            final Object val = message.message().get(fieldName);
            // escape both newlines and carriage returns in a single assignment so idx
            // advances exactly once per field
            fieldValues[idx++] = ((val == null) ? null : val.toString().replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r"));
        }
        // write the complete line, some fields might not be present in the message, so there might be null values
        csvWriter.writeNext(fieldValues);
    }
    if (csvWriter.checkError()) {
        LOG.error("Encountered unspecified error when writing message result as CSV, result is likely malformed.");
    }
    csvWriter.close();
}
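A condensed, standalone sketch of the same approach (not Graylog code): derive a sorted header from the union of field names, then write every message's values in that same order so missing fields simply leave empty cells:

import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
import au.com.bytecode.opencsv.CSVWriter;

final class MessagesToCsv {
    // Header row first, then one row per message with values in header order;
    // absent fields stay null, which CSVWriter emits as an empty cell.
    static void write(List<Map<String, Object>> messages, OutputStream out) throws IOException {
        TreeSet<String> fields = new TreeSet<>();
        for (Map<String, Object> message : messages) {
            fields.addAll(message.keySet());
        }
        CSVWriter csv = new CSVWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
        csv.writeNext(fields.toArray(new String[0]));
        String[] row = new String[fields.size()];
        for (Map<String, Object> message : messages) {
            int idx = 0;
            for (String field : fields) {
                Object val = message.get(field);
                row[idx++] = (val == null) ? null : val.toString();
            }
            csv.writeNext(row);
        }
        csv.close();
    }
}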
use of au.com.bytecode.opencsv.CSVWriter in project OpenRefine by OpenRefine.
the class CsvExporter method export.
@Override
public void export(Project project, Properties params, Engine engine, final Writer writer) throws IOException {
    String optionsString = (params == null) ? null : params.getProperty("options");
    JSONObject options = null;
    if (optionsString != null) {
        try {
            options = ParsingUtilities.evaluateJsonStringToObject(optionsString);
        } catch (JSONException e) {
            // Ignore and keep options null.
        }
    }
    final String separator = options == null ? Character.toString(this.separator)
            : JSONUtilities.getString(options, "separator", Character.toString(this.separator));
    final String lineSeparator = options == null ? CSVWriter.DEFAULT_LINE_END
            : JSONUtilities.getString(options, "lineSeparator", CSVWriter.DEFAULT_LINE_END);
    final boolean quoteAll = options == null ? false : JSONUtilities.getBoolean(options, "quoteAll", false);
    final boolean printColumnHeader = (params != null && params.getProperty("printColumnHeader") != null)
            ? Boolean.parseBoolean(params.getProperty("printColumnHeader")) : true;
    final CSVWriter csvWriter = new CSVWriter(writer, separator.charAt(0), CSVWriter.DEFAULT_QUOTE_CHARACTER, lineSeparator);
    TabularSerializer serializer = new TabularSerializer() {

        @Override
        public void startFile(JSONObject options) {
        }

        @Override
        public void endFile() {
        }

        @Override
        public void addRow(List<CellData> cells, boolean isHeader) {
            if (!isHeader || printColumnHeader) {
                String[] strings = new String[cells.size()];
                for (int i = 0; i < strings.length; i++) {
                    CellData cellData = cells.get(i);
                    strings[i] = (cellData != null && cellData.text != null) ? cellData.text : "";
                }
                csvWriter.writeNext(strings, quoteAll);
            }
        }
    };
    CustomizableTabularExporterUtilities.exportRows(project, engine, params, serializer);
    csvWriter.close();
}
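The exporter ultimately calls the four-argument CSVWriter constructor with a separator, quote character and line end. A minimal sketch of that constructor in isolation (a toy example, not OpenRefine code), configured here for tab-separated output:

import java.io.IOException;
import java.io.StringWriter;
import au.com.bytecode.opencsv.CSVWriter;

final class TsvWriterExample {
    // Same constructor as in the exporter, with a tab separator and the
    // library's default quote character and line end.
    static String writeRows() throws IOException {
        StringWriter out = new StringWriter();
        CSVWriter writer = new CSVWriter(out, '\t', CSVWriter.DEFAULT_QUOTE_CHARACTER, CSVWriter.DEFAULT_LINE_END);
        writer.writeNext(new String[] { "id", "name" });   // header row
        writer.writeNext(new String[] { "1", "Alice" });   // sample data row
        writer.close();
        return out.toString();
    }
}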