
Example 6 with CSVWriter

Use of au.com.bytecode.opencsv.CSVWriter in project mica2 by obiba.

Class CsvTaxonomyCoverageWriter, method write.

public ByteArrayOutputStream write(MicaSearch.TaxonomiesCoverageDto coverage) throws IOException {
    ByteArrayOutputStream values = new ByteArrayOutputStream();
    CSVWriter writer = null;
    try {
        writer = new CSVWriter(new PrintWriter(values));
        List<String> bucketNames = writeHeader(writer, coverage);
        writeBody(writer, coverage, bucketNames);
    } finally {
        if (writer != null)
            writer.close();
    }
    return values;
}
Also used: CSVWriter(au.com.bytecode.opencsv.CSVWriter), ByteArrayOutputStream(java.io.ByteArrayOutputStream), PrintWriter(java.io.PrintWriter)
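
For readers skimming the pattern rather than the project, here is a minimal, self-contained sketch of the same in-memory approach. The header and row values are made up for illustration; the real writer derives them from the coverage DTO, so treat this as a sketch, not the mica2 implementation.

import au.com.bytecode.opencsv.CSVWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintWriter;

public class InMemoryCsvSketch {

    // Builds a small CSV document in memory and returns the buffered bytes.
    public static ByteArrayOutputStream write() throws IOException {
        ByteArrayOutputStream values = new ByteArrayOutputStream();
        CSVWriter writer = null;
        try {
            writer = new CSVWriter(new PrintWriter(values));
            // Hypothetical header and body rows standing in for the coverage data.
            writer.writeNext(new String[] { "taxonomy", "vocabulary", "hits" });
            writer.writeNext(new String[] { "area", "diseases", "42" });
        } finally {
            if (writer != null)
                writer.close();
        }
        return values;
    }
}

The returned stream can then be sent as a download response or converted to text with values.toString("UTF-8").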

Example 7 with CSVWriter

Use of au.com.bytecode.opencsv.CSVWriter in project mica2 by obiba.

Class CsvReportGeneratorImpl, method write.

public void write(OutputStream outputStream) {
    try (CSVWriter writer = new CSVWriter(new PrintWriter(new OutputStreamWriter(outputStream, "UTF-8")))) {
        writeHeader(writer);
        writeEachLine(writer);
        outputStream.flush();
    } catch (IOException e) {
        log.error("CSV report extraction failed", e);
        throw new UncheckedIOException(e);
    } catch (Exception e) {
        log.error("CSV report extraction failed", e);
    }
}
Also used: CSVWriter(au.com.bytecode.opencsv.CSVWriter)
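
A stripped-down sketch of the same try-with-resources pattern, assuming only that the caller supplies some OutputStream; the report header and data line are invented placeholders, not the mica2 report content.

import au.com.bytecode.opencsv.CSVWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;

public class CsvReportSketch {

    // Streams a tiny UTF-8 CSV report to the supplied output stream.
    public void write(OutputStream outputStream) {
        try (CSVWriter writer = new CSVWriter(
                new PrintWriter(new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)))) {
            // Placeholder header and a single data line; the real generator loops over report rows.
            writer.writeNext(new String[] { "study", "population", "dataset" });
            writer.writeNext(new String[] { "study-1", "pop-1", "dataset-1" });
            // Closing the writer at the end of the try block flushes through to outputStream.
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}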

Example 8 with CSVWriter

Use of au.com.bytecode.opencsv.CSVWriter in project hive by apache.

Class OpenCSVSerde, method serialize.

@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    final StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
    final List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
    if (outputFieldRefs.size() != numCols) {
        throw new SerDeException("Cannot serialize the object because there are " + outputFieldRefs.size() + " fields but the table has " + numCols + " columns.");
    }
    // Get all data out.
    for (int c = 0; c < numCols; c++) {
        final Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(c));
        final ObjectInspector fieldOI = outputFieldRefs.get(c).getFieldObjectInspector();
        // The data must be of type String
        final StringObjectInspector fieldStringOI = (StringObjectInspector) fieldOI;
        // Convert the field to Java class String, because objects of String type
        // can be stored in String, Text, or some other classes.
        outputFields[c] = fieldStringOI.getPrimitiveJavaObject(field);
    }
    final StringWriter writer = new StringWriter();
    final CSVWriter csv = newWriter(writer, separatorChar, quoteChar, escapeChar);
    try {
        csv.writeNext(outputFields);
        csv.close();
        return new Text(writer.toString());
    } catch (final IOException ioe) {
        throw new SerDeException(ioe);
    }
}
Also used: StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector), StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector), ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector), StringWriter(java.io.StringWriter), CSVWriter(au.com.bytecode.opencsv.CSVWriter), Text(org.apache.hadoop.io.Text), IOException(java.io.IOException)
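
The serde's newWriter helper is not shown above. As a rough, hypothetical stand-in, the sketch below builds the writer directly through opencsv's (writer, separator, quote, escape) constructor and serializes one row of already-stringified fields; the sample values are placeholders.

import au.com.bytecode.opencsv.CSVWriter;
import java.io.IOException;
import java.io.StringWriter;

public class SingleRowCsvSketch {

    // Renders one row of string fields as a single CSV record and returns it as text.
    public static String serializeRow(String[] fields, char separatorChar, char quoteChar, char escapeChar)
            throws IOException {
        StringWriter writer = new StringWriter();
        CSVWriter csv = new CSVWriter(writer, separatorChar, quoteChar, escapeChar);
        csv.writeNext(fields);
        csv.close();
        // The serde would wrap this string in a Hadoop Text before returning it.
        return writer.toString();
    }

    public static void main(String[] args) throws IOException {
        // Placeholder field values standing in for the struct fields extracted above.
        System.out.print(serializeRow(new String[] { "a", "b,c", "d\"e" }, ',', '"', '\\'));
    }
}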

Example 9 with CSVWriter

Use of au.com.bytecode.opencsv.CSVWriter in project graylog2-server by Graylog2.

Class SearchResponseCsvWriter, method writeTo.

@Override
public void writeTo(SearchResponse searchResponse, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
    final CSVWriter csvWriter = new CSVWriter(new OutputStreamWriter(entityStream, StandardCharsets.UTF_8));
    final ImmutableSortedSet<String> sortedFields = ImmutableSortedSet.copyOf(Iterables.concat(searchResponse.fields(), Lists.newArrayList("source", "timestamp")));
    // write field headers
    csvWriter.writeNext(sortedFields.toArray(new String[sortedFields.size()]));
    // write result set in same order as the header row
    final String[] fieldValues = new String[sortedFields.size()];
    for (ResultMessageSummary message : searchResponse.messages()) {
        int idx = 0;
        // first collect all values from the current message
        for (String fieldName : sortedFields) {
            final Object val = message.message().get(fieldName);
            fieldValues[idx++] = ((val == null) ? null : val.toString().replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r"));
        }
        // write the complete line, some fields might not be present in the message, so there might be null values
        csvWriter.writeNext(fieldValues);
    }
    if (csvWriter.checkError()) {
        LOG.error("Encountered unspecified error when writing message result as CSV, result is likely malformed.");
    }
    csvWriter.close();
}
Also used: CSVWriter(au.com.bytecode.opencsv.CSVWriter), OutputStreamWriter(java.io.OutputStreamWriter), ResultMessageSummary(org.graylog2.rest.models.messages.responses.ResultMessageSummary)
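
A simplified sketch of the same header-then-rows flow, assuming plain maps in place of Graylog's ResultMessageSummary and a fixed, made-up field set; only the column ordering and newline escaping mirror the code above.

import au.com.bytecode.opencsv.CSVWriter;
import com.google.common.collect.ImmutableSortedSet;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

public class CsvMessagesSketch {

    // Writes one header row, then one row per message, with columns in sorted field order.
    public static void write(List<Map<String, Object>> messages, OutputStream entityStream) throws IOException {
        // Hypothetical field set; the real writer merges the response's fields with "source" and "timestamp".
        final ImmutableSortedSet<String> sortedFields = ImmutableSortedSet.of("message", "source", "timestamp");
        final CSVWriter csvWriter = new CSVWriter(new OutputStreamWriter(entityStream, StandardCharsets.UTF_8));
        csvWriter.writeNext(sortedFields.toArray(new String[0]));
        final String[] fieldValues = new String[sortedFields.size()];
        for (Map<String, Object> message : messages) {
            int idx = 0;
            for (String fieldName : sortedFields) {
                final Object val = message.get(fieldName);
                // Escape literal newlines and carriage returns so every record stays on one physical line.
                fieldValues[idx++] = (val == null)
                        ? null
                        : val.toString().replaceAll("\n", "\\\\n").replaceAll("\r", "\\\\r");
            }
            csvWriter.writeNext(fieldValues);
        }
        csvWriter.close();
    }
}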

Example 10 with CSVWriter

Use of au.com.bytecode.opencsv.CSVWriter in project OpenRefine by OpenRefine.

Class CsvExporter, method export.

@Override
public void export(Project project, Properties params, Engine engine, final Writer writer) throws IOException {
    String optionsString = (params == null) ? null : params.getProperty("options");
    JSONObject options = null;
    if (optionsString != null) {
        try {
            options = ParsingUtilities.evaluateJsonStringToObject(optionsString);
        } catch (JSONException e) {
        // Ignore and keep options null.
        }
    }
    final String separator = options == null ? Character.toString(this.separator) : JSONUtilities.getString(options, "separator", Character.toString(this.separator));
    final String lineSeparator = options == null ? CSVWriter.DEFAULT_LINE_END : JSONUtilities.getString(options, "lineSeparator", CSVWriter.DEFAULT_LINE_END);
    final boolean quoteAll = options == null ? false : JSONUtilities.getBoolean(options, "quoteAll", false);
    final boolean printColumnHeader = (params != null && params.getProperty("printColumnHeader") != null) ? Boolean.parseBoolean(params.getProperty("printColumnHeader")) : true;
    final CSVWriter csvWriter = new CSVWriter(writer, separator.charAt(0), CSVWriter.DEFAULT_QUOTE_CHARACTER, lineSeparator);
    TabularSerializer serializer = new TabularSerializer() {

        @Override
        public void startFile(JSONObject options) {
        }

        @Override
        public void endFile() {
        }

        @Override
        public void addRow(List<CellData> cells, boolean isHeader) {
            if (!isHeader || printColumnHeader) {
                String[] strings = new String[cells.size()];
                for (int i = 0; i < strings.length; i++) {
                    CellData cellData = cells.get(i);
                    strings[i] = (cellData != null && cellData.text != null) ? cellData.text : "";
                }
                csvWriter.writeNext(strings, quoteAll);
            }
        }
    };
    CustomizableTabularExporterUtilities.exportRows(project, engine, params, serializer);
    csvWriter.close();
}
Also used: JSONObject(org.json.JSONObject), JSONException(org.json.JSONException), CSVWriter(au.com.bytecode.opencsv.CSVWriter), List(java.util.List)
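
To isolate just the writer configuration used above, here is a minimal sketch of opencsv's (writer, separator, quote char, line end) constructor with placeholder rows; the exporter itself feeds rows in through the TabularSerializer callback rather than writing them directly.

import au.com.bytecode.opencsv.CSVWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;

public class ConfigurableCsvSketch {

    // Writes a header row and one data row with a caller-chosen separator and line ending.
    public static void export(Writer writer, char separator, String lineSeparator) throws IOException {
        CSVWriter csvWriter = new CSVWriter(writer, separator, CSVWriter.DEFAULT_QUOTE_CHARACTER, lineSeparator);
        // Placeholder rows standing in for the project's header and data rows.
        csvWriter.writeNext(new String[] { "column 1", "column 2" });
        csvWriter.writeNext(new String[] { "value 1", "value 2" });
        csvWriter.close();
    }

    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        export(out, '\t', CSVWriter.DEFAULT_LINE_END);
        System.out.print(out);
    }
}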

Aggregations

CSVWriter (au.com.bytecode.opencsv.CSVWriter): 23
IOException (java.io.IOException): 9
FileWriter (java.io.FileWriter): 7
PrintWriter (java.io.PrintWriter): 5
ArrayList (java.util.ArrayList): 5
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 4
OutputStreamWriter (java.io.OutputStreamWriter): 4
File (java.io.File): 3
StringWriter (java.io.StringWriter): 2
List (java.util.List): 2
Lists (com.google.common.collect.Lists): 1
Measure (com.secupwn.aimsicd.data.model.Measure): 1
DataflowException (edu.uci.ics.texera.api.exception.DataflowException): 1
Tuple (edu.uci.ics.texera.api.tuple.Tuple): 1
UncheckedIOException (java.io.UncheckedIOException): 1
AtomicLong (java.util.concurrent.atomic.AtomicLong): 1
Collectors (java.util.stream.Collectors): 1
TableModel (javax.swing.table.TableModel): 1
ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector): 1
StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector): 1