Search in sources:

Example 1 with CSVWriter

use of au.com.bytecode.opencsv.CSVWriter in project graylog2-server by Graylog2.

From the class ScrollChunkWriter, method writeTo:

@Override
public void writeTo(ScrollResult.ScrollChunk scrollChunk, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("[{}] Writing chunk {}", Thread.currentThread().getId(), scrollChunk.getChunkNumber());
    }
    final CSVWriter csvWriter = new CSVWriter(new OutputStreamWriter(entityStream, StandardCharsets.UTF_8));
    try {
        final List<String> fields = scrollChunk.getFields();
        final int numberOfFields = fields.size();
        if (scrollChunk.isFirstChunk()) {
            // Write the field headers only once, on the first chunk of the scroll result.
            csvWriter.writeNext(fields.toArray(new String[numberOfFields]));
        }
        // Reused row buffer: one slot per column, filled in the same order as the header row.
        final String[] fieldValues = new String[numberOfFields];
        for (ResultMessage message : scrollChunk.getMessages()) {
            int idx = 0;
            // First collect all values from the current message.
            for (String fieldName : fields) {
                final Object val = message.getMessage().getField(fieldName);
                if (val == null) {
                    // Field not present in this message: leave the cell empty.
                    fieldValues[idx] = null;
                } else {
                    // Escape literal newlines/carriage returns so each CSV record stays on one line.
                    // String.replace performs a literal substitution — no regex needed
                    // (the original replaceAll compiled a pattern per call for the same result).
                    fieldValues[idx] = val.toString().replace("\n", "\\n").replace("\r", "\\r");
                }
                idx++;
            }
            // Write the complete line; absent fields show up as null (empty) cells.
            csvWriter.writeNext(fieldValues);
        }
        if (csvWriter.checkError()) {
            LOG.error("Encountered unspecified error when writing message result as CSV, result is likely malformed.");
        }
    } finally {
        // Always close (and thereby flush) the writer, even if writing a row threw —
        // the original leaked the writer and could emit a truncated response on error.
        csvWriter.close();
    }
}
Also used : CSVWriter(au.com.bytecode.opencsv.CSVWriter) OutputStreamWriter(java.io.OutputStreamWriter) ResultMessage(org.graylog2.indexer.results.ResultMessage)

Example 2 with CSVWriter

use of au.com.bytecode.opencsv.CSVWriter in project searchcode-server by boyter.

From the class IndexBaseRepoJob, method logIndexed:

/**
     * Logs to the logs directory a formatted CSV of the supplied list of rows.
     * The CSV is written to a {@code .csv.tmp} file first and then renamed into
     * place, so readers never observe a partially written report.
     * I/O failures are logged as warnings and otherwise swallowed (best-effort logging).
     */
private void logIndexed(String repoName, List<String[]> reportList) {
    // Build the temp path once instead of concatenating it twice.
    // NOTE(review): repoName is assumed to be a safe file-name component — confirm callers sanitize it.
    final String tmpPath = Singleton.getHelpers().getLogPath() + repoName + ".csv.tmp";
    try {
        CSVWriter writer = new CSVWriter(new FileWriter(tmpPath));
        try {
            writer.writeAll(reportList);
            writer.flush();
        } finally {
            // Close in finally so the FileWriter is released even if writeAll/flush throws;
            // the original leaked the file handle on error.
            writer.close();
        }
        Path source = Paths.get(tmpPath);
        // Replace any previous report atomically-ish via rename.
        Files.move(source, source.resolveSibling(repoName + ".csv"), StandardCopyOption.REPLACE_EXISTING);
    } catch (IOException ex) {
        Singleton.getLogger().warning("ERROR - caught a " + ex.getClass() + " in " + this.getClass() + " logIndexed for " + repoName + "\n with message: " + ex.getMessage());
    }
}
Also used : FileWriter(java.io.FileWriter) CSVWriter(au.com.bytecode.opencsv.CSVWriter) IOException(java.io.IOException)

Example 3 with CSVWriter

use of au.com.bytecode.opencsv.CSVWriter in project gatk-protected by broadinstitute.

From the class DecomposeSingularValues, method writeMatrix:

/**
 * Writes the given matrix to {@code outputFilename} as tab-separated, unquoted text,
 * one row per line, with each value rendered via {@link Double#toString(double)}.
 *
 * @param m              matrix to serialize
 * @param outputFilename destination file (overwritten if it exists)
 * @throws IOException if the file cannot be written
 */
private void writeMatrix(final RealMatrix m, final File outputFilename) throws IOException {
    final List<String[]> textTable = new ArrayList<>();
    for (int i = 0; i < m.getRowDimension(); i++) {
        textTable.add(Arrays.stream(m.getRow(i)).mapToObj(Double::toString).toArray(String[]::new));
    }
    final FileWriter fw = new FileWriter(outputFilename);
    final CSVWriter csvWriter = new CSVWriter(fw, '\t', CSVWriter.NO_QUOTE_CHARACTER);
    try {
        csvWriter.writeAll(textTable);
        csvWriter.flush();
    } finally {
        // Close in finally so the underlying FileWriter is released even if
        // writeAll/flush throws — the original leaked the handle on error.
        // Closing the CSVWriter also closes the wrapped FileWriter.
        csvWriter.close();
    }
}
Also used : FileWriter(java.io.FileWriter) ArrayList(java.util.ArrayList) CSVWriter(au.com.bytecode.opencsv.CSVWriter)

Example 4 with CSVWriter

use of au.com.bytecode.opencsv.CSVWriter in project hive by apache.

From the class OpenCSVSerde, method serialize:

@Override
public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    // Serializes one row object as a single CSV line wrapped in a Text writable.
    final StructObjectInspector rowInspector = (StructObjectInspector) objInspector;
    final List<? extends StructField> fieldRefs = rowInspector.getAllStructFieldRefs();
    // The row must have exactly as many fields as the table has columns.
    if (fieldRefs.size() != numCols) {
        throw new SerDeException("Cannot serialize the object because there are " + fieldRefs.size() + " fields but the table has " + numCols + " columns.");
    }
    // Extract every column value as a Java String into the reusable output buffer.
    for (int col = 0; col < numCols; col++) {
        final StructField fieldRef = fieldRefs.get(col);
        final Object fieldData = rowInspector.getStructFieldData(obj, fieldRef);
        // Each column is expected to be string-typed for this SerDe.
        final StringObjectInspector stringInspector = (StringObjectInspector) fieldRef.getFieldObjectInspector();
        // getPrimitiveJavaObject normalizes String/Text/other string-like holders to java.lang.String.
        outputFields[col] = stringInspector.getPrimitiveJavaObject(fieldData);
    }
    // Render the buffered values as one CSV record using the configured separator/quote/escape.
    final StringWriter buffer = new StringWriter();
    final CSVWriter csv = newWriter(buffer, separatorChar, quoteChar, escapeChar);
    try {
        csv.writeNext(outputFields);
        csv.close();
        return new Text(buffer.toString());
    } catch (final IOException ioe) {
        // Surface any I/O failure as the SerDe-level exception callers expect.
        throw new SerDeException(ioe);
    }
}
Also used : StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) ObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector) StringWriter(java.io.StringWriter) CSVWriter(au.com.bytecode.opencsv.CSVWriter) Text(org.apache.hadoop.io.Text) IOException(java.io.IOException) StringObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector) StructObjectInspector(org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)

Example 5 with CSVWriter

use of au.com.bytecode.opencsv.CSVWriter in project OpenRefine by OpenRefine.

From the class CsvExporter, method export:

@Override
public void export(Project project, Properties params, Engine engine, final Writer writer) throws IOException {
    // Optional JSON "options" blob may override separator, line ending, and quoting.
    String optionsString = (params == null) ? null : params.getProperty("options");
    JSONObject options = null;
    if (optionsString != null) {
        try {
            options = ParsingUtilities.evaluateJsonStringToObject(optionsString);
        } catch (JSONException e) {
            // Ignore and keep options null: fall back to the exporter defaults below.
        }
    }
    final String separator = options == null ? Character.toString(this.separator) : JSONUtilities.getString(options, "separator", Character.toString(this.separator));
    final String lineSeparator = options == null ? CSVWriter.DEFAULT_LINE_END : JSONUtilities.getString(options, "lineSeparator", CSVWriter.DEFAULT_LINE_END);
    final boolean quoteAll = options == null ? false : JSONUtilities.getBoolean(options, "quoteAll", false);
    final boolean printColumnHeader = (params != null && params.getProperty("printColumnHeader") != null) ? Boolean.parseBoolean(params.getProperty("printColumnHeader")) : true;
    final CSVWriter csvWriter = new CSVWriter(writer, separator.charAt(0), CSVWriter.DEFAULT_QUOTE_CHARACTER, lineSeparator);
    try {
        TabularSerializer serializer = new TabularSerializer() {

            @Override
            public void startFile(JSONObject options) {
            }

            @Override
            public void endFile() {
            }

            @Override
            public void addRow(List<CellData> cells, boolean isHeader) {
                // Skip the header row entirely when printColumnHeader is disabled.
                if (!isHeader || printColumnHeader) {
                    String[] strings = new String[cells.size()];
                    for (int i = 0; i < strings.length; i++) {
                        CellData cellData = cells.get(i);
                        // Null cells and cells without text render as empty fields.
                        strings[i] = (cellData != null && cellData.text != null) ? cellData.text : "";
                    }
                    csvWriter.writeNext(strings, quoteAll);
                }
            }
        };
        CustomizableTabularExporterUtilities.exportRows(project, engine, params, serializer);
    } finally {
        // Close in finally so the writer is flushed/closed even if exportRows throws;
        // the original skipped close() on error, potentially truncating the output.
        csvWriter.close();
    }
}
Also used : JSONObject(org.json.JSONObject) JSONException(org.json.JSONException) CSVWriter(au.com.bytecode.opencsv.CSVWriter) List(java.util.List)

Aggregations

CSVWriter (au.com.bytecode.opencsv.CSVWriter)16 FileWriter (java.io.FileWriter)6 IOException (java.io.IOException)5 ArrayList (java.util.ArrayList)5 OutputStreamWriter (java.io.OutputStreamWriter)4 File (java.io.File)3 StringWriter (java.io.StringWriter)2 Measure (com.secupwn.aimsicd.data.model.Measure)1 SQLException (java.sql.SQLException)1 List (java.util.List)1 AtomicLong (java.util.concurrent.atomic.AtomicLong)1 ServletException (javax.servlet.ServletException)1 ObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector)1 StructObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector)1 StringObjectInspector (org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector)1 Text (org.apache.hadoop.io.Text)1 ResultMessage (org.graylog2.indexer.results.ResultMessage)1 ResultMessageSummary (org.graylog2.rest.models.messages.responses.ResultMessageSummary)1 DBSObject (org.jkiss.dbeaver.model.struct.DBSObject)1 DBSTypedObject (org.jkiss.dbeaver.model.struct.DBSTypedObject)1