Search in sources :

Example 31 with CSVFormat

use of org.apache.commons.csv.CSVFormat in project Orthanc_Tools by salimkanoun.

From the class AutoQuery, method csvReading.

/**
 * Reads a CSV file (Excel dialect, UTF-8, first record treated as header,
 * empty lines ignored) and appends each record as a row of the given table.
 * Records with fewer than eight columns are discarded; if any were discarded,
 * a warning dialog reports the count.
 *
 * @param file  CSV file to read
 * @param table target table; its model must be a {@link DefaultTableModel}
 * @throws IOException if the file cannot be opened or parsed
 */
protected void csvReading(File file, JTable table) throws IOException {
    CSVFormat csvFileFormat = CSVFormat.EXCEL.withFirstRecordAsHeader().withIgnoreEmptyLines();
    int discarded = 0;
    // try-with-resources: the parser (and its underlying stream) was previously leaked
    try (CSVParser csvParser = CSVParser.parse(file, StandardCharsets.UTF_8, csvFileFormat)) {
        List<CSVRecord> records = csvParser.getRecords();
        // Hoisted out of the loop: the model does not change between rows
        DefaultTableModel model = (DefaultTableModel) table.getModel();
        for (int i = 0; i < records.size(); i++) {
            CSVRecord record = records.get(i);
            try {
                // Expected columns: name, first name, patient ID, accession number,
                // date from, date to, modality, study description
                String name = record.get(0);
                String prenom = record.get(1);
                String id = record.get(2);
                String accession = record.get(3);
                String dateFrom = record.get(4);
                String dateTo = record.get(5);
                String modality = record.get(6);
                String studyDescription = record.get(7);
                model.addRow(new Object[] { name, prenom, id, accession, dateFrom, dateTo, modality, studyDescription });
            } catch (NullPointerException | ArrayIndexOutOfBoundsException e) {
                // Malformed record: skip it but keep importing the rest of the file
                System.out.println("Error in line " + i + " discarding");
                discarded++;
            }
        }
    }
    // The discarded counter doubles as the error flag (replaces the boxed Boolean)
    if (discarded > 0)
        JOptionPane.showMessageDialog(null, discarded + " lines discarded, see console for more details", "Wrong Input", JOptionPane.WARNING_MESSAGE);
}
Also used : CSVParser(org.apache.commons.csv.CSVParser) DefaultTableModel(javax.swing.table.DefaultTableModel) CSVFormat(org.apache.commons.csv.CSVFormat) CSVRecord(org.apache.commons.csv.CSVRecord)

Example 32 with CSVFormat

use of org.apache.commons.csv.CSVFormat in project symja_android_library by axkr.

From the class ImportString, method evaluate.

/**
 * Implements ImportString: converts the string given as the first argument into
 * an expression according to the format named by the optional second argument
 * (defaults to plain text).
 *
 * @param ast    the function call; arg1 is the data string, optional arg2 the format name
 * @param engine the evaluation engine
 * @return the imported expression, or {@link F#NIL} on invalid arguments or errors
 */
@Override
public IExpr evaluate(final IAST ast, EvalEngine engine) {
    if (!(ast.arg1() instanceof IStringX)) {
        return F.NIL;
    }
    final String data = ((IStringX) ast.arg1()).toString();
    Extension extension = Extension.TXT;
    if (ast.size() > 2) {
        if (!(ast.arg2() instanceof IStringX)) {
            return F.NIL;
        }
        extension = Extension.importExtension(((IStringX) ast.arg2()).toString());
    }
    try {
        switch(extension) {
            case JSON:
                return JSONConvert.importJSON(data);
            case EXPRESSIONJSON:
                return ExpressionJSONConvert.importExpressionJSON(data);
            case TABLE:
                // Parse the string as RFC 4180 CSV; every cell is parsed as an
                // expression and the result is a list of row lists.
                final AST2Expr nodeConverter = new AST2Expr(engine.isRelaxedSyntax(), engine);
                final Parser cellParser = new Parser(engine.isRelaxedSyntax(), true);
                CSVFormat tableFormat = CSVFormat.RFC4180.withDelimiter(',');
                Iterable<CSVRecord> rows = tableFormat.parse(new StringReader(data));
                IASTAppendable rowsList = F.ListAlloc(256);
                for (CSVRecord row : rows) {
                    IASTAppendable cellsList = F.ListAlloc(row.size());
                    for (String cell : row) {
                        final ASTNode parsed = cellParser.parse(cell);
                        cellsList.append(nodeConverter.convert(parsed));
                    }
                    rowsList.append(cellsList);
                }
                return rowsList;
            case STRING:
                return ofString(data, engine);
            case TXT:
                return ofText(data, engine);
            default:
        }
    } catch (SyntaxError se) {
        LOGGER.log(engine.getLogLevel(), "ImportString: syntax error!", se);
    } catch (Exception ex) {
        LOGGER.log(engine.getLogLevel(), "ImportString", ex);
    }
    return F.NIL;
}
Also used : AST2Expr(org.matheclipse.core.convert.AST2Expr) Parser(org.matheclipse.parser.client.Parser) Extension(org.matheclipse.core.io.Extension) IASTAppendable(org.matheclipse.core.interfaces.IASTAppendable) SyntaxError(org.matheclipse.parser.client.SyntaxError) StringReader(java.io.StringReader) ASTNode(org.matheclipse.parser.client.ast.ASTNode) CSVFormat(org.apache.commons.csv.CSVFormat) CSVRecord(org.apache.commons.csv.CSVRecord) IStringX(org.matheclipse.core.interfaces.IStringX) IExpr(org.matheclipse.core.interfaces.IExpr)

Example 33 with CSVFormat

use of org.apache.commons.csv.CSVFormat in project beam by apache.

From the class SqlTransformRunner, method runUsingSqlTransform.

/**
 * This is the default method in BeamTpcds.main method. Run job using SqlTransform.query() method.
 *
 * @param args Command line arguments
 * @throws Exception if option parsing, query reading, or pipeline setup fails
 */
public static void runUsingSqlTransform(String[] args) throws Exception {
    TpcdsOptions tpcdsOptions = PipelineOptionsFactory.fromArgs(args).withValidation().as(TpcdsOptions.class);
    String dataSize = TpcdsParametersReader.getAndCheckDataSize(tpcdsOptions);
    String[] queryNames = TpcdsParametersReader.getAndCheckQueryNames(tpcdsOptions);
    int nThreads = TpcdsParametersReader.getAndCheckTpcParallel(tpcdsOptions);
    // Using ExecutorService and CompletionService to fulfill multi-threading functionality
    ExecutorService executor = Executors.newFixedThreadPool(nThreads);
    CompletionService<TpcdsRunResult> completion = new ExecutorCompletionService<>(executor);
    // Make an array of pipelines, each pipeline is responsible for running a corresponding query.
    Pipeline[] pipelines = new Pipeline[queryNames.length];
    // Input tables are '|'-delimited text; empty fields are read as NULL.
    CSVFormat csvFormat = CSVFormat.MYSQL.withDelimiter('|').withNullString("");
    // For each query: build a pipeline, run it, and write the result to a
    // txt file stored in a GCP directory.
    for (int i = 0; i < queryNames.length; i++) {
        // For each query, get a copy of pipelineOptions from command line arguments.
        TpcdsOptions tpcdsOptionsCopy = PipelineOptionsFactory.fromArgs(args).withValidation().as(TpcdsOptions.class);
        // Set a unique job name using the time stamp so that multiple different pipelines can run
        // together.
        tpcdsOptionsCopy.setJobName(queryNames[i] + "result" + System.currentTimeMillis());
        pipelines[i] = Pipeline.create(tpcdsOptionsCopy);
        String queryString = QueryReader.readQuery(queryNames[i]);
        PCollectionTuple tables = getTables(pipelines[i], csvFormat, queryNames[i]);
        try {
            tables.apply(SqlTransform.query(queryString)).apply(MapElements.into(TypeDescriptors.strings()).via(Row::toString)).apply(TextIO.write().to(tpcdsOptions.getResultsDirectory() + "/" + dataSize + "/" + pipelines[i].getOptions().getJobName()).withSuffix(".txt").withNumShards(1));
        } catch (Exception e) {
            // Pass the exception as the final argument so the logger records the
            // stack trace; e.printStackTrace() bypassed the logging configuration.
            LOG.error("{} failed to execute", queryNames[i], e);
        }
        completion.submit(new TpcdsRun(pipelines[i]));
    }
    executor.shutdown();
    printExecutionSummary(completion, queryNames.length);
}
Also used : ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) IOException(java.io.IOException) Pipeline(org.apache.beam.sdk.Pipeline) ExecutorService(java.util.concurrent.ExecutorService) PCollectionTuple(org.apache.beam.sdk.values.PCollectionTuple) CSVFormat(org.apache.commons.csv.CSVFormat) Row(org.apache.beam.sdk.values.Row)

Example 34 with CSVFormat

use of org.apache.commons.csv.CSVFormat in project jgnash by ccavanaugh.

From the class CsvExport, method exportAccountTree.

/**
 * Writes the engine's full account tree to a CSV file (Excel dialect, all
 * fields quoted, UTF-8 with BOM). Accounts are ordered by tree position and
 * account code; each row carries the indented name, code, transaction count,
 * balances, currency symbol and account type.
 *
 * @param engine source engine; must not be null
 * @param path   destination path; its extension is forced to ".csv"
 */
public static void exportAccountTree(@NotNull final Engine engine, @NotNull final Path path) {
    Objects.requireNonNull(engine);
    Objects.requireNonNull(path);
    // force a correct file extension
    final String exportFileName = FileUtils.stripFileExtension(path.toString()) + ".csv";
    final CSVFormat format = CSVFormat.EXCEL.withQuoteMode(QuoteMode.ALL);
    try (final OutputStreamWriter streamWriter = new OutputStreamWriter(Files.newOutputStream(Paths.get(exportFileName)), StandardCharsets.UTF_8);
        final CSVPrinter printer = new CSVPrinter(new BufferedWriter(streamWriter), format)) {
        // write UTF-8 byte order mark to the file for easier imports
        streamWriter.write(BYTE_ORDER_MARK);
        printer.printRecord(ResourceUtils.getString("Column.Account"), ResourceUtils.getString("Column.Code"), ResourceUtils.getString("Column.Entries"), ResourceUtils.getString("Column.Balance"), ResourceUtils.getString("Column.ReconciledBalance"), ResourceUtils.getString("Column.Currency"), ResourceUtils.getString("Column.Type"));
        // Create a list sorted by depth and account code and then name if code is not specified
        final List<Account> sortedAccounts = engine.getAccountList();
        sortedAccounts.sort(Comparators.getAccountByTreePosition(Comparators.getAccountByCode()));
        final CurrencyNode defaultCurrency = engine.getDefaultCurrency();
        final LocalDate asOf = LocalDate.now();
        for (final Account account : sortedAccounts) {
            // Indent the name proportionally to the account's depth in the tree
            final String indentedName = SPACE.repeat((account.getDepth() - 1) * INDENT) + account.getName();
            final String treeBalance = account.getTreeBalance(asOf, defaultCurrency).toPlainString();
            final String reconciledTreeBalance = account.getReconciledTreeBalance().toPlainString();
            printer.printRecord(indentedName, String.valueOf(account.getAccountCode()), String.valueOf(account.getTransactionCount()), treeBalance, reconciledTreeBalance, account.getCurrencyNode().getSymbol(), account.getAccountType().toString());
        }
    } catch (final IOException e) {
        Logger.getLogger(CsvExport.class.getName()).log(Level.SEVERE, e.getLocalizedMessage(), e);
    }
}
Also used : CSVPrinter(org.apache.commons.csv.CSVPrinter) CurrencyNode(jgnash.engine.CurrencyNode) Account(jgnash.engine.Account) CSVFormat(org.apache.commons.csv.CSVFormat) OutputStreamWriter(java.io.OutputStreamWriter) IOException(java.io.IOException) LocalDate(java.time.LocalDate) BufferedWriter(java.io.BufferedWriter)

Example 35 with CSVFormat

use of org.apache.commons.csv.CSVFormat in project midpoint by Evolveum.

From the class ImportController, method parseColumnsAsVariablesFromFile.

/**
 * Parses the CSV file referenced by the report data and returns one
 * {@link VariablesMap} per record, keyed by column header. Headers come from
 * the compiled collection's column definitions when available, otherwise from
 * the file's first record. Empty cell values become null; cells containing
 * the multivalue delimiter are split into a list of strings.
 *
 * @param reportData report data object carrying the path of the CSV file
 * @return a list of variable maps, one per CSV record
 * @throws IOException if the file cannot be opened or parsed
 */
public List<VariablesMap> parseColumnsAsVariablesFromFile(ReportDataType reportData) throws IOException {
    List<String> headers = new ArrayList<>();
    CSVFormat csvFormat = support.createCsvFormat();
    if (compiledCollection != null) {
        // Headers are derived from the compiled collection's column labels
        Class<ObjectType> type = compiledCollection.getTargetClass(reportService.getPrismContext());
        if (type == null) {
            throw new IllegalArgumentException("Couldn't define type of imported objects");
        }
        PrismObjectDefinition<?> def = reportService.getPrismContext().getSchemaRegistry().findItemDefinitionByCompileTimeClass(type, PrismObjectDefinition.class);
        for (GuiObjectColumnType column : columns) {
            Validate.notNull(column.getName(), "Name of column is null");
            String label = GenericSupport.getLabel(column, def, localizationService);
            headers.add(label);
        }
    } else {
        csvFormat = csvFormat.withFirstRecordAsHeader();
    }
    if (support.isHeader()) {
        if (!headers.isEmpty()) {
            String[] arrayHeader = new String[headers.size()];
            arrayHeader = headers.toArray(arrayHeader);
            csvFormat = csvFormat.withHeader(arrayHeader);
        }
        csvFormat = csvFormat.withSkipHeaderRecord(true);
    } else {
        if (headers.isEmpty()) {
            throw new IllegalArgumentException("Couldn't find headers please " + "define them via view element or write them to csv file and set " + "header element in file format configuration to true.");
        }
        csvFormat = csvFormat.withSkipHeaderRecord(false);
    }
    // try-with-resources: the reader and parser were previously never closed
    try (Reader reader = Files.newBufferedReader(Paths.get(reportData.getFilePath()));
            CSVParser csvParser = new CSVParser(reader, csvFormat)) {
        if (headers.isEmpty()) {
            headers = csvParser.getHeaderNames();
        }
        List<VariablesMap> variablesMaps = new ArrayList<>();
        for (CSVRecord csvRecord : csvParser) {
            VariablesMap variables = new VariablesMap();
            for (String name : headers) {
                String value;
                if (support.isHeader()) {
                    value = csvRecord.get(name);
                } else {
                    // Without a header record, fall back to positional access
                    value = csvRecord.get(headers.indexOf(name));
                }
                // Normalize empty cells to null so downstream code sees "absent"
                if (value != null && value.isEmpty()) {
                    value = null;
                }
                if (value != null && value.contains(support.getMultivalueDelimiter())) {
                    // NOTE(review): String.split treats the delimiter as a regex; a
                    // delimiter like "|" would split on every character. Consider
                    // Pattern.quote(...) — TODO confirm intended semantics.
                    String[] realValues = value.split(support.getMultivalueDelimiter());
                    variables.put(name, Arrays.asList(realValues), String.class);
                } else {
                    variables.put(name, value, String.class);
                }
            }
            variablesMaps.add(variables);
        }
        return variablesMaps;
    }
}
Also used : Reader(java.io.Reader) CSVParser(org.apache.commons.csv.CSVParser) CSVFormat(org.apache.commons.csv.CSVFormat) VariablesMap(com.evolveum.midpoint.schema.expression.VariablesMap) CSVRecord(org.apache.commons.csv.CSVRecord)

Aggregations

CSVFormat (org.apache.commons.csv.CSVFormat)59 IOException (java.io.IOException)23 CSVRecord (org.apache.commons.csv.CSVRecord)22 CSVParser (org.apache.commons.csv.CSVParser)19 ArrayList (java.util.ArrayList)14 StringReader (java.io.StringReader)13 CSVPrinter (org.apache.commons.csv.CSVPrinter)10 InputStream (java.io.InputStream)9 InputStreamReader (java.io.InputStreamReader)8 HashMap (java.util.HashMap)8 SimpleRecordSchema (org.apache.nifi.serialization.SimpleRecordSchema)8 RecordField (org.apache.nifi.serialization.record.RecordField)8 RecordSchema (org.apache.nifi.serialization.record.RecordSchema)8 Test (org.junit.Test)8 ByteArrayOutputStream (java.io.ByteArrayOutputStream)7 Reader (java.io.Reader)7 LinkedHashMap (java.util.LinkedHashMap)7 SchemaNameAsAttribute (org.apache.nifi.schema.access.SchemaNameAsAttribute)7 MapRecord (org.apache.nifi.serialization.record.MapRecord)7 Record (org.apache.nifi.serialization.record.Record)7