Example 91 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project pwm by pwm-project.

the class LocalDBUtility method importLocalDB.

private void importLocalDB(final InputStream inputStream, final Appendable out, final long totalBytes) throws PwmOperationalException, IOException {
    this.prepareForImport();
    importLineCounter = 0;
    if (totalBytes > 0) {
        writeStringToOut(out, "total bytes in localdb import source: " + totalBytes);
    }
    writeStringToOut(out, "beginning localdb import...");
    final Instant startTime = Instant.now();
    final TransactionSizeCalculator transactionCalculator = new TransactionSizeCalculator(
            new TransactionSizeCalculator.SettingsBuilder()
                    .setDurationGoal(new TimeDuration(100, TimeUnit.MILLISECONDS))
                    .setMinTransactions(50)
                    .setMaxTransactions(5 * 1000)
                    .createSettings());
    final Map<LocalDB.DB, Map<String, String>> transactionMap = new HashMap<>();
    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        transactionMap.put(loopDB, new TreeMap<>());
    }
    final CountingInputStream countingInputStream = new CountingInputStream(inputStream);
    final EventRateMeter eventRateMeter = new EventRateMeter(TimeDuration.MINUTE);
    final Timer statTimer = new Timer(true);
    statTimer.scheduleAtFixedRate(new TimerTask() {

        @Override
        public void run() {
            String output = "";
            if (totalBytes > 0) {
                final ProgressInfo progressInfo = new ProgressInfo(startTime, totalBytes, countingInputStream.getByteCount());
                output += progressInfo.debugOutput();
            } else {
                output += "recordsImported=" + importLineCounter;
            }
            output += ", avgTransactionSize=" + transactionCalculator.getTransactionSize() + ", recordsPerMinute=" + eventRateMeter.readEventRate().setScale(2, BigDecimal.ROUND_DOWN);
            writeStringToOut(out, output);
        }
    }, 30 * 1000, 30 * 1000);
    Reader csvReader = null;
    try {
        csvReader = new InputStreamReader(new GZIPInputStream(countingInputStream, GZIP_BUFFER_SIZE), PwmConstants.DEFAULT_CHARSET);
        for (final CSVRecord record : PwmConstants.DEFAULT_CSV_FORMAT.parse(csvReader)) {
            importLineCounter++;
            eventRateMeter.markEvents(1);
            final String dbNameRecordStr = record.get(0);
            final LocalDB.DB db = JavaHelper.readEnumFromString(LocalDB.DB.class, null, dbNameRecordStr);
            final String key = record.get(1);
            final String value = record.get(2);
            if (db == null) {
                writeStringToOut(out, "ignoring localdb import record #" + importLineCounter + ", invalid DB name '" + dbNameRecordStr + "'");
            } else {
                transactionMap.get(db).put(key, value);
                int cachedTransactions = 0;
                for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                    cachedTransactions += transactionMap.get(loopDB).size();
                }
                if (cachedTransactions >= transactionCalculator.getTransactionSize()) {
                    final long startTxnTime = System.currentTimeMillis();
                    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                        localDB.putAll(loopDB, transactionMap.get(loopDB));
                        transactionMap.get(loopDB).clear();
                    }
                    transactionCalculator.recordLastTransactionDuration(TimeDuration.fromCurrent(startTxnTime));
                }
            }
        }
    } finally {
        LOGGER.trace("import process completed");
        statTimer.cancel();
        IOUtils.closeQuietly(csvReader);
        IOUtils.closeQuietly(countingInputStream);
    }
    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        localDB.putAll(loopDB, transactionMap.get(loopDB));
        transactionMap.get(loopDB).clear();
    }
    this.markImportComplete();
    writeStringToOut(out, "restore complete, restored " + importLineCounter + " records in " + TimeDuration.fromCurrent(startTime).asLongString());
    statTimer.cancel();
}
Also used : InputStreamReader(java.io.InputStreamReader) TransactionSizeCalculator(password.pwm.util.TransactionSizeCalculator) HashMap(java.util.HashMap) LinkedHashMap(java.util.LinkedHashMap) Instant(java.time.Instant) CountingInputStream(org.apache.commons.io.input.CountingInputStream) Reader(java.io.Reader) EventRateMeter(password.pwm.svc.stats.EventRateMeter) GZIPInputStream(java.util.zip.GZIPInputStream) Timer(java.util.Timer) TimerTask(java.util.TimerTask) ProgressInfo(password.pwm.util.ProgressInfo) TimeDuration(password.pwm.util.java.TimeDuration) CSVRecord(org.apache.commons.csv.CSVRecord) Map(java.util.Map) TreeMap(java.util.TreeMap)
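
The loop above streams CSVRecord entries out of a gzipped export and flushes them to the database in sized batches. The same batching pattern can be reduced to a short sketch; here a plain Map stands in for pwm's LocalDB and a fixed batch size replaces its adaptive TransactionSizeCalculator (both are hypothetical simplifications):

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class BatchedCsvImport {

    // Hypothetical fixed batch size standing in for pwm's adaptive TransactionSizeCalculator.
    private static final int BATCH_SIZE = 1000;

    static void importGzippedCsv(final InputStream in, final Map<String, String> store) throws IOException {
        final Map<String, String> batch = new LinkedHashMap<>();
        try (Reader reader = new InputStreamReader(new GZIPInputStream(in), StandardCharsets.UTF_8)) {
            for (final CSVRecord record : CSVFormat.DEFAULT.parse(reader)) {
                // Column 0 is the key and column 1 the value in this simplified layout.
                batch.put(record.get(0), record.get(1));
                if (batch.size() >= BATCH_SIZE) {
                    // Flush one "transaction" and start a new batch.
                    store.putAll(batch);
                    batch.clear();
                }
            }
        }
        // Flush whatever is left after the last full batch.
        store.putAll(batch);
    }
}

Batching keeps per-record write overhead low while bounding the memory held between flushes; pwm additionally retunes the batch size at runtime from observed transaction durations.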

Example 92 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project phoenix by apache.

the class CsvToKeyValueMapperTest method testCsvLineParser.

@Test
public void testCsvLineParser() throws IOException {
    CsvToKeyValueMapper.CsvLineParser lineParser = new CsvToKeyValueMapper.CsvLineParser(';', '"', '\\');
    CSVRecord parsed = lineParser.parse("one;two");
    assertEquals("one", parsed.get(0));
    assertEquals("two", parsed.get(1));
    assertTrue(parsed.isConsistent());
    assertEquals(1, parsed.getRecordNumber());
}
Also used : CSVRecord(org.apache.commons.csv.CSVRecord) Test(org.junit.Test)
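
CsvLineParser is Phoenix-internal, but the single-line parse it performs can be reproduced with Commons CSV alone by building a format from the same delimiter, quote, and escape characters. A minimal sketch under that assumption (not Phoenix's actual implementation):

import java.io.IOException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class DelimitedLineParse {

    public static void main(final String[] args) throws IOException {
        // Semicolon delimiter, double-quote quoting, backslash escaping,
        // mirroring the (';', '"', '\\') constructor arguments in the test above.
        final CSVFormat format = CSVFormat.newFormat(';')
                .withQuote('"')
                .withEscape('\\');
        final CSVRecord parsed = CSVParser.parse("one;two", format).getRecords().get(0);
        System.out.println(parsed.get(0)); // one
        System.out.println(parsed.get(1)); // two
        System.out.println(parsed.isConsistent()); // true
        System.out.println(parsed.getRecordNumber()); // 1
    }
}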

Example 93 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project devonfw-testing by devonfw.

the class SpreadsheetEnvironmentService method updateServicesMapBasedOn.

private void updateServicesMapBasedOn(String environmentName) {
    services = new HashMap<String, String>();
    int environmentNumber = getEnvironmentNumber(environmentName);
    Iterator<CSVRecord> it = records.iterator();
    // first row contains table headers, so skip it
    it.next();
    while (it.hasNext()) {
        CSVRecord record = it.next();
        String key = record.get(0);
        String value = record.get(environmentNumber).trim();
        value = optionalDecrypt(value);
        services.put(key, value);
    }
}
Also used : CSVRecord(org.apache.commons.csv.CSVRecord)
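
Rather than advancing the iterator past the header row by hand, Commons CSV can consume the header itself. A sketch assuming Commons CSV 1.3+ for withFirstRecordAsHeader(); the servicesFor helper and its column layout are hypothetical:

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class EnvironmentColumnLookup {

    // Builds a service-name -> value map from one environment column.
    static Map<String, String> servicesFor(final Reader csv, final int environmentColumn) throws IOException {
        final Map<String, String> services = new HashMap<>();
        for (final CSVRecord record : CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(csv)) {
            // Index-based access still works when a header is configured.
            services.put(record.get(0), record.get(environmentColumn).trim());
        }
        return services;
    }

    public static void main(final String[] args) throws IOException {
        final String csv = "service,dev,prod\nurl,http://dev,http://prod\n";
        System.out.println(servicesFor(new StringReader(csv), 1)); // {url=http://dev}
    }
}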

Example 94 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project ksql by confluentinc.

the class KsqlDelimitedDeserializer method deserialize.

@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    String recordCsvString = new String(bytes, StandardCharsets.UTF_8);
    try {
        List<CSVRecord> csvRecords = CSVParser.parse(recordCsvString, CSVFormat.DEFAULT).getRecords();
        if (csvRecords == null || csvRecords.isEmpty()) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        CSVRecord csvRecord = csvRecords.get(0);
        if (csvRecord == null || csvRecord.size() == 0) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        List<Object> columns = new ArrayList<>();
        if (csvRecord.size() != schema.fields().size()) {
            throw new KsqlException(String.format(
                    "Unexpected field count, csvFields:%d schemaFields:%d line: %s",
                    csvRecord.size(), schema.fields().size(), recordCsvString));
        }
        for (int i = 0; i < csvRecord.size(); i++) {
            if (csvRecord.get(i) == null) {
                columns.add(null);
            } else {
                columns.add(enforceFieldType(schema.fields().get(i).schema(), csvRecord.get(i)));
            }
        }
        return new GenericRow(columns);
    } catch (Exception e) {
        throw new SerializationException("Exception in deserializing the delimited row: " + recordCsvString, e);
    }
}
Also used : GenericRow(io.confluent.ksql.GenericRow) SerializationException(org.apache.kafka.common.errors.SerializationException) ArrayList(java.util.ArrayList) CSVRecord(org.apache.commons.csv.CSVRecord) KsqlException(io.confluent.ksql.util.KsqlException)
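
Stripped of ksql's Connect schema and enforceFieldType, the method is a single-record parse followed by per-column type conversion. A simplified sketch in which a hypothetical list of converter functions stands in for the schema:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class DelimitedRowDeserializer {

    // The converters list is a hypothetical stand-in for ksql's schema + enforceFieldType.
    static List<Object> deserialize(final byte[] bytes, final List<Function<String, Object>> converters)
            throws IOException {
        final String line = new String(bytes, StandardCharsets.UTF_8);
        final CSVRecord record = CSVParser.parse(line, CSVFormat.DEFAULT).getRecords().get(0);
        // Fail fast when the field count does not match the expected column count.
        if (record.size() != converters.size()) {
            throw new IllegalArgumentException("Unexpected field count, csvFields:" + record.size()
                    + " expected:" + converters.size() + " line: " + line);
        }
        final List<Object> columns = new ArrayList<>();
        for (int i = 0; i < record.size(); i++) {
            columns.add(converters.get(i).apply(record.get(i)));
        }
        return columns;
    }

    public static void main(final String[] args) throws IOException {
        System.out.println(deserialize("1,hello,2.5".getBytes(StandardCharsets.UTF_8),
                List.of(Long::valueOf, s -> s, Double::valueOf))); // [1, hello, 2.5]
    }
}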

Example 95 with CSVRecord

use of org.apache.commons.csv.CSVRecord in project pink-panthers by MrTrai.

the class Parser method createShelters.

public List<Shelter> createShelters() {
    CSVParser reader = null;
    try {
        reader = new CSVParser(file, CSVFormat.DEFAULT.withHeader());
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }
    List<Shelter> shelters = new ArrayList<>();
    Db db = new Db("pinkpanther", "PinkPantherReturns!", "pinkpanther");
    for (CSVRecord record : reader) {
        Shelter shelter = db.createShelter(
                record.get("Shelter Name"),
                record.get("Capacity"),
                record.get("Special Notes"),
                Double.valueOf(record.get("Latitude")),
                Double.valueOf(record.get("Longitude")),
                record.get("Phone Number"),
                record.get("Restrictions"),
                record.get("Address"));
        shelters.add(shelter);
    }
    return shelters;
}
Also used : CSVParser(org.apache.commons.csv.CSVParser) ArrayList(java.util.ArrayList) CSVRecord(org.apache.commons.csv.CSVRecord) IOException(java.io.IOException)
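
The header-name lookups (record.get("Shelter Name") and friends) work because CSVFormat.DEFAULT.withHeader() treats the first record as column names. A sketch that keeps that access pattern but drops the Db dependency; the Shelter record below is a hypothetical stand-in for the project's class (Java 16+ for record types):

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class ShelterCsvReader {

    // Hypothetical stand-in for the project's Shelter class.
    record Shelter(String name, String capacity, double latitude, double longitude) { }

    static List<Shelter> read(final Reader csv) throws IOException {
        final List<Shelter> shelters = new ArrayList<>();
        try (CSVParser parser = new CSVParser(csv, CSVFormat.DEFAULT.withHeader())) {
            for (final CSVRecord record : parser) {
                shelters.add(new Shelter(
                        record.get("Shelter Name"),
                        record.get("Capacity"),
                        Double.parseDouble(record.get("Latitude")),
                        Double.parseDouble(record.get("Longitude"))));
            }
        }
        return shelters;
    }

    public static void main(final String[] args) throws IOException {
        final String csv = "Shelter Name,Capacity,Latitude,Longitude\nHope House,40,33.75,-84.39\n";
        System.out.println(read(new StringReader(csv)));
    }
}

Unlike the original, the parser is closed via try-with-resources, and parse failures propagate to the caller instead of calling System.exit.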

Aggregations

CSVRecord (org.apache.commons.csv.CSVRecord): 127
CSVParser (org.apache.commons.csv.CSVParser): 71
IOException (java.io.IOException): 40
CSVFormat (org.apache.commons.csv.CSVFormat): 40
ArrayList (java.util.ArrayList): 36
Reader (java.io.Reader): 24
StringReader (java.io.StringReader): 22
InputStreamReader (java.io.InputStreamReader): 18
FileReader (java.io.FileReader): 16
Test (org.junit.Test): 14
Path (java.nio.file.Path): 13
HashMap (java.util.HashMap): 11
File (java.io.File): 10
PreparedStatement (java.sql.PreparedStatement): 10
InputStream (java.io.InputStream): 9
ResultSet (java.sql.ResultSet): 9
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 9
CSVCommonsLoader (org.apache.phoenix.util.CSVCommonsLoader): 9
BufferedReader (java.io.BufferedReader): 8
Map (java.util.Map): 7