Use of org.apache.commons.csv.CSVRecord in project pwm (pwm-project): class LocalDBUtility, method importLocalDB.
/**
 * Imports a previously exported LocalDB archive (a gzipped CSV stream of
 * db-name/key/value triples) into the local database, batching writes into
 * transactions whose size is tuned at runtime, and periodically reporting
 * progress to {@code out}.
 *
 * @param inputStream gzipped CSV source of records to import
 * @param out         sink for human-readable progress/status messages
 * @param totalBytes  size of the import source in bytes, or <= 0 if unknown
 *                    (when > 0, progress is reported as percent-complete)
 * @throws PwmOperationalException on import preparation/completion failures
 * @throws IOException             on read errors from the source stream
 */
private void importLocalDB(final InputStream inputStream, final Appendable out, final long totalBytes) throws PwmOperationalException, IOException {
this.prepareForImport();
importLineCounter = 0;
if (totalBytes > 0) {
writeStringToOut(out, "total bytes in localdb import source: " + totalBytes);
}
writeStringToOut(out, "beginning localdb import...");
final Instant startTime = Instant.now();
// Dynamically sizes write batches to target ~100ms per transaction,
// bounded between 50 and 5000 records.
final TransactionSizeCalculator transactionCalculator = new TransactionSizeCalculator(new TransactionSizeCalculator.SettingsBuilder().setDurationGoal(new TimeDuration(100, TimeUnit.MILLISECONDS)).setMinTransactions(50).setMaxTransactions(5 * 1000).createSettings());
// One pending-write buffer per DB; TreeMap keeps keys sorted within a batch.
final Map<LocalDB.DB, Map<String, String>> transactionMap = new HashMap<>();
for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
transactionMap.put(loopDB, new TreeMap<>());
}
// Wraps the source so bytes-read can be reported against totalBytes.
final CountingInputStream countingInputStream = new CountingInputStream(inputStream);
final EventRateMeter eventRateMeter = new EventRateMeter(TimeDuration.MINUTE);
// Daemon timer emits a progress line every 30 seconds.
final Timer statTimer = new Timer(true);
statTimer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
String output = "";
if (totalBytes > 0) {
final ProgressInfo progressInfo = new ProgressInfo(startTime, totalBytes, countingInputStream.getByteCount());
output += progressInfo.debugOutput();
} else {
// Source size unknown: fall back to a raw record count.
output += "recordsImported=" + importLineCounter;
}
output += ", avgTransactionSize=" + transactionCalculator.getTransactionSize() + ", recordsPerMinute=" + eventRateMeter.readEventRate().setScale(2, BigDecimal.ROUND_DOWN);
writeStringToOut(out, output);
}
}, 30 * 1000, 30 * 1000);
Reader csvReader = null;
try {
// The archive is gzip-compressed CSV; decode with the app's default charset.
csvReader = new InputStreamReader(new GZIPInputStream(countingInputStream, GZIP_BUFFER_SIZE), PwmConstants.DEFAULT_CHARSET);
for (final CSVRecord record : PwmConstants.DEFAULT_CSV_FORMAT.parse(csvReader)) {
importLineCounter++;
eventRateMeter.markEvents(1);
// Record layout: column 0 = DB name, column 1 = key, column 2 = value.
final String dbNameRecordStr = record.get(0);
final LocalDB.DB db = JavaHelper.readEnumFromString(LocalDB.DB.class, null, dbNameRecordStr);
final String key = record.get(1);
final String value = record.get(2);
if (db == null) {
// Unknown DB names are skipped rather than aborting the whole import.
writeStringToOut(out, "ignoring localdb import record #" + importLineCounter + ", invalid DB name '" + dbNameRecordStr + "'");
} else {
transactionMap.get(db).put(key, value);
// Flush when the total buffered record count across all DBs reaches
// the calculator's current target transaction size.
int cachedTransactions = 0;
for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
cachedTransactions += transactionMap.get(loopDB).size();
}
if (cachedTransactions >= transactionCalculator.getTransactionSize()) {
final long startTxnTime = System.currentTimeMillis();
for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
localDB.putAll(loopDB, transactionMap.get(loopDB));
transactionMap.get(loopDB).clear();
}
// Feed the measured duration back so the next batch size adapts.
transactionCalculator.recordLastTransactionDuration(TimeDuration.fromCurrent(startTxnTime));
}
}
}
} finally {
LOGGER.trace("import process completed");
statTimer.cancel();
IOUtils.closeQuietly(csvReader);
IOUtils.closeQuietly(countingInputStream);
}
// Flush any records still buffered after the loop ended.
for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
localDB.putAll(loopDB, transactionMap.get(loopDB));
transactionMap.get(loopDB).clear();
}
this.markImportComplete();
writeStringToOut(out, "restore complete, restored " + importLineCounter + " records in " + TimeDuration.fromCurrent(startTime).asLongString());
// NOTE(review): redundant — the finally block above already cancelled statTimer
// (cancel() is idempotent, so this is harmless).
statTimer.cancel();
}
Use of org.apache.commons.csv.CSVRecord in project phoenix (Apache): class CsvToKeyValueMapperTest, method testCsvLineParser.
@Test
public void testCsvLineParser() throws IOException {
    // Parser configured with ';' as delimiter, '"' as quote and '\' as escape.
    CsvToKeyValueMapper.CsvLineParser parser = new CsvToKeyValueMapper.CsvLineParser(';', '"', '\\');
    CSVRecord record = parser.parse("one;two");
    // The single semicolon-delimited line yields both fields in order.
    assertEquals("one", record.get(0));
    assertEquals("two", record.get(1));
    // A lone record is trivially consistent and carries record number 1.
    assertTrue(record.isConsistent());
    assertEquals(1, record.getRecordNumber());
}
Use of org.apache.commons.csv.CSVRecord in project devonfw-testing (devonfw): class SpreadsheetEnvironmentService, method updateServicesMapBasedOn.
/**
 * Rebuilds the {@code services} map from the cached CSV {@code records} for the
 * given environment: column 0 of each data row holds the service key, and the
 * environment's column holds its (possibly encrypted) value.
 *
 * @param environmentName name of the environment column whose values are loaded
 */
private void updateServicesMapBasedOn(String environmentName) {
    services = new HashMap<String, String>();
    int environmentNumber = getEnvironmentNumber(environmentName);
    Iterator<CSVRecord> it = records.iterator();
    // First row contains table headers, so skip it. Guard with hasNext() so an
    // empty record set does not throw NoSuchElementException.
    if (it.hasNext()) {
        it.next();
    }
    while (it.hasNext()) {
        CSVRecord record = it.next();
        String key = record.get(0);
        String value = record.get(environmentNumber).trim();
        // Values may be stored encrypted in the sheet; decrypt when applicable.
        value = optionalDecrypt(value);
        services.put(key, value);
    }
}
Use of org.apache.commons.csv.CSVRecord in project ksql (Confluent): class KsqlDelimitedDeserializer, method deserialize.
/**
 * Deserializes a delimited (CSV) message payload into a {@link GenericRow}
 * whose columns are coerced to the types declared in {@code schema}.
 *
 * @param topic the Kafka topic the bytes were read from (unused here)
 * @param bytes the raw message payload; {@code null} yields a {@code null} row
 * @return the decoded row, or {@code null} when {@code bytes} is {@code null}
 * @throws SerializationException wrapping any parse, validation, or coercion
 *                                failure for the line
 */
@Override
public GenericRow deserialize(final String topic, final byte[] bytes) {
    if (bytes == null) {
        return null;
    }
    String recordCsvString = new String(bytes, StandardCharsets.UTF_8);
    // try-with-resources closes the CSVParser; the original leaked it.
    try (CSVParser csvParser = CSVParser.parse(recordCsvString, CSVFormat.DEFAULT)) {
        List<CSVRecord> csvRecords = csvParser.getRecords();
        if (csvRecords == null || csvRecords.isEmpty()) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        // Only the first record matters: one message is expected to be one line.
        CSVRecord csvRecord = csvRecords.get(0);
        if (csvRecord == null || csvRecord.size() == 0) {
            throw new KsqlException("Deserialization error in the delimited line: " + recordCsvString);
        }
        // The field count must match the schema exactly; reject otherwise.
        if (csvRecord.size() != schema.fields().size()) {
            throw new KsqlException(String.format("Unexpected field count, csvFields:%d schemaFields:%d line: %s", csvRecord.size(), schema.fields().size(), recordCsvString));
        }
        List<Object> columns = new ArrayList<>();
        for (int i = 0; i < csvRecord.size(); i++) {
            if (csvRecord.get(i) == null) {
                columns.add(null);
            } else {
                // Coerce each raw string to its declared schema field type.
                columns.add(enforceFieldType(schema.fields().get(i).schema(), csvRecord.get(i)));
            }
        }
        return new GenericRow(columns);
    } catch (Exception e) {
        // Preserve original contract: every failure surfaces as a
        // SerializationException with the offending line and cause attached.
        throw new SerializationException("Exception in deserializing the delimited row: " + recordCsvString, e);
    }
}
Use of org.apache.commons.csv.CSVRecord in project pink-panthers (MrTrai): class Parser, method createShelters.
/**
 * Reads every shelter row from the CSV source (using its header row for column
 * names) and persists each one through the database layer.
 *
 * @return the list of {@link Shelter} objects created from the CSV rows
 */
public List<Shelter> createShelters() {
    CSVParser reader = null;
    try {
        reader = new CSVParser(file, CSVFormat.DEFAULT.withHeader());
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
    }
    List<Shelter> shelters = new ArrayList<>();
    // SECURITY NOTE(review): database credentials are hard-coded in source;
    // move them to configuration / a secrets store.
    Db db = new Db("pinkpanther", "PinkPantherReturns!", "pinkpanther");
    try {
        for (CSVRecord record : reader) {
            Shelter shelter = db.createShelter(record.get("Shelter Name"), record.get("Capacity"), record.get("Special Notes"), Double.valueOf(record.get("Latitude")), Double.valueOf(record.get("Longitude")), record.get("Phone Number"), record.get("Restrictions"), record.get("Address"));
            shelters.add(shelter);
        }
    } finally {
        // Close the parser to release the underlying file handle; the original
        // implementation leaked it.
        try {
            reader.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return shelters;
}
Aggregations