Use of org.apache.commons.csv.CSVRecord in project activityinfo by bedatadriven.
The class TableDataParser, method exportedDataTableFromCsvFile:
public static DataTable exportedDataTableFromCsvFile(File file) throws IOException {
    List<List<String>> rows = new ArrayList<>();
    // CSVParser implements Closeable, so both resources can share one
    // try-with-resources instead of a reader-only try plus a manual finally block.
    try (Reader reader = new FileReader(file);
         CSVParser parser = CSVFormat.EXCEL.parse(reader)) {
        for (CSVRecord record : parser.getRecords()) {
            List<String> row = Lists.newArrayList();
            for (int i = 0; i < record.size(); i++) {
                row.add(record.get(i));
            }
            rows.add(row);
        }
    }
    return DataTable.create(rows);
}
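For comparison, the same read-into-rows pattern works as a self-contained utility without ActivityInfo's DataTable type. A minimal sketch; the class and method names are illustrative, and only the commons-csv calls mirror the snippet above:

import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvToRows {

    // Reads an Excel-style CSV file into a list of rows, each row a list of cell strings.
    public static List<List<String>> readRows(File file) throws IOException {
        List<List<String>> rows = new ArrayList<>();
        try (Reader reader = new FileReader(file);
             CSVParser parser = CSVFormat.EXCEL.parse(reader)) {
            // CSVParser is Iterable<CSVRecord>, and CSVRecord is Iterable<String>.
            for (CSVRecord record : parser) {
                List<String> row = new ArrayList<>(record.size());
                for (String cell : record) {
                    row.add(cell);
                }
                rows.add(row);
            }
        }
        return rows;
    }
}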
Use of org.apache.commons.csv.CSVRecord in project components by Talend.
The class FixedDatasetRuntime, method getValues:
public List<IndexedRecord> getValues(int limit) {
    List<IndexedRecord> values = new ArrayList<>();
    switch (properties.format.getValue()) {
        case CSV:
            try {
                CsvRecordToIndexedRecordConverter converter = new CsvRecordToIndexedRecordConverter(getSchema());
                CSVFormat format = CSVFormat.RFC4180
                        .withDelimiter(properties.getFieldDelimiter().charAt(0))
                        .withRecordSeparator(properties.getRecordDelimiter());
                // Note: unlike the JSON and AVRO branches, this branch converts every
                // record and never checks the limit parameter.
                for (CSVRecord r : format.parse(new StringReader(properties.values.getValue()))) {
                    values.add(converter.convertToAvro(r));
                }
            } catch (IOException e) {
                throw LocalIOErrorCode.createCannotParseSchema(e, properties.values.getValue());
            }
            break;
        case JSON:
            ObjectMapper mapper = new ObjectMapper();
            JsonSchemaInferrer jsonSchemaInferrer = new JsonSchemaInferrer(mapper);
            JsonGenericRecordConverter converter = null;
            JsonFactory jsonFactory = new JsonFactory();
            try (StringReader r = new StringReader(properties.values.getValue())) {
                Iterator<JsonNode> value = mapper.readValues(jsonFactory.createParser(r), JsonNode.class);
                int count = 0;
                while (value.hasNext() && count++ < limit) {
                    String json = value.next().toString();
                    // The Avro schema is inferred lazily from the first JSON document.
                    if (converter == null) {
                        Schema jsonSchema = jsonSchemaInferrer.inferSchema(json);
                        converter = new JsonGenericRecordConverter(jsonSchema);
                    }
                    values.add(converter.convertToAvro(json));
                }
            } catch (IOException e) {
                throw LocalIOErrorCode.createCannotParseJson(e, properties.schema.getValue(), properties.values.getValue());
            }
            break;
        case AVRO:
            Schema schema = getSchema();
            if (isRandom()) {
                GeneratorFunction<IndexedRecord> gf = (GeneratorFunction<IndexedRecord>) GeneratorFunctions.of(getSchema());
                GeneratorFunction.GeneratorContext ctx = GeneratorFunction.GeneratorContext.of(0, 0L);
                for (int i = 0; i < limit; i++) {
                    ctx.setRowId(i);
                    values.add(gf.apply(ctx));
                }
            } else {
                try (ByteArrayInputStream bais = new ByteArrayInputStream(properties.values.getValue().trim().getBytes())) {
                    JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, bais);
                    DatumReader<IndexedRecord> reader = new GenericDatumReader<>(schema);
                    int count = 0;
                    while (count++ < limit) {
                        values.add(reader.read(null, decoder));
                    }
                } catch (EOFException e) {
                    // Indicates the end of the values.
                } catch (IOException e) {
                    throw LocalIOErrorCode.createCannotParseAvroJson(e, properties.schema.getValue(), properties.values.getValue());
                }
            }
            break;
    }
    return values;
}
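Isolated from the Talend-specific converters, the CSV branch reduces to configuring an RFC 4180 format with the dataset's delimiters and iterating the parsed records. A minimal sketch with hypothetical delimiter and sample values:

import java.io.IOException;
import java.io.StringReader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class Rfc4180Demo {
    public static void main(String[] args) throws IOException {
        String values = "1;alice\n2;bob\n";  // hypothetical sample data
        CSVFormat format = CSVFormat.RFC4180
                .withDelimiter(';')           // stands in for properties.getFieldDelimiter()
                .withRecordSeparator("\n");   // stands in for properties.getRecordDelimiter()
        try (CSVParser parser = format.parse(new StringReader(values))) {
            for (CSVRecord r : parser) {
                System.out.println(r.get(0) + " -> " + r.get(1));
            }
        }
    }
}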
Use of org.apache.commons.csv.CSVRecord in project lumberjack by fn-ctional.
The class WebBackend, method parseDeviceCSV:
public List<Device> parseDeviceCSV(MultipartFile csv) throws FileUploadException {
    Iterable<CSVRecord> records = multipartFileToRecords(csv);
    List<Device> newDevices = new ArrayList<>();
    for (CSVRecord record : records) {
        Device newDevice = new Device();
        newDevice.setScanValue(record.get("scan value"));
        // Note: "can remove" feeds both the available and currentlyAssigned flags,
        // and "scan value" doubles as the type; the device CSV carries no
        // separate columns for those fields.
        newDevice.setAvailable(Boolean.parseBoolean(record.get("can remove")));
        newDevice.setRuleID(record.get("rule id"));
        newDevice.setCurrentlyAssigned(Boolean.parseBoolean(record.get("can remove")));
        newDevice.setType(record.get("scan value"));
        newDevice.setId(UUID.randomUUID().toString());
        newDevices.add(newDevice);
    }
    return newDevices;
}
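The multipartFileToRecords helper is not shown in this excerpt; for the record.get("scan value") lookups to work, it must parse the upload with a header row. A minimal sketch of what such a helper could look like (hypothetical; the actual lumberjack implementation may differ):

import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.fileupload.FileUploadException;
import org.springframework.web.multipart.MultipartFile;

public class CsvUpload {

    // Hypothetical sketch: parses an uploaded CSV, using its first row as the header
    // so records can be read by column name. The reader is left open for the caller's
    // iteration; production code would manage its lifecycle explicitly.
    public static Iterable<CSVRecord> multipartFileToRecords(MultipartFile csv) throws FileUploadException {
        try {
            Reader reader = new InputStreamReader(csv.getInputStream(), StandardCharsets.UTF_8);
            return CSVFormat.DEFAULT.withFirstRecordAsHeader().parse(reader);
        } catch (IOException e) {
            throw new FileUploadException("Could not parse uploaded CSV", e);
        }
    }
}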
Use of org.apache.commons.csv.CSVRecord in project lumberjack by fn-ctional.
The class WebBackend, method parseUserCSV:
public List<User> parseUserCSV(MultipartFile csv) throws FileUploadException {
    Iterable<CSVRecord> records = multipartFileToRecords(csv);
    List<User> newUsers = new ArrayList<>();
    for (CSVRecord record : records) {
        User newUser = new User();
        newUser.setScanValue(record.get("scan value"));
        try {
            newUser.setDeviceLimit(Integer.parseInt(record.get("device limit")));
        } catch (NumberFormatException e) {
            newUser.setDeviceLimit(0);
        }
        try {
            newUser.setDevicesRemoved(Integer.parseInt(record.get("devices removed")));
        } catch (NumberFormatException e) {
            newUser.setDevicesRemoved(0);
        }
        newUser.setCanRemove(Boolean.parseBoolean(record.get("can remove")));
        newUser.setGroupId(record.get("group id"));
        newUser.setId(UUID.randomUUID().toString());
        newUsers.add(newUser);
    }
    return newUsers;
}
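Taken together, the record.get calls above imply an uploaded file with a header row like the following (a hypothetical example; the real lumberjack templates may order or name the columns differently):

scan value,device limit,devices removed,can remove,group id
BADGE-0042,3,1,true,engineering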
Use of org.apache.commons.csv.CSVRecord in project tutorials by eugenp.
The class CSVReaderWriterTest, method givenCSVFile_whenRead_thenContentsAsExpected:
@Test
public void givenCSVFile_whenRead_thenContentsAsExpected() throws IOException {
    // try-with-resources closes the reader, which the original test left open.
    try (Reader in = new FileReader("src/test/resources/book.csv")) {
        // withFirstRecordAsHeader() takes column names from the file's first row,
        // so the explicit withHeader(HEADERS) call before it has no effect here.
        Iterable<CSVRecord> records = CSVFormat.DEFAULT
                .withHeader(HEADERS)
                .withFirstRecordAsHeader()
                .parse(in);
        for (CSVRecord record : records) {
            String author = record.get("author");
            String title = record.get("title");
            assertEquals(AUTHOR_BOOK_MAP.get(author), title);
        }
    }
}
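The test class name suggests a matching write path. A minimal sketch using commons-csv's CSVPrinter; the headers mirror the reading test above, while the file name and book data are hypothetical:

import java.io.FileWriter;
import java.io.IOException;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class BookCsvWriter {
    public static void main(String[] args) throws IOException {
        // CSVPrinter writes the header row first, then one record per printRecord call.
        try (CSVPrinter printer = new CSVPrinter(new FileWriter("book.csv"),
                CSVFormat.DEFAULT.withHeader("author", "title"))) {
            printer.printRecord("Dan Simmons", "Hyperion");
            printer.printRecord("Douglas Adams", "The Hitchhiker's Guide to the Galaxy");
        }
    }
}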