Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project goci by EBISPOT:
class FileHandler, method getStudyPatchRequests.
/**
 * Parses the uploaded CSV/TSV file into a list of {@link StudyPatchRequest} rows.
 *
 * @param fileUploadRequest wrapper carrying the uploaded multipart file
 * @return all rows of the file, one StudyPatchRequest per row
 * @throws FileUploadException if the file cannot be read or parsed
 */
public static List<StudyPatchRequest> getStudyPatchRequests(FileUploadRequest fileUploadRequest) {
    CsvMapper mapper = new CsvMapper();
    // Schema (header + separator) is derived from the file extension.
    CsvSchema schema = getSchemaFromMultiPartFile(fileUploadRequest.getMultipartFile());
    // try-with-resources: the original leaked both the InputStream and the
    // MappingIterator (both are Closeable) on every call.
    try (InputStream inputStream = fileUploadRequest.getMultipartFile().getInputStream();
         MappingIterator<StudyPatchRequest> iterator =
                 mapper.readerFor(StudyPatchRequest.class).with(schema).readValues(inputStream)) {
        return iterator.readAll();
    } catch (IOException e) {
        // NOTE(review): cause is dropped here; if FileUploadException has a
        // (String, Throwable) constructor, pass `e` along — TODO confirm.
        throw new FileUploadException("Could not read the file");
    }
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project goci by EBISPOT:
class FileHandler, method serializePojoToTsv.
/**
 * Serializes a list of POJOs to a TSV string: a header row built from the
 * first element's field names, then one tab-separated row per element.
 *
 * @param pojoList POJOs to serialize; all elements are assumed to share the
 *                 same field set as the first element (header comes from row 0)
 * @return the TSV text, with a trailing '\n' line separator per row
 * @throws FileUploadException if Jackson fails to write the rows
 */
public static String serializePojoToTsv(List<?> pojoList) {
    CsvMapper csvMapper = new CsvMapper();
    // BUG FIX: the original used new TypeReference<Object>(){}, which erases the
    // target type — convertValue then yields Object, not List<Map<String,Object>>.
    List<Map<String, Object>> dataList =
            csvMapper.convertValue(pojoList, new TypeReference<List<Map<String, Object>>>() {
            });
    List<List<String>> csvData = new ArrayList<>();
    List<String> csvHead = new ArrayList<>();
    AtomicInteger counter = new AtomicInteger();
    dataList.forEach(row -> {
        List<String> rowData = new ArrayList<>();
        row.forEach((key, value) -> {
            rowData.add(String.valueOf(value));
            // Column names are captured only from the first row.
            if (counter.get() == 0) {
                csvHead.add(key);
            }
        });
        csvData.add(rowData);
        counter.getAndIncrement();
    });
    CsvSchema.Builder builder = CsvSchema.builder();
    csvHead.forEach(builder::addColumn);
    CsvSchema schema = builder.build().withHeader().withLineSeparator("\n").withColumnSeparator('\t');
    String result = "";
    try {
        result = csvMapper.writer(schema).writeValueAsString(csvData);
    } catch (IOException e) {
        // BUG FIX: message said "Could not read the file" on a WRITE failure
        // (copy-paste from the read path).
        throw new FileUploadException("Could not serialize the data to TSV");
    }
    return result;
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project goci by EBISPOT:
class FileHandler, method getSchemaFromMultiPartFile.
/**
 * Builds the CSV schema for an uploaded file: header row expected, and a tab
 * column separator when the file extension is "tsv" (comma otherwise).
 *
 * @param multipartFile the uploaded file; its original filename selects the separator
 * @return a header-aware CsvSchema with the appropriate column separator
 */
public static CsvSchema getSchemaFromMultiPartFile(MultipartFile multipartFile) {
    CsvSchema schema = CsvSchema.builder().build().withHeader();
    // FilenameUtils.getExtension(null) returns null; getOriginalFilename() may be
    // null, so the extension can be null too.
    String extension = FilenameUtils.getExtension(multipartFile.getOriginalFilename());
    // BUG FIX: constant-first comparison avoids the NPE the original
    // extension.equals("tsv") threw for a missing filename.
    // NOTE(review): comparison stays case-sensitive ("TSV" not matched) to
    // preserve the original behavior — confirm whether ignore-case is wanted.
    if ("tsv".equals(extension)) {
        schema = schema.withColumnSeparator('\t');
    }
    return schema;
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project registry by hortonworks:
class TruckEventsCsvConverter, method readTruckEventsFromCsv.
/**
 * Reads truck events from a CSV stream using a fixed twelve-column schema;
 * the first line of the stream is consumed as the header row.
 *
 * @param csvStream CSV data with a header line followed by one event per row
 * @return a lazy iterator over the parsed {@link TruckEvent} rows
 * @throws IOException if the stream cannot be read
 */
private MappingIterator<TruckEvent> readTruckEventsFromCsv(InputStream csvStream) throws IOException {
    // Declare each column explicitly; withHeader() makes the parser skip the
    // header line in the input rather than trusting it for ordering.
    CsvSchema eventSchema = CsvSchema.builder()
            .addColumn("driverId", CsvSchema.ColumnType.NUMBER)
            .addColumn("truckId", CsvSchema.ColumnType.NUMBER)
            .addColumn("eventTime", CsvSchema.ColumnType.STRING)
            .addColumn("eventType", CsvSchema.ColumnType.STRING)
            .addColumn("longitude", CsvSchema.ColumnType.NUMBER)
            .addColumn("latitude", CsvSchema.ColumnType.NUMBER)
            .addColumn("eventKey", CsvSchema.ColumnType.STRING)
            .addColumn("correlationId", CsvSchema.ColumnType.NUMBER)
            .addColumn("driverName", CsvSchema.ColumnType.STRING)
            .addColumn("routeId", CsvSchema.ColumnType.NUMBER)
            .addColumn("routeName", CsvSchema.ColumnType.STRING)
            .addColumn("eventDate", CsvSchema.ColumnType.STRING)
            .build()
            .withHeader();
    CsvMapper eventMapper = new CsvMapper();
    return eventMapper.readerFor(TruckEvent.class).with(eventSchema).readValues(csvStream);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project mapping-benchmark by arnaudroger:
class JacksonCsvParserBenchmark, method main.
/**
 * Benchmark driver: streams the CSV fixture provided by {@link CsvParam},
 * binds each row to a {@link City} (header-driven, case-insensitive property
 * matching), and prints every parsed row to stdout.
 *
 * @param args ignored
 * @throws IOException if the CSV source cannot be opened or read
 */
public static void main(String[] args) throws IOException {
    CsvParam params = new CsvParam();
    params.setUp();
    CsvMapper rowMapper = new CsvMapper();
    // Let header names match City properties regardless of case.
    rowMapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
    CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
    // try-with-resources closes the reader (and with it the iterator's source).
    try (Reader source = params.getReader()) {
        MappingIterator<City> cities =
                rowMapper.readerFor(City.class).with(headerSchema).readValues(source);
        while (cities.hasNext()) {
            System.out.println(cities.next());
        }
    }
}
Aggregations