Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper in project goci by EBISPOT.
The class FileHandler, method serializeDiseaseTraitAnalysisFile:
public static List<AnalysisDTO> serializeDiseaseTraitAnalysisFile(FileUploadRequest fileUploadRequest) {
    CsvMapper mapper = new CsvMapper();
    // Build the CSV schema from the uploaded file.
    CsvSchema schema = getSchemaFromMultiPartFile(fileUploadRequest.getMultipartFile());
    List<AnalysisDTO> analysisDTOS;
    try {
        InputStream inputStream = fileUploadRequest.getMultipartFile().getInputStream();
        // Bind each CSV row to an AnalysisDTO using the derived schema.
        MappingIterator<AnalysisDTO> iterator =
                mapper.readerFor(AnalysisDTO.class).with(schema).readValues(inputStream);
        analysisDTOS = iterator.readAll();
    } catch (IOException e) {
        throw new FileUploadException("Could not read the file");
    }
    return analysisDTOS;
}
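The helper getSchemaFromMultiPartFile is not shown in this snippet. A minimal sketch of what it might look like, assuming the uploaded file carries a header row that names the columns (the actual goci implementation may differ):

private static CsvSchema getSchemaFromMultiPartFile(MultipartFile multipartFile) {
    // Assumption: the first row of the file is a header, so we let Jackson
    // read column names from it and tolerate columns arriving in any order.
    return CsvSchema.builder()
            .setUseHeader(true)
            .build()
            .withColumnReordering(true);
}

With a header-driven schema like this, the DTO's property names (not positions) decide which column binds to which field.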
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper in project goci by EBISPOT.
The class DiseaseTraitDtoAssembler, method disassemble:
public static List<DiseaseTrait> disassemble(MultipartFile multipartFile) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = FileHandler.getSchemaFromMultiPartFile(multipartFile);
    List<DiseaseTraitDto> diseaseTraitDtos;
    try {
        InputStream inputStream = multipartFile.getInputStream();
        // Bind each CSV row to a DiseaseTraitDto using the derived schema.
        MappingIterator<DiseaseTraitDto> iterator =
                mapper.readerFor(DiseaseTraitDto.class).with(schema).readValues(inputStream);
        diseaseTraitDtos = iterator.readAll();
    } catch (IOException ex) {
        throw new FileUploadException("Could not read the file");
    }
    // Map each DTO onto a DiseaseTrait entity.
    List<DiseaseTrait> diseaseTraits = new ArrayList<>();
    diseaseTraitDtos.forEach(diseaseTraitDTO -> {
        DiseaseTrait diseaseTrait = new DiseaseTrait();
        diseaseTrait.setTrait(diseaseTraitDTO.getTrait());
        diseaseTraits.add(diseaseTrait);
    });
    return diseaseTraits;
}
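A hypothetical call site, assuming a Spring controller that receives the upload (the endpoint and surrounding code are illustrative, not taken from goci):

@PostMapping("/disease-traits/upload")
public ResponseEntity<List<DiseaseTrait>> uploadDiseaseTraits(@RequestParam("file") MultipartFile file) {
    // Convert the uploaded CSV into entities; FileUploadException propagates on unreadable input.
    List<DiseaseTrait> traits = DiseaseTraitDtoAssembler.disassemble(file);
    return ResponseEntity.ok(traits);
}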
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by apache.
The class DataStreamCsvITCase, method factoryForPojo:
private static <T> BulkWriter.Factory<T> factoryForPojo(Class<T> pojoClass) {
    // Identity converter: each element is written as-is.
    final Converter<T, T, Void> converter = (value, context) -> value;
    final CsvMapper csvMapper = new CsvMapper();
    // Derive the column layout from the POJO's properties and disable quoting.
    final CsvSchema schema = csvMapper.schemaFor(pojoClass).withoutQuoteChar();
    return (out) -> new CsvBulkWriter<>(csvMapper, schema, converter, null, out);
}
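A sketch of how such a factory could be wired into a FileSink, assuming a POJO type CityPojo like the one used elsewhere in this test class (paths and the stream variable are placeholders):

// Illustrative wiring only: write the stream as CSV via the bulk writer factory.
FileSink<CityPojo> sink = FileSink
        .forBulkFormat(new Path("/tmp/csv-out"), factoryForPojo(CityPojo.class))
        .build();
stream.sinkTo(sink);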
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by apache.
The class DataStreamCsvITCase, method testCsvReaderFormatFromSchema:
@Test
public void testCsvReaderFormatFromSchema() throws Exception {
    writeFile(outDir, "data.csv", CSV_LINES_PIPE_SEPARATED);
    CsvMapper mapper = new CsvMapper();
    // Derive the schema from the POJO, then switch the separator to '|'.
    CsvSchema schema = mapper.schemaFor(CityPojo.class).withoutQuoteChar().withColumnSeparator('|');
    final CsvReaderFormat<CityPojo> csvFormat =
            CsvReaderFormat.forSchema(mapper, schema, TypeInformation.of(CityPojo.class));
    final List<CityPojo> result = initializeSourceAndReadData(outDir, csvFormat);
    assertThat(Arrays.asList(POJOS)).isEqualTo(result);
}
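For schemaFor(CityPojo.class) to work, the POJO must expose its columns to Jackson. A minimal, hypothetical shape (the actual CityPojo in the Flink test may declare different fields):

// @JsonPropertyOrder fixes the column order that schemaFor(...) derives.
@JsonPropertyOrder({"city", "country", "population"})
public static class CityPojo {
    public String city;
    public String country;
    public long population;

    public CityPojo() {} // Jackson needs a no-arg constructor for row binding
}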
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by apache.
The class RowDataToCsvConverters, method createArrayRowFieldConverter:
private static RowFieldConverter createArrayRowFieldConverter(ArrayType type) {
    LogicalType elementType = type.getElementType();
    final ArrayElementConverter elementConverter = createNullableArrayElementConverter(elementType);
    return (csvMapper, container, row, pos) -> {
        // Materialize the row's array field as a Jackson ArrayNode,
        // converting each element with the type-specific converter.
        ArrayNode arrayNode = csvMapper.createArrayNode();
        ArrayData arrayData = row.getArray(pos);
        int numElements = arrayData.size();
        for (int i = 0; i < numElements; i++) {
            arrayNode.add(elementConverter.convert(csvMapper, arrayNode, arrayData, i));
        }
        return arrayNode;
    };
}
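As an illustration of what createNullableArrayElementConverter might hand back for an INT element type, a sketch under the assumption that the converter's shape matches the lambda above (this is inferred, not quoted from Flink):

// Hypothetical: converts one INT element of the array into a numeric JsonNode.
ArrayElementConverter intElementConverter =
        (csvMapper, container, array, pos) ->
                csvMapper.getNodeFactory().numberNode(array.getInt(pos));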