Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project goci by EBISPOT: class DiseaseTraitDtoAssembler, method disassemble.
/**
 * Parses an uploaded CSV file into a list of {@link DiseaseTrait} entities.
 *
 * <p>The CSV schema is derived from the file itself via
 * {@link FileHandler#getSchemaFromMultiPartFile}; each parsed row DTO is then
 * mapped onto a new {@code DiseaseTrait} carrying only the trait name.
 *
 * @param multipartFile the uploaded CSV file to read
 * @return one {@code DiseaseTrait} per CSV row, in file order
 * @throws FileUploadException if the file cannot be read or parsed as CSV
 */
public static List<DiseaseTrait> disassemble(MultipartFile multipartFile) {
CsvMapper mapper = new CsvMapper();
CsvSchema schema = FileHandler.getSchemaFromMultiPartFile(multipartFile);
List<DiseaseTraitDto> diseaseTraitDtos;
// try-with-resources: the original leaked the upload stream when parsing threw.
try (InputStream inputStream = multipartFile.getInputStream()) {
MappingIterator<DiseaseTraitDto> iterator = mapper.readerFor(DiseaseTraitDto.class).with(schema).readValues(inputStream);
diseaseTraitDtos = iterator.readAll();
} catch (IOException ex) {
// NOTE(review): the root cause is dropped here; if FileUploadException has a
// (String, Throwable) constructor, pass `ex` along — confirm against the project type.
throw new FileUploadException("Could not read the file");
}
List<DiseaseTrait> diseaseTraits = new ArrayList<>();
diseaseTraitDtos.forEach(diseaseTraitDTO -> {
// Only the trait name is carried over from the DTO.
DiseaseTrait diseaseTrait = new DiseaseTrait();
diseaseTrait.setTrait(diseaseTraitDTO.getTrait());
diseaseTraits.add(diseaseTrait);
});
return diseaseTraits;
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache: class DataStreamCsvITCase, method factoryForPojo.
/**
 * Builds a {@link BulkWriter.Factory} that serializes instances of the given POJO
 * class as CSV rows, using a schema derived from the class and no quote character.
 *
 * @param pojoClass the POJO type the CSV schema is derived from
 * @param <T> the element type written by the produced writers
 * @return a factory creating {@link CsvBulkWriter} instances for the given type
 */
private static <T> BulkWriter.Factory<T> factoryForPojo(Class<T> pojoClass) {
final CsvMapper mapper = new CsvMapper();
final CsvSchema pojoSchema = mapper.schemaFor(pojoClass).withoutQuoteChar();
// Identity conversion: each element is written exactly as supplied.
final Converter<T, T, Void> identity = (element, ctx) -> element;
return stream -> new CsvBulkWriter<>(mapper, pojoSchema, identity, null, stream);
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache: class DataStreamCsvITCase, method testCsvReaderFormatFromSchema.
/**
 * Verifies that a {@link CsvReaderFormat} built from an explicit schema
 * (pipe-separated, unquoted) reads the test data back into the expected POJOs.
 */
@Test
public void testCsvReaderFormatFromSchema() throws Exception {
writeFile(outDir, "data.csv", CSV_LINES_PIPE_SEPARATED);
CsvMapper mapper = new CsvMapper();
// The test data is '|'-separated and unquoted, so override the schema defaults.
CsvSchema schema = mapper.schemaFor(CityPojo.class).withoutQuoteChar().withColumnSeparator('|');
final CsvReaderFormat<CityPojo> csvFormat = CsvReaderFormat.forSchema(mapper, schema, TypeInformation.of(CityPojo.class));
final List<CityPojo> result = initializeSourceAndReadData(outDir, csvFormat);
// AssertJ convention: actual value first, expected second — the original had them
// swapped, which produces misleading "expected/actual" failure messages.
assertThat(result).isEqualTo(Arrays.asList(POJOS));
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache: class CsvRowSchemaConverter, method convert (from RowTypeInfo).
/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 *
 * @param rowType the row type whose fields define the CSV columns
 * @return a CSV schema with one column per row field, in field order
 */
public static CsvSchema convert(RowTypeInfo rowType) {
final String[] fieldNames = rowType.getFieldNames();
final TypeInformation<?>[] fieldTypes = rowType.getFieldTypes();
final Builder schemaBuilder = new CsvSchema.Builder();
for (int idx = 0; idx < rowType.getArity(); idx++) {
// Column index mirrors the field's position within the row.
schemaBuilder.addColumn(new Column(idx, fieldNames[idx], convertType(fieldNames[idx], fieldTypes[idx])));
}
return schemaBuilder.build();
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache: class CsvRowSchemaConverter, method convert (from RowType).
/**
 * Convert {@link RowType} to {@link CsvSchema}.
 *
 * @param rowType the logical row type whose fields define the CSV columns
 * @return a CSV schema with one column per row field, in field order
 */
public static CsvSchema convert(RowType rowType) {
Builder schemaBuilder = new CsvSchema.Builder();
int position = 0;
// Each row field becomes a CSV column at the same position.
for (RowType.RowField field : rowType.getFields()) {
String fieldName = field.getName();
schemaBuilder.addColumn(new Column(position, fieldName, convertType(fieldName, field.getType())));
position++;
}
return schemaBuilder.build();
}
Aggregations