Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project mapping-benchmark by arnaudroger — class JacksonCsvParserBenchmark, method main:
/**
 * Parses the benchmark CSV input into {@code City} beans and prints each row.
 * The first CSV line is consumed as the header (see {@code withHeader()}).
 *
 * @param args unused
 * @throws IOException if the input cannot be opened or read
 */
public static void main(String[] args) throws IOException {
    CsvParam csvParam = new CsvParam();
    csvParam.setUp();

    // Case-insensitive header matching so column names need not match bean properties exactly.
    CsvMapper cityMapper = new CsvMapper();
    cityMapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
    CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();

    // try-with-resources closes the reader once every row has been printed.
    try (Reader reader = csvParam.getReader()) {
        MappingIterator<City> rows = cityMapper.readerFor(City.class).with(headerSchema).readValues(reader);
        rows.forEachRemaining(System.out::println);
    }
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project mapping-benchmark by arnaudroger — class JacksonCsvParserBenchmark, method setUp:
@Setup
public void setUp() {
    // Mapper used for raw String[] parsing: each CSV line becomes one array element.
    csvMapperToStringArray = new CsvMapper();
    csvMapperToStringArray.enable(com.fasterxml.jackson.dataformat.csv.CsvParser.Feature.WRAP_AS_ARRAY);

    // Mapper used for City bean mapping; the schema takes column names from the header
    // row, matched case-insensitively against bean properties.
    CsvMapper cityMapper = new CsvMapper();
    cityMapper.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
    CsvSchema headerSchema = CsvSchema.emptySchema().withHeader();
    cityReader = cityMapper.readerFor(City.class).with(headerSchema);
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project hub-fortify-ssc-integration-service by blackducksoftware — class CSVUtils, method writeToCSV:
/**
 * Renders the list of vulnerabilities as CSV rows in the given file,
 * with a header row derived from the {@code Vulnerability} bean.
 *
 * @param vulnerabilities rows to write
 * @param fileName path of the CSV file to create (overwritten if present)
 * @param delimiter column separator character
 * @throws JsonGenerationException if CSV generation fails
 * @throws JsonMappingException if mapping a Vulnerability fails
 * @throws FileNotFoundException declared for signature compatibility
 * @throws UnsupportedEncodingException declared for signature compatibility;
 *         no longer thrown since the charset is resolved via StandardCharsets
 * @throws IOException if writing to the file fails
 */
public static void writeToCSV(List<Vulnerability> vulnerabilities, String fileName, char delimiter) throws JsonGenerationException, JsonMappingException, FileNotFoundException, UnsupportedEncodingException, IOException {
    // create mapper and schema; the header comes from the Vulnerability bean
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(Vulnerability.class).withHeader().withColumnSeparator(delimiter);
    ObjectWriter objectWriter = mapper.writer(schema);
    File file = new File(fileName);
    // try-with-resources closes the whole stream chain even when writing fails
    // (the original leaked the streams on error and needed @SuppressWarnings("resource")).
    // StandardCharsets.UTF_8 cannot fail, unlike the "UTF-8" string lookup.
    try (FileOutputStream fileOutputStream = new FileOutputStream(file);
            BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(fileOutputStream, 1024);
            OutputStreamWriter writerOutputStream = new OutputStreamWriter(bufferedOutputStream, java.nio.charset.StandardCharsets.UTF_8)) {
        objectWriter.writeValue(writerOutputStream, vulnerabilities);
    } catch (FileNotFoundException e) {
        // keep the original error message but preserve the cause instead of discarding it
        FileSystemNotFoundException wrapped = new FileSystemNotFoundException(fileName + " CSV file is not created successfully");
        wrapped.initCause(e);
        throw wrapped;
    } catch (IOException e) {
        throw new IOException("Error while rendering the vulnerabilities in CSV file::" + fileName, e);
    }
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project goci by EBISPOT — class FileHandler, method serializeDiseaseTraitAnalysisFile:
/**
 * Reads the uploaded CSV file into a list of {@code AnalysisDTO} rows,
 * using the schema inferred from the multipart file's header.
 *
 * @param fileUploadRequest wrapper holding the uploaded multipart file
 * @return all parsed rows
 * @throws FileUploadException if the file cannot be read or parsed
 */
public static List<AnalysisDTO> serializeDiseaseTraitAnalysisFile(FileUploadRequest fileUploadRequest) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = getSchemaFromMultiPartFile(fileUploadRequest.getMultipartFile());
    // try-with-resources closes both the upload stream and the iterator
    // (the original never closed either, leaking the stream on every call).
    try (InputStream inputStream = fileUploadRequest.getMultipartFile().getInputStream();
            MappingIterator<AnalysisDTO> iterator = mapper.readerFor(AnalysisDTO.class).with(schema).readValues(inputStream)) {
        return iterator.readAll();
    } catch (IOException e) {
        // NOTE(review): the root cause is discarded here; attach it if
        // FileUploadException has a (String, Throwable) constructor — TODO confirm
        throw new FileUploadException("Could not read the file");
    }
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project goci by EBISPOT — class DiseaseTraitDtoAssembler, method disassemble:
/**
 * Parses the uploaded CSV file into {@code DiseaseTrait} entities,
 * copying only the trait name from each parsed DTO row.
 *
 * @param multipartFile uploaded CSV file; schema is inferred from its header
 * @return one DiseaseTrait per CSV row
 * @throws FileUploadException if the file cannot be read or parsed
 */
public static List<DiseaseTrait> disassemble(MultipartFile multipartFile) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = FileHandler.getSchemaFromMultiPartFile(multipartFile);
    List<DiseaseTraitDto> diseaseTraitDtos;
    // try-with-resources closes both the upload stream and the iterator
    // (the original never closed either, leaking the stream on every call).
    try (InputStream inputStream = multipartFile.getInputStream();
            MappingIterator<DiseaseTraitDto> iterator = mapper.readerFor(DiseaseTraitDto.class).with(schema).readValues(inputStream)) {
        diseaseTraitDtos = iterator.readAll();
    } catch (IOException ex) {
        // NOTE(review): the root cause is discarded here; attach it if
        // FileUploadException has a (String, Throwable) constructor — TODO confirm
        throw new FileUploadException("Could not read the file");
    }
    // Map each DTO to an entity, carrying over only the trait name.
    List<DiseaseTrait> diseaseTraits = new ArrayList<>(diseaseTraitDtos.size());
    for (DiseaseTraitDto dto : diseaseTraitDtos) {
        DiseaseTrait diseaseTrait = new DiseaseTrait();
        diseaseTrait.setTrait(dto.getTrait());
        diseaseTraits.add(diseaseTrait);
    }
    return diseaseTraits;
}
Aggregations.