use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project mapping-benchmark by arnaudroger.
the class JacksonCsvParserBenchmark method setUp.
@Setup
public void setUp() {
    // Mapper that yields each CSV record as a String[]
    csvMapperToStringArray = new CsvMapper();
    csvMapperToStringArray.enable(com.fasterxml.jackson.dataformat.csv.CsvParser.Feature.WRAP_AS_ARRAY);
    // Mapper that binds CSV records to City beans, matching header names case-insensitively
    CsvMapper csvMapperToCity = new CsvMapper();
    csvMapperToCity.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
    CsvSchema bootstrapSchema = CsvSchema.emptySchema().withHeader();
    cityReader = csvMapperToCity.readerFor(City.class).with(bootstrapSchema);
}
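A minimal sketch of how these two readers might be driven in the benchmark body; the input file name and the City bean's fields are assumptions, not part of the snippet above.

// Rows as raw String[] records, enabled by WRAP_AS_ARRAY (the file name is illustrative)
MappingIterator<String[]> rows = csvMapperToStringArray
        .readerFor(String[].class)
        .readValues(new File("worldcitiespop.txt"));
while (rows.hasNext()) {
    String[] row = rows.next();
}
// Rows bound to City beans; the header row drives the case-insensitive property matching
MappingIterator<City> cities = cityReader.readValues(new File("worldcitiespop.txt"));
while (cities.hasNext()) {
    City city = cities.next();
}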
use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project hub-fortify-ssc-integration-service by blackducksoftware.
the class CSVUtils method writeToCSV.
/**
 * Renders the given list of vulnerabilities as a CSV file.
 *
 * @param vulnerabilities the vulnerabilities to write
 * @param fileName path of the CSV file to create
 * @param delimiter column separator to use
 * @throws IOException when the file cannot be created or written
 */
@SuppressWarnings("resource")
public static void writeToCSV(List<Vulnerability> vulnerabilities, String fileName, char delimiter) throws IOException {
    // Create the mapper and derive a header-first schema from the Vulnerability bean
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = mapper.schemaFor(Vulnerability.class).withHeader().withColumnSeparator(delimiter);
    ObjectWriter objectWriter = mapper.writer(schema);
    File file = new File(fileName);
    FileOutputStream fileOutputStream;
    try {
        fileOutputStream = new FileOutputStream(file);
    } catch (FileNotFoundException e) {
        throw new FileNotFoundException(fileName + " could not be created or opened for writing");
    }
    BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(fileOutputStream, 1024);
    // StandardCharsets.UTF_8 avoids the checked UnsupportedEncodingException of the String-based constructor
    OutputStreamWriter writerOutputStream = new OutputStreamWriter(bufferedOutputStream, StandardCharsets.UTF_8);
    // write to CSV file
    try {
        objectWriter.writeValue(writerOutputStream, vulnerabilities);
    } catch (IOException e) {
        throw new IOException("Error while rendering the vulnerabilities in CSV file: " + fileName, e);
    }
}
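For context, a minimal usage sketch; the output path and delimiter here are illustrative assumptions:

List<Vulnerability> vulnerabilities = new ArrayList<>(); // populated elsewhere in the service
CSVUtils.writeToCSV(vulnerabilities, "/tmp/vulnerabilities.csv", ',');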
use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project perun by CESNET.
the class ExtSourceCSV method csvParsing.
/**
 * Parses the CSV file into a list of our standard "subjects" (aka candidates).
 *
 * @param query query to check CSV file content against
 * @param maxResults limit results to the first X rows, or 0 for unlimited
 * @return List of Maps representing subjects for synchronization (perun_attr/constant = value).
 * @throws InternalErrorException when the implementation fails
 * @throws IOException when reading the CSV file fails
 */
private List<Map<String, String>> csvParsing(String query, int maxResults) throws IOException {
    List<Map<String, String>> subjects = new ArrayList<>();
    Map<String, String> attributeMapping = getCsvMapping();
    File csvFile = new File(file);
    CsvMapper mapper = new CsvMapper();
    // use the first row as the header; otherwise defaults are fine
    CsvSchema schema = CsvSchema.emptySchema().withHeader();
    MappingIterator<Map<String, String>> it = mapper.readerFor(Map.class).with(schema).readValues(csvFile);
    while (it.hasNext()) {
        Map<String, String> rowAsMap = it.next();
        if (compareRowToQuery(rowAsMap, query)) {
            Map<String, String> singleSubject = new HashMap<>();
            // translate CSV column names to perun attribute URNs
            for (Map.Entry<String, String> entry : rowAsMap.entrySet()) {
                singleSubject.put(attributeMapping.get(entry.getKey()), entry.getValue());
            }
            subjects.add(singleSubject);
            // stop early if a limited response was requested
            if (maxResults > 0 && subjects.size() >= maxResults) {
                break;
            }
        }
    }
    return subjects;
}
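The header-driven Map binding above can be tried in isolation; a self-contained sketch, with illustrative file contents inlined as a String:

CsvMapper mapper = new CsvMapper();
CsvSchema schema = CsvSchema.emptySchema().withHeader();
String csv = "login,email\njdoe,jdoe@example.org\n";
MappingIterator<Map<String, String>> it = mapper.readerFor(Map.class).with(schema).readValues(csv);
while (it.hasNext()) {
    Map<String, String> row = it.next(); // {login=jdoe, email=jdoe@example.org}
}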
use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project graylog2-server by Graylog2.
the class GenericErrorCsvWriter method writeTo.
@Override
public void writeTo(GenericError genericError, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
    // Dirty hack for ValidationApiError, which cannot be serialized as CSV because of its nested validationErrors field.
    if (genericError instanceof ValidationApiError) {
        final CsvSchema csvSchema = mapper.typedSchemaFor(ApiError.class).withHeader();
        final ApiError apiError = ApiError.create(genericError.message());
        mapper.writerFor(ApiError.class).with(csvSchema).writeValue(entityStream, apiError);
    } else {
        final CsvSchema csvSchema = mapper.typedSchemaFor(type).withHeader();
        mapper.writerFor(type).with(csvSchema).writeValue(entityStream, genericError);
    }
}
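A minimal sketch of the typed-schema pattern used above, reusing the ApiError factory from the snippet; writing to System.out is an illustrative stand-in for the entity stream:

CsvMapper mapper = new CsvMapper();
CsvSchema csvSchema = mapper.typedSchemaFor(ApiError.class).withHeader();
mapper.writerFor(ApiError.class).with(csvSchema).writeValue(System.out, ApiError.create("Something went wrong"));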
use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project goci by EBISPOT.
the class FileHandler method serializeDiseaseTraitAnalysisFile.
public static List<AnalysisDTO> serializeDiseaseTraitAnalysisFile(FileUploadRequest fileUploadRequest) {
    CsvMapper mapper = new CsvMapper();
    CsvSchema schema = getSchemaFromMultiPartFile(fileUploadRequest.getMultipartFile());
    List<AnalysisDTO> analysisDTOS;
    try {
        InputStream inputStream = fileUploadRequest.getMultipartFile().getInputStream();
        // Bind every CSV record to an AnalysisDTO and read them all eagerly
        MappingIterator<AnalysisDTO> iterator = mapper.readerFor(AnalysisDTO.class).with(schema).readValues(inputStream);
        analysisDTOS = iterator.readAll();
    } catch (IOException e) {
        throw new FileUploadException("Could not read the file");
    }
    return analysisDTOS;
}
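getSchemaFromMultiPartFile is project code not shown here; a purely hypothetical sketch of what such a helper could do, assuming the file's first line is a header row:

private static CsvSchema getSchemaFromMultiPartFile(MultipartFile multipartFile) {
    // Hypothetical: let Jackson read column names from the first CSV line;
    // the real goci helper may build the schema differently.
    return CsvSchema.emptySchema().withHeader();
}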