Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project xm-ms-entity by xm-online: the method createDefaultCsvMapper of class EntityToCsvConverterUtils.
/**
 * Creates the default {@link CsvMapper} used for CSV conversion: properties
 * absent from the CSV schema are ignored on write, and Java 8 date/time
 * types are handled via {@link JavaTimeModule}.
 *
 * @return a pre-configured {@code CsvMapper}
 */
private static CsvMapper createDefaultCsvMapper() {
    final CsvMapper csvMapper = new CsvMapper();
    // Skip properties that have no column in the CSV schema instead of failing.
    csvMapper.enable(JsonGenerator.Feature.IGNORE_UNKNOWN);
    // java.time support (LocalDate, Instant, ...).
    csvMapper.registerModule(new JavaTimeModule());
    return csvMapper;
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project pdxfinder by PDCMFinder: the method serializeToCsvWithIncludeNonEmpty of class UtilityService.
/**
 * Serializes a list of POJOs to a CSV string. The header is taken from the
 * keys of the first row; every row is then written in header order so columns
 * stay aligned even when later rows have a different key iteration order or
 * are missing keys (a missing key renders as an empty cell).
 *
 * @param pojoList POJOs to serialize; an empty list yields an empty CSV
 * @return the CSV document as a string
 * @throws IOException if CSV serialization fails
 */
public String serializeToCsvWithIncludeNonEmpty(List<?> pojoList) throws IOException {
    CsvMapper csvMapper = new CsvMapper();
    // NOTE(review): 'mapper' is a field of UtilityService not visible here;
    // presumably a plain ObjectMapper used for the POJO -> Map conversion.
    List<Map<String, Object>> dataList = mapper.convertValue(pojoList,
            new TypeReference<List<Map<String, Object>>>() {
            });
    // Header columns come from the first row (original behavior).
    List<String> csvHead = dataList.isEmpty()
            ? new ArrayList<>()
            : new ArrayList<>(dataList.get(0).keySet());
    // FIX: previously each row was emitted in its own map's iteration order,
    // which misaligned columns whenever a row's key set/order differed from
    // the first row. Emit every row in header order instead.
    List<List<String>> csvData = new ArrayList<>(dataList.size());
    for (Map<String, Object> row : dataList) {
        List<String> rowData = new ArrayList<>(csvHead.size());
        for (String key : csvHead) {
            // String.valueOf keeps the original "null" rendering for null values;
            // a key absent from this row becomes an empty cell.
            rowData.add(row.containsKey(key) ? String.valueOf(row.get(key)) : "");
        }
        csvData.add(rowData);
    }
    CsvSchema.Builder builder = CsvSchema.builder();
    csvHead.forEach(builder::addColumn);
    CsvSchema schema = builder.build().withHeader();
    return csvMapper.writer(schema).writeValueAsString(csvData);
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by splunk: the method createEncodingFormat of class CsvFileFormatFactory.
@Override
public EncodingFormat<Factory<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
// Returns an insert-only encoding format that writes RowData as CSV via a
// bulk writer factory; the CSV schema is derived from the sink's row type
// and the supplied format options.
return new EncodingFormat<BulkWriter.Factory<RowData>>() {
@Override
public BulkWriter.Factory<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType physicalDataType) {
// Physical row type of the sink; drives both schema and field converters.
final RowType rowType = (RowType) physicalDataType.getLogicalType();
final CsvSchema schema = buildCsvSchema(rowType, formatOptions);
final RowDataToCsvConverter converter = RowDataToCsvConverters.createRowConverter(rowType);
final CsvMapper mapper = new CsvMapper();
// Reusable JSON container node the converter fills for each record.
final ObjectNode container = mapper.createObjectNode();
final RowDataToCsvConverter.RowDataToCsvFormatConverterContext converterContext = new RowDataToCsvConverter.RowDataToCsvFormatConverterContext(mapper, container);
// One CsvBulkWriter per output stream; mapper/schema/converter are shared setup.
return out -> CsvBulkWriter.forSchema(mapper, schema, converter, converterContext, out);
}
@Override
public ChangelogMode getChangelogMode() {
// CSV files cannot encode updates or deletes.
return ChangelogMode.insertOnly();
}
};
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by splunk: the method forPojo of class CsvBulkWriter.
/**
 * Builds a writer based on a POJO class definition.
 *
 * <p>The CSV schema is derived from the POJO through Jackson introspection
 * and quoting is disabled; elements are passed through unchanged (identity
 * conversion, no converter context).
 *
 * @param pojoClass The class of the POJO.
 * @param stream The output stream.
 * @param <T> The type of the elements accepted by this writer.
 * @return a bulk writer that emits one CSV line per POJO
 */
static <T> CsvBulkWriter<T, T, Void> forPojo(Class<T> pojoClass, FSDataOutputStream stream) {
    final CsvMapper mapper = new CsvMapper();
    final CsvSchema pojoSchema = mapper.schemaFor(pojoClass).withoutQuoteChar();
    final Converter<T, T, Void> identity = (element, unused) -> element;
    return new CsvBulkWriter<>(mapper, pojoSchema, identity, null, stream);
}
Use of com.fasterxml.jackson.dataformat.csv.CsvMapper in project flink by splunk: the method createArrayRowFieldConverter of class RowDataToCsvConverters.
/**
 * Creates a converter that turns an array-typed row field into a Jackson
 * {@code ArrayNode}, delegating each element to a null-aware element converter.
 */
private static RowFieldConverter createArrayRowFieldConverter(ArrayType type) {
    final ArrayElementConverter elementConverter =
            createNullableArrayElementConverter(type.getElementType());
    return (csvMapper, container, row, pos) -> {
        final ArrayData elements = row.getArray(pos);
        final ArrayNode result = csvMapper.createArrayNode();
        final int size = elements.size();
        for (int idx = 0; idx < size; idx++) {
            result.add(elementConverter.convert(csvMapper, result, elements, idx));
        }
        return result;
    };
}
Aggregations