Example usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in the project ma-core-public (by infiniteautomation): the writeInternal method of the class ExceptionCsvMessageConverter.
@Override
protected void writeInternal(Object object, Type type, HttpOutputMessage outputMessage) throws IOException, HttpMessageNotWritableException {
    // Resolve the JSON encoding from the negotiated content type and open a
    // generator directly over the response body stream.
    MediaType contentType = outputMessage.getHeaders().getContentType();
    JsonEncoding encoding = getJsonEncoding(contentType);
    JsonGenerator generator = this.objectMapper.getFactory().createGenerator(outputMessage.getBody(), encoding);
    try {
        // REST v2 exceptions are wrapped in a CSV-friendly adapter and written
        // with their dedicated schema; any other payload uses the generic
        // exception schema unchanged.
        final CsvSchema schema;
        final Object payload;
        if (object instanceof AbstractRestV2Exception) {
            schema = this.restExceptionSchema;
            payload = new CsvRestException((AbstractRestV2Exception) object);
        } else {
            schema = this.exceptionSchema;
            payload = object;
        }
        writePrefix(generator, payload);
        this.objectMapper.writer().with(schema).writeValue(generator, payload);
        writeSuffix(generator, payload);
        // Flush without closing: the servlet container owns the body stream.
        generator.flush();
    } catch (JsonProcessingException ex) {
        // Preserve the cause so the serialization failure is diagnosable upstream.
        throw new HttpMessageNotWritableException("Could not write content: " + ex.getMessage(), ex);
    }
}
Example usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in the project flink (by apache): the forPojo factory method of the class CsvBulkWriter.
/**
 * Builds a writer based on a POJO class definition.
 *
 * <p>The CSV column layout is derived from the POJO's fields via
 * {@code CsvMapper.schemaFor}, with quoting disabled.
 *
 * @param pojoClass The class of the POJO.
 * @param stream The output stream.
 * @param <T> The type of the elements accepted by this writer.
 */
static <T> CsvBulkWriter<T, T, Void> forPojo(Class<T> pojoClass, FSDataOutputStream stream) {
    // Elements are written as-is, so the element converter is the identity.
    final Converter<T, T, Void> identity = (element, ctx) -> element;
    final CsvMapper mapper = new CsvMapper();
    final CsvSchema pojoSchema = mapper.schemaFor(pojoClass).withoutQuoteChar();
    // No converter context is needed for the identity conversion.
    return new CsvBulkWriter<>(mapper, pojoSchema, identity, null, stream);
}
Example usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in the project flink (by apache): the buildCsvSchema method of the class CsvFileFormatFactory.
/**
 * Converts the logical row type into a Jackson {@link CsvSchema} and then layers
 * the user-supplied format options on top of the converted defaults.
 *
 * @param rowType the physical row type the CSV columns are derived from
 * @param options the format options controlling separators, quoting, comments,
 *     escaping and the null literal
 * @return the fully configured CSV schema
 */
private static CsvSchema buildCsvSchema(RowType rowType, ReadableConfig options) {
    final CsvSchema.Builder builder = CsvRowSchemaConverter.convert(rowType).rebuild();
    // Field delimiter may contain Java escape sequences (e.g. "\t"); unescape
    // before taking the first character.
    options.getOptional(FIELD_DELIMITER)
            .map(delim -> StringEscapeUtils.unescapeJava(delim).charAt(0))
            .ifPresent(builder::setColumnSeparator);
    if (options.get(DISABLE_QUOTE_CHARACTER)) {
        builder.disableQuoteChar();
    } else {
        // A custom quote character is only honoured when quoting is enabled.
        options.getOptional(QUOTE_CHARACTER)
                .map(quote -> quote.charAt(0))
                .ifPresent(builder::setQuoteChar);
    }
    options.getOptional(ALLOW_COMMENTS).ifPresent(builder::setAllowComments);
    options.getOptional(ARRAY_ELEMENT_DELIMITER).ifPresent(builder::setArrayElementSeparator);
    options.getOptional(ESCAPE_CHARACTER)
            .map(esc -> esc.charAt(0))
            .ifPresent(builder::setEscapeChar);
    options.getOptional(NULL_LITERAL).ifPresent(builder::setNullValue);
    return builder.build();
}
Example usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in the project flink (by apache): the createEncodingFormat method of the class CsvFileFormatFactory.
@Override
public EncodingFormat<Factory<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    return new EncodingFormat<BulkWriter.Factory<RowData>>() {
        @Override
        public BulkWriter.Factory<RowData> createRuntimeEncoder(DynamicTableSink.Context sinkContext, DataType physicalDataType) {
            final RowType rowType = (RowType) physicalDataType.getLogicalType();
            final CsvSchema csvSchema = buildCsvSchema(rowType, formatOptions);
            final RowDataToCsvConverter rowConverter = RowDataToCsvConverters.createRowConverter(rowType);
            final CsvMapper csvMapper = new CsvMapper();
            // Reusable JSON container the row converter populates before the
            // mapper encodes it as a CSV line.
            final RowDataToCsvConverter.RowDataToCsvFormatConverterContext converterContext =
                    new RowDataToCsvConverter.RowDataToCsvFormatConverterContext(csvMapper, csvMapper.createObjectNode());
            return out -> CsvBulkWriter.forSchema(csvMapper, csvSchema, rowConverter, converterContext, out);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            // The CSV file sink only supports append-only (insert) streams.
            return ChangelogMode.insertOnly();
        }
    };
}
Example usage of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in the project flink (by apache): the equals method of the class CsvRowDataSerializationSchema.
/**
 * Structural equality: two serialization schemas are equal when they target the
 * same row type and their CSV schemas agree on every formatting property that
 * affects the produced output (column separator, line separator, array element
 * separator, quote char, escape char and null literal).
 *
 * <p>{@link CsvSchema} does not define a value-based {@code equals}, so the
 * relevant properties are compared one by one here.
 */
@Override
public boolean equals(Object o) {
    // Cheap identity check first, then reject null and differing runtime classes.
    if (this == o) {
        return true;
    }
    if (o == null || o.getClass() != this.getClass()) {
        return false;
    }
    final CsvRowDataSerializationSchema that = (CsvRowDataSerializationSchema) o;
    final CsvSchema otherSchema = that.csvSchema;
    return rowType.equals(that.rowType)
            && csvSchema.getColumnSeparator() == otherSchema.getColumnSeparator()
            && Arrays.equals(csvSchema.getLineSeparator(), otherSchema.getLineSeparator())
            // Null-safe compare: avoids an NPE if the array element separator is
            // ever absent/disabled on either schema.
            && java.util.Objects.equals(csvSchema.getArrayElementSeparator(), otherSchema.getArrayElementSeparator())
            && csvSchema.getQuoteChar() == otherSchema.getQuoteChar()
            && csvSchema.getEscapeChar() == otherSchema.getEscapeChar()
            && Arrays.equals(csvSchema.getNullValue(), otherSchema.getNullValue());
}
Aggregations