Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache:
the class CsvRowDeserializationSchema, method equals.
/**
 * Compares this deserialization schema with another for equality.
 *
 * <p>Two schemas are equal when they produce the same row type, share the same
 * parse-error handling, and are configured with an equivalent CSV layout
 * (column separator, comment handling, array element separator, quote char,
 * escape char, and null-value token).
 */
@Override
public boolean equals(Object o) {
    // Fast path: same instance.
    if (o == this) {
        return true;
    }
    // Exact runtime class must match; getClass() (not instanceof) rejects subclasses.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final CsvRowDeserializationSchema other = (CsvRowDeserializationSchema) o;
    final CsvSchema thisSchema = this.csvSchema;
    final CsvSchema thatSchema = other.csvSchema;
    return typeInfo.equals(other.typeInfo)
            && ignoreParseErrors == other.ignoreParseErrors
            && thisSchema.getColumnSeparator() == thatSchema.getColumnSeparator()
            && thisSchema.allowsComments() == thatSchema.allowsComments()
            && thisSchema.getArrayElementSeparator().equals(thatSchema.getArrayElementSeparator())
            && thisSchema.getQuoteChar() == thatSchema.getQuoteChar()
            && thisSchema.getEscapeChar() == thatSchema.getEscapeChar()
            // null value is a char[], so use Arrays.equals rather than ==.
            && Arrays.equals(thisSchema.getNullValue(), thatSchema.getNullValue());
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache:
the class CsvRowSerializationSchema, method equals.
/**
 * Compares this serialization schema with another for equality.
 *
 * <p>Two schemas are equal when they target the same row type and are configured
 * with an equivalent CSV layout (column separator, line separator, array element
 * separator, quote char, escape char, and null-value token).
 */
@Override
public boolean equals(Object o) {
    // Identity check first, consistent with the equals() implementations of the
    // sibling CSV schema classes (the original checked null/class before identity).
    if (this == o) {
        return true;
    }
    // Exact runtime class must match; subclasses are not considered equal.
    if (o == null || o.getClass() != this.getClass()) {
        return false;
    }
    final CsvRowSerializationSchema that = (CsvRowSerializationSchema) o;
    final CsvSchema otherSchema = that.csvSchema;
    return typeInfo.equals(that.typeInfo)
            && csvSchema.getColumnSeparator() == otherSchema.getColumnSeparator()
            // line separator and null value are char[]/byte[]-like, so compare via Arrays.equals.
            && Arrays.equals(csvSchema.getLineSeparator(), otherSchema.getLineSeparator())
            && csvSchema.getArrayElementSeparator().equals(otherSchema.getArrayElementSeparator())
            && csvSchema.getQuoteChar() == otherSchema.getQuoteChar()
            && csvSchema.getEscapeChar() == otherSchema.getEscapeChar()
            && Arrays.equals(csvSchema.getNullValue(), otherSchema.getNullValue());
}
Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema in project flink by apache:
the class CsvRowDataDeserializationSchema, method equals.
/**
 * Compares this deserialization schema with another for equality.
 *
 * <p>Equality requires the same produced type information, the same parse-error
 * handling flag, and an equivalent CSV configuration.
 */
@Override
public boolean equals(Object o) {
    if (o == this) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final CsvRowDataDeserializationSchema other = (CsvRowDataDeserializationSchema) o;
    // Early returns mirror the original short-circuit evaluation order exactly.
    if (!resultTypeInfo.equals(other.resultTypeInfo)) {
        return false;
    }
    if (ignoreParseErrors != other.ignoreParseErrors) {
        return false;
    }
    final CsvSchema mine = csvSchema;
    final CsvSchema theirs = other.csvSchema;
    if (mine.getColumnSeparator() != theirs.getColumnSeparator()) {
        return false;
    }
    if (mine.allowsComments() != theirs.allowsComments()) {
        return false;
    }
    if (!mine.getArrayElementSeparator().equals(theirs.getArrayElementSeparator())) {
        return false;
    }
    if (mine.getQuoteChar() != theirs.getQuoteChar()) {
        return false;
    }
    if (mine.getEscapeChar() != theirs.getEscapeChar()) {
        return false;
    }
    // Null-value token is an array; element-wise comparison required.
    return Arrays.equals(mine.getNullValue(), theirs.getNullValue());
}
Use of the shaded Jackson CsvSchema class in project dhis2-core by dhis2:
the class JacksonCsvUtils, method toCsv.
/**
 * Writes the given value to the given output stream as CSV using
 * {@link CsvMapper}. The column schema is inferred from the given type using
 * {@link CsvSchema}. A header line is included.
 *
 * @param value the value to write.
 * @param type the class from which the CSV column schema is inferred.
 * @param out the {@link OutputStream} to write to.
 * @throws IOException if the write operation fails.
 */
public static void toCsv(Object value, Class<?> type, OutputStream out) throws IOException {
// Shared, preconfigured mapper instance; presumably thread-safe once configured — TODO confirm.
CsvMapper csvMapper = JacksonObjectMapperConfig.csvMapper;
// Derive the columns from the bean's properties and emit a header row first.
CsvSchema schema = csvMapper.schemaFor(type).withHeader();
ObjectWriter writer = csvMapper.writer(schema);
writer.writeValue(out, value);
}
Use of the shaded Jackson CsvSchema class in project dhis2-core by dhis2:
the class AbstractFullReadOnlyController, method getObjectListCsv.
// Serves the entity list as CSV ("application/csv"). Only metadata schemas are
// supported; each requested simple (scalar) property becomes one CSV column.
@GetMapping(produces = "application/csv")
public void getObjectListCsv(@RequestParam Map<String, String> rpParameters, OrderParams orderParams, @CurrentUser User currentUser, @RequestParam(defaultValue = ",") char separator, @RequestParam(defaultValue = "false") boolean skipHeader, HttpServletResponse response) throws IOException {
// Resolve ordering, field selection and filters from the request parameters.
List<Order> orders = orderParams.getOrders(getSchema());
List<String> fields = Lists.newArrayList(contextService.getParameterValues("fields"));
List<String> filters = Lists.newArrayList(contextService.getParameterValues("filter"));
WebOptions options = new WebOptions(rpParameters);
WebMetadata metadata = new WebMetadata();
// No explicit field selection: fall back to the default preset.
if (fields.isEmpty()) {
fields.addAll(Preset.defaultPreset().getFields());
}
// only support metadata
if (!getSchema().isMetadata()) {
throw new HttpClientErrorException(HttpStatus.NOT_FOUND);
}
// Enforce read access before touching any entity data.
if (!aclService.canRead(currentUser, getEntityClass())) {
throw new ReadAccessDeniedException("You don't have the proper permissions to read objects of this type.");
}
List<T> entities = getEntityList(metadata, options, filters, orders);
// Build the CSV schema: one column per requested simple property, in request order.
CsvSchema schema;
CsvSchema.Builder schemaBuilder = CsvSchema.builder();
List<Property> properties = new ArrayList<>();
for (String field : fields) {
// A field entry may contain several comma-separated names. Complex selectors
// such as group[id] do not resolve to a simple property and are skipped below.
for (String splitField : field.split(",")) {
Property property = getSchema().getProperty(splitField);
// Only scalar properties can be rendered as a single CSV cell.
if (property == null || !property.isSimple()) {
continue;
}
schemaBuilder.addColumn(property.getName());
properties.add(property);
}
}
schema = schemaBuilder.build().withColumnSeparator(separator);
if (!skipHeader) {
schema = schema.withHeader();
}
CsvMapper csvMapper = new CsvMapper();
// Ignore map entries that have no matching schema column instead of failing.
csvMapper.configure(JsonGenerator.Feature.IGNORE_UNKNOWN, true);
// Project each entity to a name->value map via its property getters; the schema
// (not the map's iteration order) determines the column order in the output.
List<Map<String, Object>> csvObjects = entities.stream().map(e -> {
Map<String, Object> map = new HashMap<>();
for (Property property : properties) {
Object value = ReflectionUtils.invokeMethod(e, property.getGetterMethod());
map.put(property.getName(), value);
}
return map;
}).collect(toList());
csvMapper.writer(schema).writeValue(response.getWriter(), csvObjects);
response.flushBuffer();
}
Aggregations