Usage in the Apache Flink project: the deserialize method of the class ColumnJsonDeserializer. (NOTE: this snippet appears to handle Flink's table catalog Column type rather than CsvSchema.Column as the index label suggests — verify against the original file's imports.)
@Override
public Column deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    // Materialize the whole column object as a tree so fields can be read by name.
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    String columnName = jsonNode.required(NAME).asText();
    // The "kind" field is optional; a missing kind denotes a physical column.
    JsonNode kindNode = jsonNode.get(KIND);
    String columnKind = kindNode == null ? KIND_PHYSICAL : kindNode.asText();
    Column column;
    if (KIND_PHYSICAL.equals(columnKind)) {
        column = deserializePhysicalColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
    } else if (KIND_COMPUTED.equals(columnKind)) {
        column = deserializeComputedColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
    } else if (KIND_METADATA.equals(columnKind)) {
        column = deserializeMetadataColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
    } else {
        throw new ValidationException(String.format("Cannot recognize column type '%s'. Allowed types: %s.", columnKind, SUPPORTED_KINDS));
    }
    // A comment is optional for every column kind; attach it (or null) to the result.
    return column.withComment(deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx).orElse(null));
}
Usage of CsvSchema.Column in the Apache Flink project: the convert(RowTypeInfo) method of the class CsvRowSchemaConverter.
/**
 * Converts a {@link RowTypeInfo} into the equivalent {@link CsvSchema}.
 *
 * <p>Each row field becomes one CSV column, keeping the field's position and name; the
 * CSV column type is derived from the field's {@link TypeInformation}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
    final String[] fieldNames = rowType.getFieldNames();
    final TypeInformation<?>[] fieldTypes = rowType.getFieldTypes();
    final Builder schemaBuilder = new CsvSchema.Builder();
    for (int pos = 0; pos < rowType.getArity(); pos++) {
        schemaBuilder.addColumn(new Column(pos, fieldNames[pos], convertType(fieldNames[pos], fieldTypes[pos])));
    }
    return schemaBuilder.build();
}
Usage of CsvSchema.Column in the Apache Flink project: the convert(RowType) method of the class CsvRowSchemaConverter.
/**
 * Converts a {@link RowType} into the equivalent {@link CsvSchema}.
 *
 * <p>Each {@link RowType.RowField} becomes one CSV column at its original position; the
 * CSV column type is derived from the field's {@link LogicalType}.
 */
public static CsvSchema convert(RowType rowType) {
    Builder schemaBuilder = new CsvSchema.Builder();
    int position = 0;
    for (RowType.RowField field : rowType.getFields()) {
        String name = field.getName();
        LogicalType type = field.getType();
        schemaBuilder.addColumn(new Column(position, name, convertType(name, type)));
        position++;
    }
    return schemaBuilder.build();
}
Usage in the Apache Flink project: the deserialize method of the class ResolvedSchemaJsonDeserializer. (NOTE: this snippet appears to deserialize Flink's table catalog Column type, not CsvSchema.Column as the index label suggests.)
@Override
public ResolvedSchema deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    // Read the schema object as a tree; COLUMNS and WATERMARK_SPECS are required fields.
    ObjectNode root = jsonParser.readValueAsTree();
    List<Column> columns =
            ctx.readValue(
                    traverse(root.required(COLUMNS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, Column.class));
    List<WatermarkSpec> watermarkSpecs =
            ctx.readValue(
                    traverse(root.required(WATERMARK_SPECS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, WatermarkSpec.class));
    // The primary key is optional; absent means the schema has no primary-key constraint.
    UniqueConstraint primaryKey =
            deserializeOptionalField(root, PRIMARY_KEY, UniqueConstraint.class, jsonParser.getCodec(), ctx)
                    .orElse(null);
    return new ResolvedSchema(columns, watermarkSpecs, primaryKey);
}
Usage in the Apache Flink project: the translateToPlanInternal method of the class StreamExecSink. (NOTE: this snippet does not reference CsvSchema.Column; the index label appears mismatched.)
@SuppressWarnings("unchecked")
@Override
protected Transformation<Object> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
    // Translate the single input edge first; the sink consumes its row stream.
    final ExecEdge inputEdge = getInputEdges().get(0);
    final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner);
    final RowType inputRowType = (RowType) inputEdge.getOutputType();
    final DynamicTableSink tableSink = tableSinkSpec.getTableSink(planner.getFlinkContext());
    // Collect sinks tolerate multiple rowtime columns; regular sinks do not.
    final boolean isCollectSink = tableSink instanceof CollectDynamicSink;
    // Gather the positions of every rowtime attribute column in the input row.
    final List<Integer> rowtimeFieldIndices = new ArrayList<>();
    for (int fieldIndex = 0; fieldIndex < inputRowType.getFieldCount(); fieldIndex++) {
        if (TypeCheckUtils.isRowTime(inputRowType.getTypeAt(fieldIndex))) {
            rowtimeFieldIndices.add(fieldIndex);
        }
    }
    final int rowtimeFieldIndex;
    if (rowtimeFieldIndices.size() > 1 && !isCollectSink) {
        // Ambiguous event-time column: the user must cast all but one to a plain timestamp.
        final String rowtimeColumns = rowtimeFieldIndices.stream().map(i -> inputRowType.getFieldNames().get(i)).collect(Collectors.joining(", "));
        throw new TableException(String.format("The query contains more than one rowtime attribute column [%s] for writing into table '%s'.\nPlease select the column that should be used as the event-time timestamp for the table sink by casting all other columns to regular TIMESTAMP or TIMESTAMP_LTZ.", rowtimeColumns, tableSinkSpec.getContextResolvedTable().getIdentifier().asSummaryString()));
    } else if (rowtimeFieldIndices.size() == 1) {
        rowtimeFieldIndex = rowtimeFieldIndices.get(0);
    } else {
        // No rowtime attribute present.
        rowtimeFieldIndex = -1;
    }
    return createSinkTransformation(planner.getExecEnv(), config, inputTransform, tableSink, rowtimeFieldIndex, upsertMaterialize);
}
Aggregations