
Example 1 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class ColumnJsonDeserializer, method deserialize:

@Override
public Column deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    String columnName = jsonNode.required(NAME).asText();
    // The "kind" field is optional and defaults to a physical column.
    String columnKind = Optional.ofNullable(jsonNode.get(KIND)).map(JsonNode::asText).orElse(KIND_PHYSICAL);
    Column column;
    switch(columnKind) {
        case KIND_PHYSICAL:
            column = deserializePhysicalColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_COMPUTED:
            column = deserializeComputedColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        case KIND_METADATA:
            column = deserializeMetadataColumn(columnName, jsonNode, jsonParser.getCodec(), ctx);
            break;
        default:
            throw new ValidationException(String.format("Cannot recognize column type '%s'. Allowed types: %s.", columnKind, SUPPORTED_KINDS));
    }
    // The comment is optional; an absent comment is mapped to null.
    return column.withComment(
            deserializeOptionalField(jsonNode, COMMENT, String.class, jsonParser.getCodec(), ctx)
                    .orElse(null));
}
Also used : ValidationException (org.apache.flink.table.api.ValidationException), ObjectNode (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode), Column (org.apache.flink.table.catalog.Column)
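
For orientation, a minimal sketch of how a deserializer like this could be registered on a plain Jackson ObjectMapper. The wiring and the "dataType" key are assumptions for illustration; in Flink itself the registration happens inside the planner's JSON plan-serde setup rather than in user code, and nested fields such as the data type need their own deserializers:

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.flink.table.catalog.Column;

// Hypothetical wiring; assumes ColumnJsonDeserializer is accessible here.
ObjectMapper mapper = new ObjectMapper();
SimpleModule module = new SimpleModule();
module.addDeserializer(Column.class, new ColumnJsonDeserializer());
mapper.registerModule(module);

// Shape accepted by deserialize() above ("dataType" is an assumed key):
// {"name": "user_id", "dataType": "BIGINT", "comment": "primary id"}
// "kind" may be omitted and then defaults to "physical"; "comment" is
// optional and becomes null when absent.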

Example 2 with Column

Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column in project flink by apache.

The class CsvRowSchemaConverter, method convert:

/**
 * Convert {@link RowTypeInfo} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowTypeInfo rowType) {
    final Builder builder = new CsvSchema.Builder();
    final String[] fields = rowType.getFieldNames();
    final TypeInformation<?>[] types = rowType.getFieldTypes();
    for (int i = 0; i < rowType.getArity(); i++) {
        builder.addColumn(new Column(i, fields[i], convertType(fields[i], types[i])));
    }
    return builder.build();
}
Also used : Column (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column), Builder (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder), TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation)
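
A minimal usage sketch for this converter: build a two-field RowTypeInfo and derive the CSV schema from it. The field names and types are made up for illustration:

import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema;

// Two fields, converted index-for-index into typed CSV columns.
RowTypeInfo rowType = new RowTypeInfo(
        new TypeInformation<?>[] {Types.STRING, Types.INT},
        new String[] {"name", "age"});
CsvSchema schema = CsvRowSchemaConverter.convert(rowType);
System.out.println(schema.getColumnDesc()); // prints the column layout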

Example 3 with Column

Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column in project flink by apache.

The class CsvRowSchemaConverter, method convert:

/**
 * Convert {@link RowType} to {@link CsvSchema}.
 */
public static CsvSchema convert(RowType rowType) {
    Builder builder = new CsvSchema.Builder();
    List<RowType.RowField> fields = rowType.getFields();
    for (int i = 0; i < rowType.getFieldCount(); i++) {
        String fieldName = fields.get(i).getName();
        LogicalType fieldType = fields.get(i).getType();
        builder.addColumn(new Column(i, fieldName, convertType(fieldName, fieldType)));
    }
    return builder.build();
}
Also used : Column (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column), Builder (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder), LogicalType (org.apache.flink.table.types.logical.LogicalType)
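
The same converter for the newer logical-type stack, sketched with two standard table-API type constructors; names and types are again illustrative:

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema;
import org.apache.flink.table.types.logical.BigIntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

// The logical-type counterpart of the RowTypeInfo example above.
RowType rowType = RowType.of(
        new LogicalType[] {new BigIntType(), new VarCharType(VarCharType.MAX_LENGTH)},
        new String[] {"id", "name"});
CsvSchema schema = CsvRowSchemaConverter.convert(rowType);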

Example 4 with Column

Use of org.apache.flink.table.catalog.Column in project flink by apache.

The class ResolvedSchemaJsonDeserializer, method deserialize:

@Override
public ResolvedSchema deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    ObjectNode jsonNode = jsonParser.readValueAsTree();
    // Columns and watermark specs are read back through the deserializers
    // registered on the deserialization context.
    List<Column> columns =
            ctx.readValue(
                    traverse(jsonNode.required(COLUMNS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, Column.class));
    List<WatermarkSpec> watermarkSpecs =
            ctx.readValue(
                    traverse(jsonNode.required(WATERMARK_SPECS), jsonParser.getCodec()),
                    ctx.getTypeFactory().constructCollectionType(List.class, WatermarkSpec.class));
    UniqueConstraint primaryKey = deserializeOptionalField(jsonNode, PRIMARY_KEY, UniqueConstraint.class, jsonParser.getCodec(), ctx).orElse(null);
    return new ResolvedSchema(columns, watermarkSpecs, primaryKey);
}
Also used : ObjectNode (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode), WatermarkSpec (org.apache.flink.table.catalog.WatermarkSpec), Column (org.apache.flink.table.catalog.Column), UniqueConstraint (org.apache.flink.table.catalog.UniqueConstraint), List (java.util.List), ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema)
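
To make the target shape concrete, here is a small sketch of the object this deserializer reconstructs, built directly through the public ResolvedSchema constructor; the column names are made up for illustration:

import java.util.Arrays;
import java.util.Collections;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.catalog.Column;
import org.apache.flink.table.catalog.ResolvedSchema;

// Columns, watermark specs, and an optional primary key (null when
// absent), mirroring the three fields read by deserialize() above.
ResolvedSchema schema = new ResolvedSchema(
        Arrays.asList(
                Column.physical("user_id", DataTypes.BIGINT()),
                Column.physical("name", DataTypes.STRING())),
        Collections.emptyList(), // no watermark specs
        null);                   // no primary key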

Example 5 with Column

Use of org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column in project flink by apache.

The class StreamExecSink, method translateToPlanInternal:

@SuppressWarnings("unchecked")
@Override
protected Transformation<Object> translateToPlanInternal(PlannerBase planner, ExecNodeConfig config) {
    final ExecEdge inputEdge = getInputEdges().get(0);
    final Transformation<RowData> inputTransform = (Transformation<RowData>) inputEdge.translateToPlan(planner);
    final RowType inputRowType = (RowType) inputEdge.getOutputType();
    final DynamicTableSink tableSink = tableSinkSpec.getTableSink(planner.getFlinkContext());
    final boolean isCollectSink = tableSink instanceof CollectDynamicSink;
    final List<Integer> rowtimeFieldIndices = new ArrayList<>();
    // Collect every field whose logical type carries a rowtime attribute.
    for (int i = 0; i < inputRowType.getFieldCount(); ++i) {
        if (TypeCheckUtils.isRowTime(inputRowType.getTypeAt(i))) {
            rowtimeFieldIndices.add(i);
        }
    }
    final int rowtimeFieldIndex;
    // More than one rowtime column is ambiguous for a regular sink;
    // only the special collect sink tolerates it.
    if (rowtimeFieldIndices.size() > 1 && !isCollectSink) {
        throw new TableException(
                String.format(
                        "The query contains more than one rowtime attribute column [%s] for writing into table '%s'.\n"
                                + "Please select the column that should be used as the event-time timestamp "
                                + "for the table sink by casting all other columns to regular TIMESTAMP or TIMESTAMP_LTZ.",
                        rowtimeFieldIndices.stream()
                                .map(i -> inputRowType.getFieldNames().get(i))
                                .collect(Collectors.joining(", ")),
                        tableSinkSpec.getContextResolvedTable().getIdentifier().asSummaryString()));
    } else if (rowtimeFieldIndices.size() == 1) {
        rowtimeFieldIndex = rowtimeFieldIndices.get(0);
    } else {
        rowtimeFieldIndex = -1;
    }
    return createSinkTransformation(planner.getExecEnv(), config, inputTransform, tableSink, rowtimeFieldIndex, upsertMaterialize);
}
Also used : InputProperty (org.apache.flink.table.planner.plan.nodes.exec.InputProperty), JsonCreator (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator), ChangelogMode (org.apache.flink.table.connector.ChangelogMode), CollectDynamicSink (org.apache.flink.table.planner.connectors.CollectDynamicSink), RowType (org.apache.flink.table.types.logical.RowType), JsonInclude (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude), ExecNode (org.apache.flink.table.planner.plan.nodes.exec.ExecNode), ArrayList (java.util.ArrayList), DynamicTableSinkSpec (org.apache.flink.table.planner.plan.nodes.exec.spec.DynamicTableSinkSpec), FlinkVersion (org.apache.flink.FlinkVersion), TypeCheckUtils (org.apache.flink.table.runtime.typeutils.TypeCheckUtils), ExecNodeContext (org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext), RowData (org.apache.flink.table.data.RowData), DynamicTableSink (org.apache.flink.table.connector.sink.DynamicTableSink), PlannerBase (org.apache.flink.table.planner.delegation.PlannerBase), ExecNodeMetadata (org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata), ExecNodeConfig (org.apache.flink.table.planner.plan.nodes.exec.ExecNodeConfig), TableException (org.apache.flink.table.api.TableException), CommonExecSink (org.apache.flink.table.planner.plan.nodes.exec.common.CommonExecSink), Collectors (java.util.stream.Collectors), JsonProperty (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty), List (java.util.List), ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge), LogicalType (org.apache.flink.table.types.logical.LogicalType), Transformation (org.apache.flink.api.dag.Transformation), Collections (java.util.Collections)
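
The rowtime selection above distills to a small rule. A standalone restatement with a hypothetical helper name, assuming the same inputs as the method body:

import java.util.List;

// Hypothetical helper restating the rule: ambiguity fails for regular
// sinks, a single rowtime column is used as-is, and no rowtime column
// (or an ambiguous collect sink) yields -1.
static int selectRowtimeIndex(List<Integer> rowtimeFieldIndices, boolean isCollectSink) {
    if (rowtimeFieldIndices.size() > 1 && !isCollectSink) {
        throw new IllegalStateException(
                "More than one rowtime attribute column: " + rowtimeFieldIndices);
    }
    return rowtimeFieldIndices.size() == 1 ? rowtimeFieldIndices.get(0) : -1;
}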

Aggregations

List (java.util.List) ×2
ObjectNode (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode) ×2
Builder (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Builder) ×2
Column (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.dataformat.csv.CsvSchema.Column) ×2
Column (org.apache.flink.table.catalog.Column) ×2
LogicalType (org.apache.flink.table.types.logical.LogicalType) ×2
ArrayList (java.util.ArrayList) ×1
Collections (java.util.Collections) ×1
Collectors (java.util.stream.Collectors) ×1
FlinkVersion (org.apache.flink.FlinkVersion) ×1
TypeInformation (org.apache.flink.api.common.typeinfo.TypeInformation) ×1
Transformation (org.apache.flink.api.dag.Transformation) ×1
JsonCreator (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator) ×1
JsonInclude (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonInclude) ×1
JsonProperty (org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty) ×1
TableException (org.apache.flink.table.api.TableException) ×1
ValidationException (org.apache.flink.table.api.ValidationException) ×1
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema) ×1
UniqueConstraint (org.apache.flink.table.catalog.UniqueConstraint) ×1
WatermarkSpec (org.apache.flink.table.catalog.WatermarkSpec) ×1