
Example 66 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class RegistryAvroFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format("Option %s.%s is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new AvroRowDataSerializationSchema(
                    rowType,
                    ConfluentRegistryAvroSerializationSchema.forGeneric(
                            subject.get(),
                            AvroSchemaConverter.convertToSchema(rowType),
                            schemaRegistryURL,
                            optionalPropertiesMap),
                    RowDataToAvroConverters.createConverter(rowType));
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), AvroRowDataSerializationSchema (org.apache.flink.formats.avro.AvroRowDataSerializationSchema), ValidationException (org.apache.flink.table.api.ValidationException), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
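
For context, the Avro schema registered under the configured subject is derived from the table's physical row type with AvroSchemaConverter.convertToSchema, the same call the runtime encoder above makes. A minimal, self-contained sketch; the field names and the SchemaSketch class are illustrative, not taken from the factory:

import org.apache.avro.Schema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;

public class SchemaSketch {
    public static void main(String[] args) {
        // Physical sink type, equivalent to ROW<id INT, name STRING> NOT NULL.
        DataType consumedDataType = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.INT()),
                DataTypes.FIELD("name", DataTypes.STRING())).notNull();
        RowType rowType = (RowType) consumedDataType.getLogicalType();
        // Same conversion the encoder performs before handing the schema to the registry client.
        Schema avroSchema = AvroSchemaConverter.convertToSchema(rowType);
        System.out.println(avroSchema.toString(true));
    }
}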

Example 67 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class RegistryAvroFormatFactory, method createDecodingFormat:

@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    return new ProjectableDecodingFormat<DeserializationSchema<RowData>>() {

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(DynamicTableSource.Context context, DataType producedDataType, int[][] projections) {
            producedDataType = Projection.of(projections).project(producedDataType);
            final RowType rowType = (RowType) producedDataType.getLogicalType();
            final TypeInformation<RowData> rowDataTypeInfo = context.createTypeInformation(producedDataType);
            return new AvroRowDataDeserializationSchema(
                    ConfluentRegistryAvroDeserializationSchema.forGeneric(
                            AvroSchemaConverter.convertToSchema(rowType),
                            schemaRegistryURL,
                            optionalPropertiesMap),
                    AvroToRowDataConverters.createRowConverter(rowType),
                    rowDataTypeInfo);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: RowData (org.apache.flink.table.data.RowData), ProjectableDecodingFormat (org.apache.flink.table.connector.format.ProjectableDecodingFormat), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType), AvroRowDataDeserializationSchema (org.apache.flink.formats.avro.AvroRowDataDeserializationSchema)
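
The projection push-down step in createRuntimeDecoder is easy to reproduce on its own. A minimal sketch, with illustrative field names and class name, of how Projection.of(projections).project(...) narrows and reorders the produced data type before the deserializer is built:

import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.connector.Projection;
import org.apache.flink.table.types.DataType;

public class ProjectionSketch {
    public static void main(String[] args) {
        DataType producedDataType = DataTypes.ROW(
                DataTypes.FIELD("id", DataTypes.INT()),
                DataTypes.FIELD("name", DataTypes.STRING()),
                DataTypes.FIELD("ts", DataTypes.TIMESTAMP(3)));
        // Top-level projection keeping only "name" and "id", in that order.
        int[][] projections = new int[][] {{1}, {0}};
        DataType projected = Projection.of(projections).project(producedDataType);
        // Prints something like ROW<`name` STRING, `id` INT>
        System.out.println(projected);
    }
}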

Example 68 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class DebeziumAvroFormatFactory, method createEncodingFormat:

@Override
public EncodingFormat<SerializationSchema<RowData>> createEncodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Optional<String> subject = formatOptions.getOptional(SUBJECT);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    if (!subject.isPresent()) {
        throw new ValidationException(String.format("Option '%s.%s' is required for serialization", IDENTIFIER, SUBJECT.key()));
    }
    return new EncodingFormat<SerializationSchema<RowData>>() {

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }

        @Override
        public SerializationSchema<RowData> createRuntimeEncoder(DynamicTableSink.Context context, DataType consumedDataType) {
            final RowType rowType = (RowType) consumedDataType.getLogicalType();
            return new DebeziumAvroSerializationSchema(rowType, schemaRegistryURL, subject.get(), optionalPropertiesMap);
        }
    };
}
Also used: EncodingFormat (org.apache.flink.table.connector.format.EncodingFormat), RowData (org.apache.flink.table.data.RowData), ValidationException (org.apache.flink.table.api.ValidationException), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
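
Unlike the plain Avro format in Example 66, the Debezium Avro format advertises the full changelog. A minimal sketch contrasting the two ChangelogMode values returned above; the ChangelogModeSketch class is illustrative:

import org.apache.flink.table.connector.ChangelogMode;
import org.apache.flink.types.RowKind;

public class ChangelogModeSketch {
    public static void main(String[] args) {
        ChangelogMode insertOnly = ChangelogMode.insertOnly();
        ChangelogMode full = ChangelogMode.newBuilder()
                .addContainedKind(RowKind.INSERT)
                .addContainedKind(RowKind.UPDATE_BEFORE)
                .addContainedKind(RowKind.UPDATE_AFTER)
                .addContainedKind(RowKind.DELETE)
                .build();
        // Only the Debezium-style mode accepts retractions and deletions.
        System.out.println(insertOnly.contains(RowKind.DELETE)); // false
        System.out.println(full.contains(RowKind.DELETE));       // true
    }
}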

Example 69 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class DebeziumAvroFormatFactory, method createDecodingFormat:

@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    String schemaRegistryURL = formatOptions.get(URL);
    Map<String, ?> optionalPropertiesMap = buildOptionalPropertiesMap(formatOptions);
    return new ProjectableDecodingFormat<DeserializationSchema<RowData>>() {

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(DynamicTableSource.Context context, DataType producedDataType, int[][] projections) {
            producedDataType = Projection.of(projections).project(producedDataType);
            final RowType rowType = (RowType) producedDataType.getLogicalType();
            final TypeInformation<RowData> producedTypeInfo = context.createTypeInformation(producedDataType);
            return new DebeziumAvroDeserializationSchema(rowType, producedTypeInfo, schemaRegistryURL, optionalPropertiesMap);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            return ChangelogMode.newBuilder()
                    .addContainedKind(RowKind.INSERT)
                    .addContainedKind(RowKind.UPDATE_BEFORE)
                    .addContainedKind(RowKind.UPDATE_AFTER)
                    .addContainedKind(RowKind.DELETE)
                    .build();
        }
    };
}
Also used: RowData (org.apache.flink.table.data.RowData), ProjectableDecodingFormat (org.apache.flink.table.connector.format.ProjectableDecodingFormat), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
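
The decoder above emits RowData records whose RowKind reflects the Debezium change event, which is why the format declares all four kinds. A minimal sketch with made-up values and an illustrative class name, showing kind-aware rows built with GenericRowData.ofKind:

import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.types.RowKind;

public class RowKindSketch {
    public static void main(String[] args) {
        RowData before = GenericRowData.ofKind(RowKind.UPDATE_BEFORE, 1, StringData.fromString("old"));
        RowData after = GenericRowData.ofKind(RowKind.UPDATE_AFTER, 1, StringData.fromString("new"));
        // Prints -U / +U, the short forms used in changelog output.
        System.out.println(before.getRowKind().shortString() + " / " + after.getRowKind().shortString());
    }
}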

Example 70 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From the class ArrowReaderWriterTest, method createArrowWriter:

@Override
public Tuple2<ArrowWriter<RowData>, ArrowStreamWriter> createArrowWriter(OutputStream outputStream) throws IOException {
    VectorSchemaRoot root = VectorSchemaRoot.create(ArrowUtils.toArrowSchema(rowType), allocator);
    ArrowWriter<RowData> arrowWriter = ArrowUtils.createRowDataArrowWriter(root, rowType);
    ArrowStreamWriter arrowStreamWriter = new ArrowStreamWriter(root, null, outputStream);
    arrowStreamWriter.start();
    return Tuple2.of(arrowWriter, arrowStreamWriter);
}
Also used: VectorSchemaRoot (org.apache.arrow.vector.VectorSchemaRoot), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData), ArrowStreamWriter (org.apache.arrow.vector.ipc.ArrowStreamWriter)
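
The helper above only builds the writer pair. A hedged sketch of how it is typically driven (the writeRows method and the rows argument are assumptions, not part of the test): the ArrowWriter fills the vectors, and the ArrowStreamWriter flushes them as one Arrow record batch.

void writeRows(Tuple2<ArrowWriter<RowData>, ArrowStreamWriter> pair, List<RowData> rows) throws IOException {
    ArrowWriter<RowData> arrowWriter = pair.f0;
    ArrowStreamWriter streamWriter = pair.f1;
    for (RowData row : rows) {
        arrowWriter.write(row);
    }
    // Finalize the vectors' value counts, emit one record batch, then write the end-of-stream marker.
    arrowWriter.finish();
    streamWriter.writeBatch();
    streamWriter.end();
}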

Aggregations

Usage counts for RowData and the types most frequently used alongside it in the matched sources:

RowData (org.apache.flink.table.data.RowData): 602
Test (org.junit.Test): 201
GenericRowData (org.apache.flink.table.data.GenericRowData): 178
ArrayList (java.util.ArrayList): 109
RowType (org.apache.flink.table.types.logical.RowType): 105
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 90
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 84
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 72
Transformation (org.apache.flink.api.dag.Transformation): 70
Configuration (org.apache.flink.configuration.Configuration): 68
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 67
List (java.util.List): 65
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 54
DataType (org.apache.flink.table.types.DataType): 52
Map (java.util.Map): 42
LogicalType (org.apache.flink.table.types.logical.LogicalType): 41
TableException (org.apache.flink.table.api.TableException): 34
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 33
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 32
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 31