
Example 16 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From class RegistryAvroFormatFactoryTest, method testDeserializationSchemaWithOptionalProperties.

@Test
public void testDeserializationSchemaWithOptionalProperties() {
    final AvroRowDataDeserializationSchema expectedDeser =
            new AvroRowDataDeserializationSchema(
                    ConfluentRegistryAvroDeserializationSchema.forGeneric(
                            AvroSchemaConverter.convertToSchema(ROW_TYPE), REGISTRY_URL, EXPECTED_OPTIONAL_PROPERTIES),
                    AvroToRowDataConverters.createRowConverter(ROW_TYPE),
                    InternalTypeInfo.of(ROW_TYPE));
    final DynamicTableSource actualSource = createTableSource(SCHEMA, getOptionalProperties());
    assertThat(actualSource, instanceOf(TestDynamicTableFactory.DynamicTableSourceMock.class));
    TestDynamicTableFactory.DynamicTableSourceMock scanSourceMock =
            (TestDynamicTableFactory.DynamicTableSourceMock) actualSource;
    DeserializationSchema<RowData> actualDeser =
            scanSourceMock.valueFormat.createRuntimeDecoder(
                    ScanRuntimeProviderContext.INSTANCE, SCHEMA.toPhysicalRowDataType());
    assertEquals(expectedDeser, actualDeser);
}
Also used: RowData (org.apache.flink.table.data.RowData), AvroRowDataDeserializationSchema (org.apache.flink.formats.avro.AvroRowDataDeserializationSchema), TestDynamicTableFactory (org.apache.flink.table.factories.TestDynamicTableFactory), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.Test)
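
For context, here is a minimal hand-rolled sketch of how such an "expected" registry-backed deserialization schema is assembled outside the factory test. The row type, registry URL, and property map below are placeholders, not values from the test.

import java.util.Collections;
import java.util.Map;

import org.apache.flink.formats.avro.AvroRowDataDeserializationSchema;
import org.apache.flink.formats.avro.AvroToRowDataConverters;
import org.apache.flink.formats.avro.registry.confluent.ConfluentRegistryAvroDeserializationSchema;
import org.apache.flink.formats.avro.typeutils.AvroSchemaConverter;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class RegistryAvroDeserSketch {

    public static AvroRowDataDeserializationSchema build() {
        // Placeholder row type and registry settings; adapt to the real table schema.
        RowType rowType = RowType.of(
                new LogicalType[] {new VarCharType(VarCharType.MAX_LENGTH), new IntType()},
                new String[] {"name", "score"});
        String registryUrl = "http://localhost:8081";
        Map<String, String> registryProps = Collections.emptyMap();

        // Same three-argument constructor the test expects: a GenericRecord decoder,
        // an Avro-to-RowData converter, and the produced RowData type information.
        return new AvroRowDataDeserializationSchema(
                ConfluentRegistryAvroDeserializationSchema.forGeneric(
                        AvroSchemaConverter.convertToSchema(rowType), registryUrl, registryProps),
                AvroToRowDataConverters.createRowConverter(rowType),
                InternalTypeInfo.of(rowType));
    }
}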

Example 17 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From class AvroFormatFactory, method createDecodingFormat.

@Override
public DecodingFormat<DeserializationSchema<RowData>> createDecodingFormat(
        DynamicTableFactory.Context context, ReadableConfig formatOptions) {
    FactoryUtil.validateFactoryOptions(this, formatOptions);
    return new ProjectableDecodingFormat<DeserializationSchema<RowData>>() {

        @Override
        public DeserializationSchema<RowData> createRuntimeDecoder(
                DynamicTableSource.Context context, DataType physicalDataType, int[][] projections) {
            // Apply the pushed-down field projection before deriving the produced row type.
            final DataType producedDataType = Projection.of(projections).project(physicalDataType);
            final RowType rowType = (RowType) producedDataType.getLogicalType();
            final TypeInformation<RowData> rowDataTypeInfo =
                    context.createTypeInformation(producedDataType);
            return new AvroRowDataDeserializationSchema(rowType, rowDataTypeInfo);
        }

        @Override
        public ChangelogMode getChangelogMode() {
            // Plain Avro records carry no changelog flags, so the format only emits inserts.
            return ChangelogMode.insertOnly();
        }
    };
}
Also used: RowData (org.apache.flink.table.data.RowData), ProjectableDecodingFormat (org.apache.flink.table.connector.format.ProjectableDecodingFormat), DataType (org.apache.flink.table.types.DataType), RowType (org.apache.flink.table.types.logical.RowType)
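
As a hedged usage sketch: once this factory is on the classpath, the decoding format above is selected through the standard 'format' = 'avro' table option. The connector, topic, and column names below are placeholders.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AvroFormatUsageSketch {

    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());

        // Placeholder Kafka source; the relevant part is the Avro format option,
        // which routes through AvroFormatFactory#createDecodingFormat shown above.
        tEnv.executeSql(
                "CREATE TABLE users (\n"
                        + "  name STRING,\n"
                        + "  score INT\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'users',\n"
                        + "  'properties.bootstrap.servers' = 'localhost:9092',\n"
                        + "  'format' = 'avro'\n"
                        + ")");
    }
}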

Example 18 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From class AvroBulkFormatTest, method testRestoreReader.

@Test
public void testRestoreReader() throws IOException {
    AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat =
            new AvroBulkFormatTestUtils.TestingAvroBulkFormat();
    long splitLength = tmpFile.length() / 3;
    String splitId = UUID.randomUUID().toString();

    // Read the last third of the file with a fresh reader and remember the first batch offset.
    FileSourceSplit split = new FileSourceSplit(
            splitId, new Path(tmpFile.toString()), splitLength * 2, tmpFile.length());
    BulkFormat.Reader<RowData> reader = bulkFormat.createReader(new Configuration(), split);
    long offset1 = assertBatch(reader, new BatchInfo(3, 5));
    assertBatch(reader, new BatchInfo(5, 6));
    assertThat(reader.readBatch()).isNull();
    reader.close();

    // Restore from CheckpointedPosition(offset1, 1): resume at offset1 with one record
    // already consumed, and expect the same remaining records.
    split = new FileSourceSplit(
            splitId, new Path(tmpFile.toString()), splitLength * 2, tmpFile.length(),
            StringUtils.EMPTY_STRING_ARRAY, new CheckpointedPosition(offset1, 1));
    reader = bulkFormat.restoreReader(new Configuration(), split);
    long offset2 = assertBatch(reader, new BatchInfo(3, 5), 1);
    assertBatch(reader, new BatchInfo(5, 6));
    assertThat(reader.readBatch()).isNull();
    reader.close();
    assertThat(offset2).isEqualTo(offset1);
}
Also used: Path (org.apache.flink.core.fs.Path), FileSourceSplit (org.apache.flink.connector.file.src.FileSourceSplit), Configuration (org.apache.flink.configuration.Configuration), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), CheckpointedPosition (org.apache.flink.connector.file.src.util.CheckpointedPosition), BulkFormat (org.apache.flink.connector.file.src.reader.BulkFormat), Test (org.junit.jupiter.api.Test)
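
A small sketch of the reading pattern the assertBatch helper relies on (the helper itself is not shown in this excerpt, so this is an assumed stand-in): drain a BulkFormat.Reader batch by batch and keep the last RecordAndPosition, whose offset and skip count are the inputs a CheckpointedPosition is built from.

import java.io.IOException;

import org.apache.flink.connector.file.src.reader.BulkFormat;
import org.apache.flink.connector.file.src.util.RecordAndPosition;
import org.apache.flink.table.data.RowData;

public final class BulkReaderDrainSketch {

    /** Reads every remaining record and returns the position of the last one, or null. */
    public static RecordAndPosition<RowData> drain(BulkFormat.Reader<RowData> reader)
            throws IOException {
        RecordAndPosition<RowData> last = null;
        BulkFormat.RecordIterator<RowData> batch;
        while ((batch = reader.readBatch()) != null) {
            RecordAndPosition<RowData> record;
            while ((record = batch.next()) != null) {
                // record.getOffset() and record.getRecordSkipCount() feed CheckpointedPosition.
                last = record;
            }
            batch.releaseBatch();
        }
        return last;
    }

    private BulkReaderDrainSketch() {}
}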

Example 19 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From class AvroBulkFormatTest, method assertSplit.

private void assertSplit(
        AvroBulkFormatTestUtils.TestingAvroBulkFormat bulkFormat, List<SplitInfo> splitInfos)
        throws IOException {
    for (SplitInfo splitInfo : splitInfos) {
        FileSourceSplit split = new FileSourceSplit(
                UUID.randomUUID().toString(),
                new Path(tmpFile.toString()),
                splitInfo.start,
                splitInfo.end - splitInfo.start);
        BulkFormat.Reader<RowData> reader = bulkFormat.createReader(new Configuration(), split);
        List<Long> offsets = new ArrayList<>();
        for (BatchInfo batch : splitInfo.batches) {
            offsets.add(assertBatch(reader, batch));
        }
        assertThat(reader.readBatch()).isNull();
        // Batch offsets within a split must be strictly increasing.
        for (int j = 1; j < offsets.size(); j++) {
            assertThat(offsets.get(j - 1) < offsets.get(j)).isTrue();
        }
        reader.close();
    }
}
Also used: Path (org.apache.flink.core.fs.Path), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), FileSourceSplit (org.apache.flink.connector.file.src.FileSourceSplit), Configuration (org.apache.flink.configuration.Configuration), ArrayList (java.util.ArrayList), BulkFormat (org.apache.flink.connector.file.src.reader.BulkFormat)
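
For completeness, a hypothetical helper (not part of the test) that carves a file into consecutive FileSourceSplits of the shape assertSplit iterates over, using the same four-argument FileSourceSplit constructor as the snippet above.

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.apache.flink.connector.file.src.FileSourceSplit;
import org.apache.flink.core.fs.Path;

public final class SplitSliceSketch {

    /** Slices a file into roughly numSplits consecutive splits that cover it end to end. */
    public static List<FileSourceSplit> slice(File file, int numSplits) {
        long length = file.length();
        long step = Math.max(1, length / numSplits);
        List<FileSourceSplit> splits = new ArrayList<>();
        for (long start = 0; start < length; start += step) {
            long end = Math.min(length, start + step);
            splits.add(new FileSourceSplit(
                    UUID.randomUUID().toString(), new Path(file.toString()), start, end - start));
        }
        return splits;
    }

    private SplitSliceSketch() {}
}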

Example 20 with RowData

Use of org.apache.flink.table.data.RowData in project flink by apache.

From class AvroRowDataDeSerializationSchemaTest, method createDeserializationSchema.

private AvroRowDataDeserializationSchema createDeserializationSchema(DataType dataType) throws Exception {
    final RowType rowType = (RowType) dataType.getLogicalType();
    final TypeInformation<RowData> typeInfo = InternalTypeInfo.of(rowType);
    AvroRowDataDeserializationSchema deserializationSchema = new AvroRowDataDeserializationSchema(rowType, typeInfo);
    deserializationSchema.open(null);
    return deserializationSchema;
}
Also used: GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), RowType (org.apache.flink.table.types.logical.RowType)
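
Building on this helper, a hedged round-trip sketch: serialize a GenericRowData with the matching AvroRowDataSerializationSchema and decode it back with a schema created the same way as above. The row type, field names, and values are placeholders of my choosing, not taken from the test.

import org.apache.flink.formats.avro.AvroRowDataDeserializationSchema;
import org.apache.flink.formats.avro.AvroRowDataSerializationSchema;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.logical.IntType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.table.types.logical.VarCharType;

public class AvroRowDataRoundTripSketch {

    public static void main(String[] args) throws Exception {
        // Placeholder row type: ROW<name STRING, score INT>.
        RowType rowType = RowType.of(
                new LogicalType[] {new VarCharType(VarCharType.MAX_LENGTH), new IntType()},
                new String[] {"name", "score"});

        AvroRowDataSerializationSchema ser = new AvroRowDataSerializationSchema(rowType);
        AvroRowDataDeserializationSchema deser =
                new AvroRowDataDeserializationSchema(rowType, InternalTypeInfo.of(rowType));
        // Both schemas must be opened before use, as in the test above.
        ser.open(null);
        deser.open(null);

        byte[] bytes = ser.serialize(GenericRowData.of(StringData.fromString("alice"), 42));
        RowData back = deser.deserialize(bytes);
        System.out.println(back.getString(0) + " / " + back.getInt(1));
    }
}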

Aggregations

RowData (org.apache.flink.table.data.RowData): 602
Test (org.junit.Test): 201
GenericRowData (org.apache.flink.table.data.GenericRowData): 178
ArrayList (java.util.ArrayList): 109
RowType (org.apache.flink.table.types.logical.RowType): 105
JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData): 90
Watermark (org.apache.flink.streaming.api.watermark.Watermark): 84
ConcurrentLinkedQueue (java.util.concurrent.ConcurrentLinkedQueue): 72
Transformation (org.apache.flink.api.dag.Transformation): 70
Configuration (org.apache.flink.configuration.Configuration): 68
BinaryRowData (org.apache.flink.table.data.binary.BinaryRowData): 67
List (java.util.List): 65
ExecEdge (org.apache.flink.table.planner.plan.nodes.exec.ExecEdge): 54
DataType (org.apache.flink.table.types.DataType): 52
Map (java.util.Map): 42
LogicalType (org.apache.flink.table.types.logical.LogicalType): 41
TableException (org.apache.flink.table.api.TableException): 34
OneInputTransformation (org.apache.flink.streaming.api.transformations.OneInputTransformation): 33
RowDataKeySelector (org.apache.flink.table.runtime.keyselector.RowDataKeySelector): 32
OperatorSubtaskState (org.apache.flink.runtime.checkpoint.OperatorSubtaskState): 31