
Example 16 with DynamicTableSource

Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.

From the class KafkaDynamicTableFactoryTest, method testTableSource.

@Test
public void testTableSource() {
    final DynamicTableSource actualSource = createTableSource(SCHEMA, getBasicSourceOptions());
    final KafkaDynamicSource actualKafkaSource = (KafkaDynamicSource) actualSource;
    final Map<KafkaTopicPartition, Long> specificOffsets = new HashMap<>();
    specificOffsets.put(new KafkaTopicPartition(TOPIC, PARTITION_0), OFFSET_0);
    specificOffsets.put(new KafkaTopicPartition(TOPIC, PARTITION_1), OFFSET_1);
    final DecodingFormat<DeserializationSchema<RowData>> valueDecodingFormat = new DecodingFormatMock(",", true);
    // the scan source constructed from the options should equal the expected source
    final KafkaDynamicSource expectedKafkaSource =
            createExpectedScanSource(
                    SCHEMA_DATA_TYPE,
                    null,
                    valueDecodingFormat,
                    new int[0],
                    new int[] { 0, 1, 2 },
                    null,
                    Collections.singletonList(TOPIC),
                    null,
                    KAFKA_SOURCE_PROPERTIES,
                    StartupMode.SPECIFIC_OFFSETS,
                    specificOffsets,
                    0);
    assertThat(actualKafkaSource).isEqualTo(expectedKafkaSource);
    ScanTableSource.ScanRuntimeProvider provider = actualKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    assertKafkaSource(provider);
}
Also used: ScanTableSource (org.apache.flink.table.connector.source.ScanTableSource), HashMap (java.util.HashMap), KafkaTopicPartition (org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition), DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock), DeserializationSchema (org.apache.flink.api.common.serialization.DeserializationSchema), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
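
For orientation, the options built by getBasicSourceOptions() correspond to ordinary Kafka connector DDL. Below is a minimal sketch of a CREATE TABLE statement that drives the same specific-offsets startup path; the table name, broker address, offsets, and csv format are illustrative placeholders, not values from the test fixtures.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class SpecificOffsetsSourceDdl {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Pin each partition to an explicit starting offset, the DDL
        // counterpart of the specificOffsets map assembled in the test.
        tEnv.executeSql(
                "CREATE TABLE kafka_source (\n"
                        + "  `name` STRING,\n"
                        + "  `count` DECIMAL(38, 18),\n"
                        + "  `time` TIMESTAMP(3)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'myTopic',\n"
                        + "  'properties.bootstrap.servers' = 'broker:9092',\n"
                        + "  'properties.group.id' = 'testGroup',\n"
                        + "  'scan.startup.mode' = 'specific-offsets',\n"
                        + "  'scan.startup.specific-offsets' = 'partition:0,offset:100;partition:1,offset:123',\n"
                        + "  'format' = 'csv'\n"
                        + ")");
    }
}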

Example 17 with DynamicTableSource

Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.

From the class KafkaDynamicTableFactoryTest, method testTableSourceWithPattern.

@Test
public void testTableSourceWithPattern() {
    final Map<String, String> modifiedOptions = getModifiedOptions(getBasicSourceOptions(), options -> {
        options.remove("topic");
        options.put("topic-pattern", TOPIC_REGEX);
        options.put("scan.startup.mode", ScanStartupMode.EARLIEST_OFFSET.toString());
        options.remove("scan.startup.specific-offsets");
    });
    final DynamicTableSource actualSource = createTableSource(SCHEMA, modifiedOptions);
    final Map<KafkaTopicPartition, Long> specificOffsets = new HashMap<>();
    DecodingFormat<DeserializationSchema<RowData>> valueDecodingFormat = new DecodingFormatMock(",", true);
    // the scan source constructed from the options should equal the expected source
    final KafkaDynamicSource expectedKafkaSource =
            createExpectedScanSource(
                    SCHEMA_DATA_TYPE,
                    null,
                    valueDecodingFormat,
                    new int[0],
                    new int[] { 0, 1, 2 },
                    null,
                    null,
                    Pattern.compile(TOPIC_REGEX),
                    KAFKA_SOURCE_PROPERTIES,
                    StartupMode.EARLIEST,
                    specificOffsets,
                    0);
    final KafkaDynamicSource actualKafkaSource = (KafkaDynamicSource) actualSource;
    assertThat(actualKafkaSource).isEqualTo(expectedKafkaSource);
    ScanTableSource.ScanRuntimeProvider provider = actualKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    assertKafkaSource(provider);
}
Also used: HashMap (java.util.HashMap), KafkaTopicPartition (org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition), DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock), DeserializationSchema (org.apache.flink.api.common.serialization.DeserializationSchema), ScanTableSource (org.apache.flink.table.connector.source.ScanTableSource), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
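
The topic-pattern variant has a direct DDL counterpart as well. A hedged sketch with illustrative identifiers follows; it assumes a TableEnvironment supplied by the caller.

import org.apache.flink.table.api.TableEnvironment;

class TopicPatternSourceDdl {
    // Subscribe by regular expression instead of a fixed topic and start
    // from the earliest offsets, mirroring the modified options above.
    // 'topic' and 'topic-pattern' are mutually exclusive, which is why the
    // test removes one before setting the other.
    static void register(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE kafka_pattern_source (\n"
                        + "  `name` STRING,\n"
                        + "  `count` DECIMAL(38, 18),\n"
                        + "  `time` TIMESTAMP(3)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic-pattern' = 'myTopic-\\d+',\n"
                        + "  'properties.bootstrap.servers' = 'broker:9092',\n"
                        + "  'properties.group.id' = 'testGroup',\n"
                        + "  'scan.startup.mode' = 'earliest-offset',\n"
                        + "  'format' = 'csv'\n"
                        + ")");
    }
}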

Example 18 with DynamicTableSource

Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.

From the class KafkaDynamicTableFactoryTest, method testTableSourceWithKeyValueAndMetadata.

@Test
public void testTableSourceWithKeyValueAndMetadata() {
    final Map<String, String> options = getKeyValueOptions();
    options.put("value.test-format.readable-metadata", "metadata_1:INT, metadata_2:STRING");
    final DynamicTableSource actualSource = createTableSource(SCHEMA_WITH_METADATA, options);
    final KafkaDynamicSource actualKafkaSource = (KafkaDynamicSource) actualSource;
    // initialize stateful testing formats
    actualKafkaSource.applyReadableMetadata(Arrays.asList("timestamp", "value.metadata_2"), SCHEMA_WITH_METADATA.toSourceRowDataType());
    actualKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    final DecodingFormatMock expectedKeyFormat = new DecodingFormatMock("#", false, ChangelogMode.insertOnly(), Collections.emptyMap());
    expectedKeyFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(NAME, DataTypes.STRING())).notNull();
    final Map<String, DataType> expectedReadableMetadata = new HashMap<>();
    expectedReadableMetadata.put("metadata_1", DataTypes.INT());
    expectedReadableMetadata.put("metadata_2", DataTypes.STRING());
    final DecodingFormatMock expectedValueFormat = new DecodingFormatMock("|", false, ChangelogMode.insertOnly(), expectedReadableMetadata);
    expectedValueFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(COUNT, DataTypes.DECIMAL(38, 18)), DataTypes.FIELD("metadata_2", DataTypes.STRING())).notNull();
    expectedValueFormat.metadataKeys = Collections.singletonList("metadata_2");
    final KafkaDynamicSource expectedKafkaSource =
            createExpectedScanSource(
                    SCHEMA_WITH_METADATA.toPhysicalRowDataType(),
                    expectedKeyFormat,
                    expectedValueFormat,
                    new int[] { 0 },
                    new int[] { 1 },
                    null,
                    Collections.singletonList(TOPIC),
                    null,
                    KAFKA_FINAL_SOURCE_PROPERTIES,
                    StartupMode.GROUP_OFFSETS,
                    Collections.emptyMap(),
                    0);
    expectedKafkaSource.producedDataType = SCHEMA_WITH_METADATA.toSourceRowDataType();
    expectedKafkaSource.metadataKeys = Collections.singletonList("timestamp");
    assertThat(actualSource).isEqualTo(expectedKafkaSource);
}
Also used: HashMap (java.util.HashMap), DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock), DataType (org.apache.flink.table.types.DataType), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
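
In user-facing DDL, the metadata handling verified here appears as METADATA columns, with format-provided keys addressed through the 'value.' prefix, the same "value.metadata_2" addressing used in the test. A hedged sketch follows; since metadata_1/metadata_2 come from the test's mock format, the sketch substitutes debezium-json, a shipped format that actually exposes readable metadata, and uses made-up table and column names.

import org.apache.flink.table.api.TableEnvironment;

class MetadataColumnsDdl {
    static void register(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE kafka_meta_source (\n"
                        // Connector metadata: the Kafka record timestamp
                        // (TIMESTAMP_LTZ(3) in recent Flink versions,
                        // TIMESTAMP(3) in older ones).
                        + "  ts TIMESTAMP_LTZ(3) METADATA FROM 'timestamp' VIRTUAL,\n"
                        // Format metadata: addressed through the 'value.'
                        // prefix, like "value.metadata_2" in the test.
                        + "  origin_table STRING METADATA FROM 'value.source.table' VIRTUAL,\n"
                        + "  `name` STRING,\n"
                        + "  `count` DECIMAL(38, 18)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'myTopic',\n"
                        + "  'properties.bootstrap.servers' = 'broker:9092',\n"
                        + "  'properties.group.id' = 'testGroup',\n"
                        + "  'value.format' = 'debezium-json'\n"
                        + ")");
    }
}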

Example 19 with DynamicTableSource

Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.

From the class KafkaDynamicTableFactoryTest, method testTableSourceWithKeyValue.

@Test
public void testTableSourceWithKeyValue() {
    final DynamicTableSource actualSource = createTableSource(SCHEMA, getKeyValueOptions());
    final KafkaDynamicSource actualKafkaSource = (KafkaDynamicSource) actualSource;
    // initialize stateful testing formats
    actualKafkaSource.getScanRuntimeProvider(ScanRuntimeProviderContext.INSTANCE);
    final DecodingFormatMock keyDecodingFormat = new DecodingFormatMock("#", false);
    keyDecodingFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(NAME, DataTypes.STRING().notNull())).notNull();
    final DecodingFormatMock valueDecodingFormat = new DecodingFormatMock("|", false);
    valueDecodingFormat.producedDataType = DataTypes.ROW(DataTypes.FIELD(COUNT, DataTypes.DECIMAL(38, 18)), DataTypes.FIELD(TIME, DataTypes.TIMESTAMP(3))).notNull();
    final KafkaDynamicSource expectedKafkaSource =
            createExpectedScanSource(
                    SCHEMA_DATA_TYPE,
                    keyDecodingFormat,
                    valueDecodingFormat,
                    new int[] { 0 },
                    new int[] { 1, 2 },
                    null,
                    Collections.singletonList(TOPIC),
                    null,
                    KAFKA_FINAL_SOURCE_PROPERTIES,
                    StartupMode.GROUP_OFFSETS,
                    Collections.emptyMap(),
                    0);
    assertThat(actualSource).isEqualTo(expectedKafkaSource);
}
Also used: DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource), Test (org.junit.jupiter.api.Test), ParameterizedTest (org.junit.jupiter.params.ParameterizedTest)
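
The key/value projections asserted above (new int[] { 0 } for the key, new int[] { 1, 2 } for the value) are what the connector derives from key/value format options in DDL. A rough sketch with placeholder identifiers:

import org.apache.flink.table.api.TableEnvironment;

class KeyValueFormatsDdl {
    static void register(TableEnvironment tEnv) {
        tEnv.executeSql(
                "CREATE TABLE kafka_kv_source (\n"
                        + "  `name` STRING NOT NULL,\n"
                        + "  `count` DECIMAL(38, 18),\n"
                        + "  `time` TIMESTAMP(3)\n"
                        + ") WITH (\n"
                        + "  'connector' = 'kafka',\n"
                        + "  'topic' = 'myTopic',\n"
                        + "  'properties.bootstrap.servers' = 'broker:9092',\n"
                        + "  'properties.group.id' = 'testGroup',\n"
                        // Route `name` through the key format; EXCEPT_KEY
                        // keeps it out of the value format's row, matching
                        // the disjoint projections asserted in the test.
                        + "  'key.format' = 'csv',\n"
                        + "  'key.fields' = 'name',\n"
                        + "  'value.format' = 'csv',\n"
                        + "  'value.fields-include' = 'EXCEPT_KEY'\n"
                        + ")");
    }
}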

Example 20 with DynamicTableSource

Use of org.apache.flink.table.connector.source.DynamicTableSource in project flink by apache.

From the class JsonFormatFactoryTest, method createTableSource.

private TestDynamicTableFactory.DynamicTableSourceMock createTableSource(Map<String, String> options) {
    final DynamicTableSource actualSource = FactoryMocks.createTableSource(SCHEMA, options);
    assertThat(actualSource).isInstanceOf(TestDynamicTableFactory.DynamicTableSourceMock.class);
    return (TestDynamicTableFactory.DynamicTableSourceMock) actualSource;
}
Also used: TestDynamicTableFactory (org.apache.flink.table.factories.TestDynamicTableFactory), DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource)
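
The options map handed to this helper carries the json format's settings. A sketch of a plausible fixture, using the documented 'json.*' keys; the real test's map also names a test connector, omitted here.

import java.util.HashMap;
import java.util.Map;

class JsonOptionsFixture {
    // Illustrative option map for the helper above: select the json format
    // and configure its documented parse and timestamp settings.
    static Map<String, String> jsonSourceOptions() {
        final Map<String, String> options = new HashMap<>();
        options.put("format", "json");
        options.put("json.fail-on-missing-field", "false");
        options.put("json.ignore-parse-errors", "true");
        options.put("json.timestamp-format.standard", "ISO-8601");
        return options;
    }
}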

Aggregations

DynamicTableSource (org.apache.flink.table.connector.source.DynamicTableSource): 55 usages
Test (org.junit.Test): 24 usages
DynamicTableSink (org.apache.flink.table.connector.sink.DynamicTableSink): 12 usages
TestDynamicTableFactory (org.apache.flink.table.factories.TestDynamicTableFactory): 12 usages
Test (org.junit.jupiter.api.Test): 10 usages
RowData (org.apache.flink.table.data.RowData): 9 usages
DecodingFormatMock (org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock): 8 usages
TableSourceTable (org.apache.flink.table.planner.plan.schema.TableSourceTable): 8 usages
ResolvedSchema (org.apache.flink.table.catalog.ResolvedSchema): 7 usages
HashMap (java.util.HashMap): 5 usages
Configuration (org.apache.flink.configuration.Configuration): 5 usages
ScanTableSource (org.apache.flink.table.connector.source.ScanTableSource): 5 usages
ParameterizedTest (org.junit.jupiter.params.ParameterizedTest): 5 usages
ArrayList (java.util.ArrayList): 4 usages
JdbcConnectorOptions (org.apache.flink.connector.jdbc.internal.options.JdbcConnectorOptions): 4 usages
JdbcLookupOptions (org.apache.flink.connector.jdbc.internal.options.JdbcLookupOptions): 4 usages
CatalogTable (org.apache.flink.table.catalog.CatalogTable): 4 usages
SourceAbilitySpec (org.apache.flink.table.planner.plan.abilities.source.SourceAbilitySpec): 4 usages
List (java.util.List): 3 usages
LogicalTableScan (org.apache.calcite.rel.logical.LogicalTableScan): 3 usages