Search in sources :

Example 1 with KafkaSource

Uses of org.apache.flink.connector.kafka.source.KafkaSource in the Apache Flink project.

The class KafkaDynamicTableFactoryTest defines the method assertKafkaSource.

/**
 * Asserts that the given scan runtime provider is backed by a {@link KafkaSource} and returns it.
 *
 * <p>The provider must be a {@link DataStreamScanProvider} whose produced data stream is rooted
 * in a {@link SourceTransformation} wrapping a {@code KafkaSource}.
 *
 * @param provider the scan runtime provider created by the table factory under test
 * @return the {@code KafkaSource} extracted from the provider's transformation
 */
private KafkaSource<?> assertKafkaSource(ScanTableSource.ScanRuntimeProvider provider) {
    // The factory is expected to produce a DataStream-based scan provider.
    assertThat(provider).isInstanceOf(DataStreamScanProvider.class);
    final DataStreamScanProvider streamProvider = (DataStreamScanProvider) provider;
    // Materialize the stream against a throwaway local environment to inspect its transformation.
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironment();
    final Transformation<RowData> transformation =
            streamProvider.produceDataStream(n -> Optional.empty(), env).getTransformation();
    assertThat(transformation).isInstanceOf(SourceTransformation.class);
    // Safe: the isInstanceOf assertion above guards this unchecked cast.
    @SuppressWarnings("unchecked")
    final SourceTransformation<RowData, KafkaPartitionSplit, KafkaSourceEnumState> sourceTransformation =
            (SourceTransformation<RowData, KafkaPartitionSplit, KafkaSourceEnumState>) transformation;
    final Object source = sourceTransformation.getSource();
    assertThat(source).isInstanceOf(KafkaSource.class);
    return (KafkaSource<?>) source;
}
Also used : DataType(org.apache.flink.table.types.DataType) ConfigOptions(org.apache.flink.configuration.ConfigOptions) Arrays(java.util.Arrays) Assertions.assertThat(org.assertj.core.api.Assertions.assertThat) ResolvedSchema(org.apache.flink.table.catalog.ResolvedSchema) SourceTransformation(org.apache.flink.streaming.api.transformations.SourceTransformation) DataStreamScanProvider(org.apache.flink.table.connector.source.DataStreamScanProvider) DecodingFormat(org.apache.flink.table.connector.format.DecodingFormat) ExtendWith(org.junit.jupiter.api.extension.ExtendWith) Map(java.util.Map) FactoryMocks.createTableSink(org.apache.flink.table.factories.utils.FactoryMocks.createTableSink) FlinkFixedPartitioner(org.apache.flink.streaming.connectors.kafka.partitioner.FlinkFixedPartitioner) ConfluentRegistryAvroSerializationSchema(org.apache.flink.formats.avro.registry.confluent.ConfluentRegistryAvroSerializationSchema) DynamicTableSource(org.apache.flink.table.connector.source.DynamicTableSource) DynamicTableSink(org.apache.flink.table.connector.sink.DynamicTableSink) KafkaTopicPartition(org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition) Set(java.util.Set) EncodingFormatMock(org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) AVRO_CONFLUENT(org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.AVRO_CONFLUENT) ResolvedExpressionMock(org.apache.flink.table.expressions.utils.ResolvedExpressionMock) AvroRowDataSerializationSchema(org.apache.flink.formats.avro.AvroRowDataSerializationSchema) Test(org.junit.jupiter.api.Test) List(java.util.List) FactoryUtil(org.apache.flink.table.factories.FactoryUtil) ValidationException(org.apache.flink.table.api.ValidationException) FlinkAssertions.containsCause(org.apache.flink.core.testutils.FlinkAssertions.containsCause) Optional(java.util.Optional) Pattern(java.util.regex.Pattern) 
ScanRuntimeProviderContext(org.apache.flink.table.runtime.connector.source.ScanRuntimeProviderContext) SerializationSchema(org.apache.flink.api.common.serialization.SerializationSchema) StreamExecutionEnvironment(org.apache.flink.streaming.api.environment.StreamExecutionEnvironment) TestFormatFactory(org.apache.flink.table.factories.TestFormatFactory) DeliveryGuarantee(org.apache.flink.connector.base.DeliveryGuarantee) EncodingFormat(org.apache.flink.table.connector.format.EncodingFormat) Sink(org.apache.flink.api.connector.sink2.Sink) ChangelogMode(org.apache.flink.table.connector.ChangelogMode) Column(org.apache.flink.table.catalog.Column) HashMap(java.util.HashMap) RowType(org.apache.flink.table.types.logical.RowType) ScanTableSource(org.apache.flink.table.connector.source.ScanTableSource) SinkV2Provider(org.apache.flink.table.connector.sink.SinkV2Provider) HashSet(java.util.HashSet) TestLoggerExtension(org.apache.flink.util.TestLoggerExtension) PROPERTIES_PREFIX(org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.PROPERTIES_PREFIX) KafkaSink(org.apache.flink.connector.kafka.sink.KafkaSink) Assertions.assertThatThrownBy(org.assertj.core.api.Assertions.assertThatThrownBy) Assertions.assertThatExceptionOfType(org.assertj.core.api.Assertions.assertThatExceptionOfType) RowDataToAvroConverters(org.apache.flink.formats.avro.RowDataToAvroConverters) KafkaSourceOptions(org.apache.flink.connector.kafka.source.KafkaSourceOptions) FactoryMocks.createTableSource(org.apache.flink.table.factories.utils.FactoryMocks.createTableSource) Nullable(javax.annotation.Nullable) ValueSource(org.junit.jupiter.params.provider.ValueSource) DEBEZIUM_AVRO_CONFLUENT(org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptionsUtil.DEBEZIUM_AVRO_CONFLUENT) RowData(org.apache.flink.table.data.RowData) Properties(java.util.Properties) WatermarkSpec(org.apache.flink.table.catalog.WatermarkSpec) Configuration(org.apache.flink.configuration.Configuration) 
DataTypes(org.apache.flink.table.api.DataTypes) ScanStartupMode(org.apache.flink.streaming.connectors.kafka.table.KafkaConnectorOptions.ScanStartupMode) KafkaSourceEnumState(org.apache.flink.connector.kafka.source.enumerator.KafkaSourceEnumState) FlinkKafkaPartitioner(org.apache.flink.streaming.connectors.kafka.partitioner.FlinkKafkaPartitioner) DeserializationSchema(org.apache.flink.api.common.serialization.DeserializationSchema) Consumer(java.util.function.Consumer) StartupMode(org.apache.flink.streaming.connectors.kafka.config.StartupMode) ParameterizedTest(org.junit.jupiter.params.ParameterizedTest) KafkaSource(org.apache.flink.connector.kafka.source.KafkaSource) UniqueConstraint(org.apache.flink.table.catalog.UniqueConstraint) DecodingFormatMock(org.apache.flink.table.factories.TestFormatFactory.DecodingFormatMock) SinkRuntimeProviderContext(org.apache.flink.table.runtime.connector.sink.SinkRuntimeProviderContext) ImmutableList(org.apache.flink.shaded.guava30.com.google.common.collect.ImmutableList) KafkaSourceTestUtils(org.apache.flink.connector.kafka.source.KafkaSourceTestUtils) FactoryMocks(org.apache.flink.table.factories.utils.FactoryMocks) KafkaPartitionSplit(org.apache.flink.connector.kafka.source.split.KafkaPartitionSplit) DebeziumAvroSerializationSchema(org.apache.flink.formats.avro.registry.confluent.debezium.DebeziumAvroSerializationSchema) NullSource(org.junit.jupiter.params.provider.NullSource) Transformation(org.apache.flink.api.dag.Transformation) Collections(java.util.Collections) AvroSchemaConverter(org.apache.flink.formats.avro.typeutils.AvroSchemaConverter) KafkaPartitionSplit(org.apache.flink.connector.kafka.source.split.KafkaPartitionSplit) RowData(org.apache.flink.table.data.RowData) KafkaSource(org.apache.flink.connector.kafka.source.KafkaSource) KafkaSourceEnumState(org.apache.flink.connector.kafka.source.enumerator.KafkaSourceEnumState) DataStreamScanProvider(org.apache.flink.table.connector.source.DataStreamScanProvider) 
SourceTransformation(org.apache.flink.streaming.api.transformations.SourceTransformation)

Aggregations

Arrays (java.util.Arrays)1 Collections (java.util.Collections)1 HashMap (java.util.HashMap)1 HashSet (java.util.HashSet)1 List (java.util.List)1 Map (java.util.Map)1 Optional (java.util.Optional)1 Properties (java.util.Properties)1 Set (java.util.Set)1 Consumer (java.util.function.Consumer)1 Pattern (java.util.regex.Pattern)1 Nullable (javax.annotation.Nullable)1 DeserializationSchema (org.apache.flink.api.common.serialization.DeserializationSchema)1 SerializationSchema (org.apache.flink.api.common.serialization.SerializationSchema)1 Sink (org.apache.flink.api.connector.sink2.Sink)1 Transformation (org.apache.flink.api.dag.Transformation)1 ConfigOptions (org.apache.flink.configuration.ConfigOptions)1 Configuration (org.apache.flink.configuration.Configuration)1 DeliveryGuarantee (org.apache.flink.connector.base.DeliveryGuarantee)1 KafkaSink (org.apache.flink.connector.kafka.sink.KafkaSink)1