Example usage of org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock in the Apache Flink project, taken from the class KafkaDynamicTableFactoryTest, method testTableSinkWithParallelism.
/**
 * Verifies that the {@code sink.parallelism} table option is propagated into the created
 * {@link KafkaDynamicSink} and surfaced by the resulting {@link SinkV2Provider}.
 */
@Test
public void testTableSinkWithParallelism() {
    final Map<String, String> modifiedOptions =
            getModifiedOptions(
                    getBasicSinkOptions(),
                    options -> options.put("sink.parallelism", "100"));
    final KafkaDynamicSink actualSink =
            (KafkaDynamicSink) createTableSink(SCHEMA, modifiedOptions);

    final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat =
            new EncodingFormatMock(",");
    final DynamicTableSink expectedSink =
            createExpectedSink(
                    SCHEMA_DATA_TYPE,
                    null,
                    valueEncodingFormat,
                    new int[0],
                    new int[] {0, 1, 2},
                    null,
                    TOPIC,
                    KAFKA_SINK_PROPERTIES,
                    new FlinkFixedPartitioner<>(),
                    DeliveryGuarantee.EXACTLY_ONCE,
                    100,
                    "kafka-sink");
    assertThat(actualSink).isEqualTo(expectedSink);

    final DynamicTableSink.SinkRuntimeProvider provider =
            actualSink.getSinkRuntimeProvider(new SinkRuntimeProviderContext(false));
    assertThat(provider).isInstanceOf(SinkV2Provider.class);
    final SinkV2Provider sinkProvider = (SinkV2Provider) provider;
    // AssertJ's Optional support checks presence and value in one assertion,
    // replacing the isPresent()/get() pair and the (long) cast of the original.
    assertThat(sinkProvider.getParallelism()).hasValue(100);
}
Example usage of org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock in the Apache Flink project, taken from the class KafkaDynamicTableFactoryTest, method testTableSinkWithKeyValue.
/**
 * Verifies a sink configured with separate key and value formats under exactly-once
 * delivery, comparing it against a fully-specified expected sink.
 */
@Test
public void testTableSinkWithKeyValue() {
    final Map<String, String> options =
            getModifiedOptions(
                    getKeyValueOptions(),
                    opts -> {
                        opts.put("sink.delivery-guarantee", "exactly-once");
                        opts.put("sink.transactional-id-prefix", "kafka-sink");
                    });
    final KafkaDynamicSink kafkaSink = (KafkaDynamicSink) createTableSink(SCHEMA, options);
    // Initialize stateful testing formats before the equality comparison below.
    kafkaSink.getSinkRuntimeProvider(new SinkRuntimeProviderContext(false));

    final EncodingFormatMock expectedKeyFormat = new EncodingFormatMock("#");
    expectedKeyFormat.consumedDataType =
            DataTypes.ROW(DataTypes.FIELD(NAME, DataTypes.STRING().notNull())).notNull();

    final EncodingFormatMock expectedValueFormat = new EncodingFormatMock("|");
    expectedValueFormat.consumedDataType =
            DataTypes.ROW(
                            DataTypes.FIELD(COUNT, DataTypes.DECIMAL(38, 18)),
                            DataTypes.FIELD(TIME, DataTypes.TIMESTAMP(3)))
                    .notNull();

    final DynamicTableSink expectedSink =
            createExpectedSink(
                    SCHEMA_DATA_TYPE,
                    expectedKeyFormat,
                    expectedValueFormat,
                    new int[] {0},
                    new int[] {1, 2},
                    null,
                    TOPIC,
                    KAFKA_FINAL_SINK_PROPERTIES,
                    new FlinkFixedPartitioner<>(),
                    DeliveryGuarantee.EXACTLY_ONCE,
                    null,
                    "kafka-sink");
    assertThat(kafkaSink).isEqualTo(expectedSink);
}
Example usage of org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock in the Apache Flink project, taken from the class KafkaDynamicTableFactoryTest, method testTableSinkSemanticTranslation.
/**
 * Verifies that each legacy {@code sink.semantic} value is translated into the
 * corresponding {@link DeliveryGuarantee} constant.
 */
@Test
public void testTableSinkSemanticTranslation() {
    final EncodingFormat<SerializationSchema<RowData>> valueFormat = new EncodingFormatMock(",");
    for (final String semantic : ImmutableList.of("exactly-once", "at-least-once", "none")) {
        final Map<String, String> options =
                getModifiedOptions(
                        getBasicSinkOptions(),
                        opts -> {
                            opts.put("sink.semantic", semantic);
                            opts.put("sink.transactional-id-prefix", "kafka-sink");
                        });
        final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
        // "exactly-once" -> EXACTLY_ONCE, "at-least-once" -> AT_LEAST_ONCE, "none" -> NONE
        final DeliveryGuarantee expectedGuarantee =
                DeliveryGuarantee.valueOf(semantic.toUpperCase().replace("-", "_"));
        final DynamicTableSink expectedSink =
                createExpectedSink(
                        SCHEMA_DATA_TYPE,
                        null,
                        valueFormat,
                        new int[0],
                        new int[] {0, 1, 2},
                        null,
                        TOPIC,
                        KAFKA_SINK_PROPERTIES,
                        new FlinkFixedPartitioner<>(),
                        expectedGuarantee,
                        null,
                        "kafka-sink");
        assertThat(actualSink).isEqualTo(expectedSink);
    }
}
Example usage of org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock in the Apache Flink project, taken from the class FactoryUtilTest, method testAllOptions.
/**
 * Verifies that a fully-specified options map yields both the expected mock table
 * source and the expected mock table sink.
 */
@Test
public void testAllOptions() {
    final Map<String, String> options = createAllOptions();

    final DynamicTableSource source = createTableSource(SCHEMA, options);
    final DynamicTableSource expectedSource =
            new DynamicTableSourceMock(
                    "MyTarget",
                    null,
                    new DecodingFormatMock(",", false),
                    new DecodingFormatMock("|", true));
    assertThat(source).isEqualTo(expectedSource);

    final DynamicTableSink sink = createTableSink(SCHEMA, options);
    final DynamicTableSink expectedSink =
            new DynamicTableSinkMock(
                    "MyTarget", 1000L, new EncodingFormatMock(","), new EncodingFormatMock("|"));
    assertThat(sink).isEqualTo(expectedSink);
}
Example usage of org.apache.flink.table.factories.TestFormatFactory.EncodingFormatMock in the Apache Flink project, taken from the class KafkaDynamicTableFactoryTest, method testTableSink.
/**
 * Verifies the value-only sink under exactly-once delivery and checks that its
 * runtime provider produces a SinkV2-based {@code KafkaSink}.
 */
@Test
public void testTableSink() {
    final Map<String, String> options =
            getModifiedOptions(
                    getBasicSinkOptions(),
                    opts -> {
                        opts.put("sink.delivery-guarantee", "exactly-once");
                        opts.put("sink.transactional-id-prefix", "kafka-sink");
                    });
    final DynamicTableSink actualSink = createTableSink(SCHEMA, options);

    final EncodingFormat<SerializationSchema<RowData>> valueFormat = new EncodingFormatMock(",");
    final DynamicTableSink expectedSink =
            createExpectedSink(
                    SCHEMA_DATA_TYPE,
                    null,
                    valueFormat,
                    new int[0],
                    new int[] {0, 1, 2},
                    null,
                    TOPIC,
                    KAFKA_SINK_PROPERTIES,
                    new FlinkFixedPartitioner<>(),
                    DeliveryGuarantee.EXACTLY_ONCE,
                    null,
                    "kafka-sink");
    assertThat(actualSink).isEqualTo(expectedSink);

    // Verify the concrete Kafka producer implementation behind the provider.
    final DynamicTableSink.SinkRuntimeProvider provider =
            ((KafkaDynamicSink) actualSink)
                    .getSinkRuntimeProvider(new SinkRuntimeProviderContext(false));
    assertThat(provider).isInstanceOf(SinkV2Provider.class);
    final Sink<RowData> createdSink = ((SinkV2Provider) provider).createSink();
    assertThat(createdSink).isInstanceOf(KafkaSink.class);
}
Aggregations