use of org.apache.flink.api.common.serialization.SimpleStringSchema in project flink by apache.
the class PulsarSinkITCase method writeRecordsToPulsar.
@ParameterizedTest
@EnumSource(DeliveryGuarantee.class)
void writeRecordsToPulsar(DeliveryGuarantee guarantee) throws Exception {
    // A random topic with 4 partitions.
    String topic = randomAlphabetic(8);
    operator().createTopic(topic, 4);
    int counts = ThreadLocalRandom.current().nextInt(100, 200);
    ControlSource source = new ControlSource(
            sharedObjects, operator(), topic, guarantee, counts, Duration.ofMinutes(5));
    PulsarSink<String> sink = PulsarSink.builder()
            .setServiceUrl(operator().serviceUrl())
            .setAdminUrl(operator().adminUrl())
            .setDeliveryGuarantee(guarantee)
            .setTopics(topic)
            .setSerializationSchema(flinkSchema(new SimpleStringSchema()))
            .build();

    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(PARALLELISM);
    env.enableCheckpointing(100L);
    env.addSource(source).sinkTo(sink);
    env.execute();

    List<String> expectedRecords = source.getExpectedRecords();
    List<String> consumedRecords = source.getConsumedRecords();
    assertThat(consumedRecords)
            .hasSameSizeAs(expectedRecords)
            .containsExactlyInAnyOrderElementsOf(expectedRecords);
}
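The flinkSchema(...) call in the test presumably refers to the static helper PulsarSerializationSchema.flinkSchema, which wraps a plain Flink SerializationSchema such as SimpleStringSchema for use with the Pulsar sink. A minimal standalone sketch of the same builder usage outside the test harness; the service URL, admin URL, and topic name are placeholder assumptions, not values from the test above.
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.pulsar.sink.PulsarSink;
import org.apache.flink.connector.pulsar.sink.writer.serializer.PulsarSerializationSchema;

// Sketch only: broker/admin URLs and the topic name are placeholders.
PulsarSink<String> sink = PulsarSink.builder()
        .setServiceUrl("pulsar://localhost:6650")
        .setAdminUrl("http://localhost:8080")
        .setDeliveryGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
        .setTopics("example-topic")
        .setSerializationSchema(PulsarSerializationSchema.flinkSchema(new SimpleStringSchema()))
        .build();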
use of org.apache.flink.api.common.serialization.SimpleStringSchema in project aws-doc-sdk-examples by awsdocs.
the class StreamingJob method createFirehoseSinkFromStaticConfig.
private static FlinkKinesisFirehoseProducer<String> createFirehoseSinkFromStaticConfig() {
    /*
     * com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants
     * lists all of the properties that the Firehose sink can be configured with.
     */
    Properties outputProperties = new Properties();
    outputProperties.setProperty(ConsumerConfigConstants.AWS_REGION, region);

    FlinkKinesisFirehoseProducer<String> sink =
            new FlinkKinesisFirehoseProducer<>(outputStreamName, new SimpleStringSchema(), outputProperties);
    ProducerConfigConstants config = new ProducerConfigConstants(); // unused; has no effect on the sink
    return sink;
}
use of org.apache.flink.api.common.serialization.SimpleStringSchema in project aws-doc-sdk-examples by awsdocs.
the class StreamingJob method createSinkFromApplicationProperties.
private static FlinkKinesisProducer<String> createSinkFromApplicationProperties() throws IOException {
    Map<String, Properties> applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties();
    FlinkKinesisProducer<String> sink = new FlinkKinesisProducer<>(
            new SimpleStringSchema(), applicationProperties.get("ProducerConfigProperties"));
    sink.setDefaultStream(outputStreamName);
    sink.setDefaultPartition("0");
    return sink;
}
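KinesisAnalyticsRuntime.getApplicationProperties() returns the application's runtime property groups keyed by group name, so if no "ProducerConfigProperties" group is defined, the get(...) call above returns null and the producer constructor would fail. A defensive variant, shown as a sketch rather than the original example; the region fallback is an assumption.
// Sketch: fall back to a region-only Properties object when the group is absent.
Map<String, Properties> applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties();
Properties producerConfig = applicationProperties.getOrDefault("ProducerConfigProperties", new Properties());
producerConfig.putIfAbsent(ConsumerConfigConstants.AWS_REGION, region);

FlinkKinesisProducer<String> sink = new FlinkKinesisProducer<>(new SimpleStringSchema(), producerConfig);
sink.setDefaultStream(outputStreamName);
sink.setDefaultPartition("0");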
use of org.apache.flink.api.common.serialization.SimpleStringSchema in project aws-doc-sdk-examples by awsdocs.
the class StreamingJob method createFirehoseSinkFromApplicationProperties.
private static FlinkKinesisFirehoseProducer<String> createFirehoseSinkFromApplicationProperties() throws IOException {
    /*
     * com.amazonaws.services.kinesisanalytics.flink.connectors.config.ProducerConfigConstants
     * lists all of the properties that the Firehose sink can be configured with.
     */
    Map<String, Properties> applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties();
    FlinkKinesisFirehoseProducer<String> sink = new FlinkKinesisFirehoseProducer<>(
            outputStreamName, new SimpleStringSchema(), applicationProperties.get("ProducerConfigProperties"));
    return sink;
}
use of org.apache.flink.api.common.serialization.SimpleStringSchema in project aws-doc-sdk-examples by awsdocs.
the class StreamingJob method createSourceFromStaticConfig.
private static DataStream<String> createSourceFromStaticConfig(StreamExecutionEnvironment env) {
    Properties inputProperties = new Properties();
    inputProperties.setProperty(ConsumerConfigConstants.AWS_REGION, region);
    inputProperties.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "LATEST");
    return env.addSource(new FlinkKinesisConsumer<>(inputStreamName, new SimpleStringSchema(), inputProperties));
}
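These helpers are typically wired together in the job's main method, with the Kinesis source feeding one of the sinks defined above. A minimal sketch of that wiring; the job name and the choice of the Firehose sink are illustrative assumptions, not taken from the snippets above.
public static void main(String[] args) throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Read from the Kinesis input stream and write to the Firehose delivery stream.
    DataStream<String> input = createSourceFromStaticConfig(env);
    input.addSink(createFirehoseSinkFromStaticConfig());

    env.execute("Flink Kinesis to Firehose job");
}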