Use of org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer in project flink by apache.
Example: the main method of the TestAvroConsumerConfluent class.
public static void main(String[] args) throws Exception {
    // Parse input arguments; all seven parameters referenced below are required.
    final ParameterTool parameterTool = ParameterTool.fromArgs(args);
    if (parameterTool.getNumberOfParameters() < 7) {
        System.out.println("Missing parameters!\n"
                + "Usage: Kafka --input-topic <topic> --output-string-topic <topic> --output-avro-topic <topic> "
                + "--output-subject <subject> --bootstrap.servers <kafka brokers> "
                + "--schema-registry-url <confluent schema registry> --group.id <some id>");
        return;
    }

    Properties config = new Properties();
    config.setProperty("bootstrap.servers", parameterTool.getRequired("bootstrap.servers"));
    config.setProperty("group.id", parameterTool.getRequired("group.id"));
    String schemaRegistryUrl = parameterTool.getRequired("schema-registry-url");
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

    // Source: consume Avro-encoded User records, deserialized via the Confluent Schema Registry.
    DataStreamSource<User> input = env.addSource(
            new FlinkKafkaConsumer<>(
                    parameterTool.getRequired("input-topic"),
                    ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl),
                    config)
                    .setStartFromEarliest());

    // Map each User record to its String representation.
    SingleOutputStreamOperator<String> mapToString =
            input.map((MapFunction<User, String>) SpecificRecordBase::toString);

    // Sink 1: write the String representation to a plain-text Kafka topic.
    KafkaSink<String> stringSink = KafkaSink.<String>builder()
            .setBootstrapServers(config.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))
            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                    .setValueSerializationSchema(new SimpleStringSchema())
                    .setTopic(parameterTool.getRequired("output-string-topic"))
                    .build())
            .setKafkaProducerConfig(config)
            .build();
    mapToString.sinkTo(stringSink);

    // Sink 2: write the original User records back as Avro, registering the schema
    // under the given subject in the Confluent Schema Registry.
    KafkaSink<User> avroSink = KafkaSink.<User>builder()
            .setBootstrapServers(config.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG))
            .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                    .setValueSerializationSchema(ConfluentRegistryAvroSerializationSchema.forSpecific(
                            User.class, parameterTool.getRequired("output-subject"), schemaRegistryUrl))
                    .setTopic(parameterTool.getRequired("output-avro-topic"))
                    .build())
            .build();
    input.sinkTo(avroSink);

    env.execute("Kafka Confluent Schema Registry AVRO Example");
}
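Note that FlinkKafkaConsumer is deprecated in recent Flink releases in favor of the unified KafkaSource connector. The fragment below is a minimal sketch (not part of the original example) of how the same Schema-Registry-backed source could look with KafkaSource, assuming the same parameterTool, config, and schemaRegistryUrl variables as above and the flink-connector-kafka and flink-avro-confluent-registry dependencies on the classpath.

// Sketch only: replacement for the deprecated FlinkKafkaConsumer source above.
// Assumed imports:
//   org.apache.flink.api.common.eventtime.WatermarkStrategy
//   org.apache.flink.connector.kafka.source.KafkaSource
//   org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
KafkaSource<User> source = KafkaSource.<User>builder()
        .setBootstrapServers(config.getProperty("bootstrap.servers"))
        .setTopics(parameterTool.getRequired("input-topic"))
        .setGroupId(config.getProperty("group.id"))
        .setStartingOffsets(OffsetsInitializer.earliest())
        .setValueOnlyDeserializer(
                ConfluentRegistryAvroDeserializationSchema.forSpecific(User.class, schemaRegistryUrl))
        .build();
DataStream<User> input = env.fromSource(source, WatermarkStrategy.noWatermarks(), "Confluent Avro Source");

The rest of the pipeline (the map and the two KafkaSink instances) would stay the same, since env.fromSource also yields a DataStream<User>.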