
Example 1 with WindowedSerde

Use of io.confluent.examples.streams.utils.WindowedSerde in project kafka-streams-examples by confluentinc.

From the class TopArticlesLambdaExample, method buildTopArticlesStream:

static KafkaStreams buildTopArticlesStream(final String bootstrapServers, final String schemaRegistryUrl, final String stateDir) throws IOException {
    final Properties streamsConfiguration = new Properties();
    // Give the Streams application a unique name.  The name must be unique in the Kafka cluster
    // against which the application is run.
    streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, "top-articles-lambda-example");
    streamsConfiguration.put(StreamsConfig.CLIENT_ID_CONFIG, "top-articles-lambda-example-client");
    // Where to find Kafka broker(s).
    streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    // Where to find the Confluent schema registry instance(s)
    streamsConfiguration.put(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    // Specify default (de)serializers for record keys and for record values.
    streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
    streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, GenericAvroSerde.class);
    streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, stateDir);
    // Records should be flushed every 10 seconds. This is less than the default
    // in order to keep this example interactive.
    streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 10 * 1000);
    // Serdes used in this example
    final Serde<String> stringSerde = Serdes.String();
    final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    final Serde<GenericRecord> keyAvroSerde = new GenericAvroSerde();
    keyAvroSerde.configure(serdeConfig, true);
    final Serde<GenericRecord> valueAvroSerde = new GenericAvroSerde();
    valueAvroSerde.configure(serdeConfig, false);
    final Serde<Windowed<String>> windowedStringSerde = new WindowedSerde<>(stringSerde);
    final StreamsBuilder builder = new StreamsBuilder();
    final KStream<byte[], GenericRecord> views = builder.stream(PAGE_VIEWS);
    // Load the Avro schema that describes the per-page view statistics produced below.
    final InputStream statsSchema = TopArticlesLambdaExample.class.getClassLoader().getResourceAsStream("avro/io/confluent/examples/streams/pageviewstats.avsc");
    final Schema schema = new Schema.Parser().parse(statsSchema);
    // Keep only page views that are articles, then re-key each record by a projection of its (user, page, industry) fields.
    final KStream<GenericRecord, GenericRecord> articleViews = views.filter((dummy, record) -> isArticle(record)).map((dummy, article) -> {
        final GenericRecord clone = new GenericData.Record(article.getSchema());
        clone.put("user", "user");
        clone.put("page", article.get("page"));
        clone.put("industry", article.get("industry"));
        return new KeyValue<>(clone, clone);
    });
    // Count the article views per key within hourly tumbling windows.
    final KTable<Windowed<GenericRecord>, Long> viewCounts = articleViews
        .groupByKey(Serialized.with(keyAvroSerde, valueAvroSerde))
        .windowedBy(TimeWindows.of(TimeUnit.MINUTES.toMillis(60)))
        .count();
    // Order records by view count, descending; Long.compare avoids integer overflow on large counts.
    final Comparator<GenericRecord> comparator = (o1, o2) -> Long.compare((Long) o2.get("count"), (Long) o1.get("count"));
    final KTable<Windowed<String>, PriorityQueue<GenericRecord>> allViewCounts = viewCounts.groupBy(
        // the selector: re-key each windowed article by its industry
        (windowedArticle, count) -> {
            // project on the industry field for the key
            final Windowed<String> windowedIndustry =
                new Windowed<>(windowedArticle.key().get("industry").toString(), windowedArticle.window());
            // add the page into the value
            final GenericRecord viewStats = new GenericData.Record(schema);
            viewStats.put("page", windowedArticle.key().get("page"));
            viewStats.put("user", "user");
            viewStats.put("industry", windowedArticle.key().get("industry"));
            viewStats.put("count", count);
            return new KeyValue<>(windowedIndustry, viewStats);
        },
        Serialized.with(windowedStringSerde, valueAvroSerde)
    ).aggregate(
        // the initializer
        () -> new PriorityQueue<>(comparator),
        // the "add" aggregator
        (windowedIndustry, record, queue) -> {
            queue.add(record);
            return queue;
        },
        // the "remove" aggregator
        (windowedIndustry, record, queue) -> {
            queue.remove(record);
            return queue;
        },
        Materialized.with(windowedStringSerde, new PriorityQueueSerde<>(comparator, valueAvroSerde))
    );
    final int topN = 100;
    // For each windowed industry, drain up to topN pages (in descending view-count order) into a newline-separated string.
    final KTable<Windowed<String>, String> topViewCounts = allViewCounts.mapValues(queue -> {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < topN; i++) {
            final GenericRecord record = queue.poll();
            if (record == null) {
                break;
            }
            sb.append(record.get("page").toString());
            sb.append("\n");
        }
        return sb.toString();
    });
    topViewCounts.toStream().to(TOP_NEWS_PER_INDUSTRY_TOPIC, Produced.with(windowedStringSerde, stringSerde));
    return new KafkaStreams(builder.build(), streamsConfiguration);
}
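
For context, here is a minimal sketch of how the returned KafkaStreams instance is typically driven from a main method. The default bootstrap servers, schema registry URL, and state directory are illustrative placeholders, not values taken from this example.

public static void main(final String[] args) throws IOException {
    // The defaults below are illustrative placeholders, not values from the example above.
    final String bootstrapServers = args.length > 0 ? args[0] : "localhost:9092";
    final String schemaRegistryUrl = args.length > 1 ? args[1] : "http://localhost:8081";
    final KafkaStreams streams = buildTopArticlesStream(bootstrapServers, schemaRegistryUrl, "/tmp/kafka-streams");
    // Wipe out local state so the example always starts from a clean slate; avoid cleanUp() in production.
    streams.cleanUp();
    streams.start();
    // Close the topology gracefully on SIGTERM / Ctrl-C.
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
}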
Also used : StreamsConfig(org.apache.kafka.streams.StreamsConfig) Produced(org.apache.kafka.streams.kstream.Produced) Serialized(org.apache.kafka.streams.kstream.Serialized) PriorityQueue(java.util.PriorityQueue) KStream(org.apache.kafka.streams.kstream.KStream) GenericAvroSerde(io.confluent.kafka.streams.serdes.avro.GenericAvroSerde) GenericData(org.apache.avro.generic.GenericData) Windowed(org.apache.kafka.streams.kstream.Windowed) Serde(org.apache.kafka.common.serialization.Serde) Map(java.util.Map) Serdes(org.apache.kafka.common.serialization.Serdes) WindowedSerde(io.confluent.examples.streams.utils.WindowedSerde) Utf8(org.apache.avro.util.Utf8) StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) GenericRecord(org.apache.avro.generic.GenericRecord) KTable(org.apache.kafka.streams.kstream.KTable) Schema(org.apache.avro.Schema) Properties(java.util.Properties) PriorityQueueSerde(io.confluent.examples.streams.utils.PriorityQueueSerde) KeyValue(org.apache.kafka.streams.KeyValue) IOException(java.io.IOException) AbstractKafkaAvroSerDeConfig(io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig) TimeUnit(java.util.concurrent.TimeUnit) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) Materialized(org.apache.kafka.streams.kstream.Materialized) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Comparator(java.util.Comparator) Collections(java.util.Collections) InputStream(java.io.InputStream)

Aggregations

PriorityQueueSerde (io.confluent.examples.streams.utils.PriorityQueueSerde)1 WindowedSerde (io.confluent.examples.streams.utils.WindowedSerde)1 AbstractKafkaAvroSerDeConfig (io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig)1 GenericAvroSerde (io.confluent.kafka.streams.serdes.avro.GenericAvroSerde)1 IOException (java.io.IOException)1 InputStream (java.io.InputStream)1 Collections (java.util.Collections)1 Comparator (java.util.Comparator)1 Map (java.util.Map)1 PriorityQueue (java.util.PriorityQueue)1 Properties (java.util.Properties)1 TimeUnit (java.util.concurrent.TimeUnit)1 Schema (org.apache.avro.Schema)1 GenericData (org.apache.avro.generic.GenericData)1 GenericRecord (org.apache.avro.generic.GenericRecord)1 Utf8 (org.apache.avro.util.Utf8)1 Serde (org.apache.kafka.common.serialization.Serde)1 Serdes (org.apache.kafka.common.serialization.Serdes)1 KafkaStreams (org.apache.kafka.streams.KafkaStreams)1 KeyValue (org.apache.kafka.streams.KeyValue)1
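
To show the WindowedSerde wrapper from the consuming side, here is a minimal sketch of reading the windowed output topic with a plain Kafka consumer. The bootstrap servers, group id, and the topic name "TopNewsPerIndustry" are assumptions for illustration (the example writes to the TOP_NEWS_PER_INDUSTRY_TOPIC constant), and the Duration-based poll assumes a kafka-clients version of 2.0 or newer.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.kstream.Windowed;
import io.confluent.examples.streams.utils.WindowedSerde;

public class TopArticlesConsumerSketch {
    public static void main(final String[] args) {
        // Wrap the plain String serde so the windowed keys written by the example can be read back.
        final Serde<Windowed<String>> windowedStringSerde = new WindowedSerde<>(Serdes.String());
        final Properties consumerProps = new Properties();
        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");      // assumption
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "top-articles-console-reader");  // assumption
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        try (final KafkaConsumer<Windowed<String>, String> consumer = new KafkaConsumer<>(
                consumerProps, windowedStringSerde.deserializer(), Serdes.String().deserializer())) {
            consumer.subscribe(Collections.singletonList("TopNewsPerIndustry")); // assumed output topic name
            final ConsumerRecords<Windowed<String>, String> records = consumer.poll(Duration.ofSeconds(5));
            for (final ConsumerRecord<Windowed<String>, String> record : records) {
                // The key carries the industry plus its hourly window; the value is the newline-separated page list.
                System.out.println(record.key() + " -> " + record.value());
            }
        }
    }
}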