
Example 1 with Deserializer

Use of org.apache.kafka.common.serialization.Deserializer in project kafka by apache.

The class PageViewUntypedDemo, method main:

public static void main(final String[] args) throws Exception {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-pageview-untyped");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, JsonTimestampExtractor.class);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    final StreamsBuilder builder = new StreamsBuilder();
    final Serializer<JsonNode> jsonSerializer = new JsonSerializer();
    final Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
    final Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);
    final Consumed<String, JsonNode> consumed = Consumed.with(Serdes.String(), jsonSerde);
    final KStream<String, JsonNode> views = builder.stream("streams-pageview-input", consumed);
    final KTable<String, JsonNode> users = builder.table("streams-userprofile-input", consumed);
    // project each user profile down to its region
    final KTable<String, String> userRegions = users.mapValues(record -> record.get("region").textValue());
    final Duration duration24Hours = Duration.ofHours(24);
    final KStream<JsonNode, JsonNode> regionCount = views
        // enrich each page view with the viewer's region, defaulting to "UNKNOWN" when the profile is missing
        .leftJoin(userRegions, (view, region) -> {
            final ObjectNode jNode = JsonNodeFactory.instance.objectNode();
            return (JsonNode) jNode.put("user", view.get("user").textValue())
                .put("page", view.get("page").textValue())
                .put("region", region == null ? "UNKNOWN" : region);
        })
        // re-key by region so views can be counted per region
        .map((user, viewRegion) -> new KeyValue<>(viewRegion.get("region").textValue(), viewRegion))
        .groupByKey(Grouped.with(Serdes.String(), jsonSerde))
        // 7-day windows advancing every second, with a 24-hour grace period for late records
        .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofDays(7), duration24Hours).advanceBy(Duration.ofSeconds(1)))
        .count()
        .toStream()
        // turn the windowed key and the count back into JSON nodes for the output topic
        .map((key, value) -> {
            final ObjectNode keyNode = JsonNodeFactory.instance.objectNode();
            keyNode.put("window-start", key.window().start()).put("region", key.key());
            final ObjectNode valueNode = JsonNodeFactory.instance.objectNode();
            valueNode.put("count", value);
            return new KeyValue<>((JsonNode) keyNode, (JsonNode) valueNode);
        });
    // write to the result topic
    regionCount.to("streams-pageviewstats-untyped-output", Produced.with(jsonSerde, jsonSerde));
    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();
    // usually the stream application would be running forever,
    // in this example we just let it run for some time and stop since the input data is finite.
    Thread.sleep(5000L);
    streams.close();
}
Also used : StreamsBuilder(org.apache.kafka.streams.StreamsBuilder) StreamsConfig(org.apache.kafka.streams.StreamsConfig) KTable(org.apache.kafka.streams.kstream.KTable) Properties(java.util.Properties) Produced(org.apache.kafka.streams.kstream.Produced) Consumed(org.apache.kafka.streams.kstream.Consumed) KeyValue(org.apache.kafka.streams.KeyValue) ConsumerConfig(org.apache.kafka.clients.consumer.ConsumerConfig) KStream(org.apache.kafka.streams.kstream.KStream) ObjectNode(com.fasterxml.jackson.databind.node.ObjectNode) Grouped(org.apache.kafka.streams.kstream.Grouped) JsonSerializer(org.apache.kafka.connect.json.JsonSerializer) JsonDeserializer(org.apache.kafka.connect.json.JsonDeserializer) JsonNodeFactory(com.fasterxml.jackson.databind.node.JsonNodeFactory) Serde(org.apache.kafka.common.serialization.Serde) Serializer(org.apache.kafka.common.serialization.Serializer) TimeWindows(org.apache.kafka.streams.kstream.TimeWindows) Duration(java.time.Duration) Serdes(org.apache.kafka.common.serialization.Serdes) JsonNode(com.fasterxml.jackson.databind.JsonNode) Deserializer(org.apache.kafka.common.serialization.Deserializer) KafkaStreams(org.apache.kafka.streams.KafkaStreams)
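
A minimal sketch of what such a Deserializer looks like on the inside, assuming Jackson is on the classpath; the class name MyJsonDeserializer is illustrative and is not part of the demo above, which uses the Connect JsonDeserializer. The point is that Serdes.serdeFrom(serializer, deserializer) essentially only needs the deserialize() method of the org.apache.kafka.common.serialization.Deserializer contract.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;

// Illustrative stand-in for the Connect JsonDeserializer used in the demo above.
public class MyJsonDeserializer implements Deserializer<JsonNode> {

    private final ObjectMapper mapper = new ObjectMapper();

    @Override
    public JsonNode deserialize(final String topic, final byte[] data) {
        if (data == null) {
            // tombstones / absent values come through as null
            return null;
        }
        try {
            return mapper.readTree(data);
        } catch (final Exception e) {
            throw new SerializationException("Error deserializing JSON from topic " + topic, e);
        }
    }
}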

Example 2 with Deserializer

Use of org.apache.kafka.common.serialization.Deserializer in project druid by druid-io.

The class KafkaRecordSupplier, method getKafkaConsumer:

private static KafkaConsumer<byte[], byte[]> getKafkaConsumer(ObjectMapper sortingMapper, Map<String, Object> consumerProperties) {
    final Map<String, Object> consumerConfigs = KafkaConsumerConfigs.getConsumerProperties();
    final Properties props = new Properties();
    addConsumerPropertiesFromConfig(props, sortingMapper, consumerProperties);
    props.putIfAbsent("isolation.level", "read_committed");
    props.putIfAbsent("group.id", StringUtils.format("kafka-supervisor-%s", IdUtils.getRandomId()));
    props.putAll(consumerConfigs);
    // Temporarily point the thread's context classloader at KafkaRecordSupplier's own
    // classloader so the deserializer classes named in the properties are resolved there.
    ClassLoader currCtxCl = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(KafkaRecordSupplier.class.getClassLoader());
        // Instantiate the key and value Deserializers configured under
        // "key.deserializer" and "value.deserializer".
        Deserializer keyDeserializerObject = getKafkaDeserializer(props, "key.deserializer");
        Deserializer valueDeserializerObject = getKafkaDeserializer(props, "value.deserializer");
        return new KafkaConsumer<>(props, keyDeserializerObject, valueDeserializerObject);
    } finally {
        // Restore the caller's context classloader.
        Thread.currentThread().setContextClassLoader(currCtxCl);
    }
}
Also used : Deserializer(org.apache.kafka.common.serialization.Deserializer) ByteArrayDeserializer(org.apache.kafka.common.serialization.ByteArrayDeserializer) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) Properties(java.util.Properties)
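
In this Druid code the Deserializer instances are handed directly to the KafkaConsumer constructor instead of being left for the consumer to instantiate from key.deserializer / value.deserializer settings. Below is a self-contained sketch of that constructor usage; the class and method names are illustrative and not taken from Druid:

import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.apache.kafka.common.serialization.Deserializer;

public final class ExplicitDeserializerConsumer {

    public static KafkaConsumer<byte[], byte[]> newConsumer(final String bootstrapServers) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "example-group");

        // Deserializer instances passed to the constructor are used as-is,
        // so no key.deserializer / value.deserializer entries are required.
        final Deserializer<byte[]> keyDeserializer = new ByteArrayDeserializer();
        final Deserializer<byte[]> valueDeserializer = new ByteArrayDeserializer();
        return new KafkaConsumer<>(props, keyDeserializer, valueDeserializer);
    }
}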

Example 3 with Deserializer

Use of org.apache.kafka.common.serialization.Deserializer in project kafka by apache.

The class ResetPartitionTimeIntegrationTest, method verifyOutput:

private void verifyOutput(final String topic, final List<KeyValueTimestamp<String, String>> keyValueTimestamps) {
    final Properties properties = mkProperties(mkMap(
        mkEntry(ConsumerConfig.GROUP_ID_CONFIG, "test-group"),
        mkEntry(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
        mkEntry(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName()),
        mkEntry(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName())
    ));
    IntegrationTestUtils.verifyKeyValueTimestamps(properties, topic, keyValueTimestamps);
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Deserializer(org.apache.kafka.common.serialization.Deserializer) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties)
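
Here the test builds its consumer configuration purely from class names, using the Deserializer cast only to obtain getClass().getName(). A small sketch of the same pattern extracted into a helper, assuming nothing beyond the Kafka clients library; the class and method names are illustrative:

import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.StringDeserializer;

public final class DeserializerConfigSketch {

    // Name the deserializer classes in the consumer properties, mirroring the
    // ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName() pattern above.
    public static Properties consumerProps(final String bootstrapServers,
                                           final Deserializer<?> keyDeserializer,
                                           final Deserializer<?> valueDeserializer) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer.getClass().getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer.getClass().getName());
        return props;
    }

    public static void main(final String[] args) {
        System.out.println(consumerProps("localhost:9092", new StringDeserializer(), new StringDeserializer()));
    }
}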

Example 4 with Deserializer

Use of org.apache.kafka.common.serialization.Deserializer in project kafka by apache.

The class SuppressionDurabilityIntegrationTest, method verifyOutput (List variant):

private void verifyOutput(final String topic, final List<KeyValueTimestamp<String, Long>> keyValueTimestamps) {
    final Properties properties = mkProperties(mkMap(
        mkEntry(ConsumerConfig.GROUP_ID_CONFIG, "test-group"),
        mkEntry(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
        mkEntry(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName()),
        mkEntry(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ((Deserializer<Long>) LONG_DESERIALIZER).getClass().getName())
    ));
    IntegrationTestUtils.verifyKeyValueTimestamps(properties, topic, keyValueTimestamps);
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) Deserializer(org.apache.kafka.common.serialization.Deserializer) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties)

Example 5 with Deserializer

Use of org.apache.kafka.common.serialization.Deserializer in project kafka by apache.

The class SuppressionDurabilityIntegrationTest, method verifyOutput (Set variant):

private void verifyOutput(final String topic, final Set<KeyValueTimestamp<String, Long>> keyValueTimestamps) {
    final Properties properties = mkProperties(mkMap(
        mkEntry(ConsumerConfig.GROUP_ID_CONFIG, "test-group"),
        mkEntry(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()),
        mkEntry(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ((Deserializer<String>) STRING_DESERIALIZER).getClass().getName()),
        mkEntry(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ((Deserializer<Long>) LONG_DESERIALIZER).getClass().getName())
    ));
    IntegrationTestUtils.verifyKeyValueTimestamps(properties, topic, keyValueTimestamps);
}
Also used : StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) LongDeserializer(org.apache.kafka.common.serialization.LongDeserializer) Deserializer(org.apache.kafka.common.serialization.Deserializer) Utils.mkProperties(org.apache.kafka.common.utils.Utils.mkProperties) Properties(java.util.Properties)

Aggregations

Deserializer (org.apache.kafka.common.serialization.Deserializer): 6
Properties (java.util.Properties): 5
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 3
Utils.mkProperties (org.apache.kafka.common.utils.Utils.mkProperties): 3
ByteArrayDeserializer (org.apache.kafka.common.serialization.ByteArrayDeserializer): 2
LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer): 2
JsonNode (com.fasterxml.jackson.databind.JsonNode): 1
JsonNodeFactory (com.fasterxml.jackson.databind.node.JsonNodeFactory): 1
ObjectNode (com.fasterxml.jackson.databind.node.ObjectNode): 1
InvocationTargetException (java.lang.reflect.InvocationTargetException): 1
Method (java.lang.reflect.Method): 1
Type (java.lang.reflect.Type): 1
Duration (java.time.Duration): 1
StreamException (org.apache.druid.indexing.seekablestream.common.StreamException): 1
ConsumerConfig (org.apache.kafka.clients.consumer.ConsumerConfig): 1
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 1
Serde (org.apache.kafka.common.serialization.Serde): 1
Serdes (org.apache.kafka.common.serialization.Serdes): 1
Serializer (org.apache.kafka.common.serialization.Serializer): 1
JsonDeserializer (org.apache.kafka.connect.json.JsonDeserializer): 1