Use of org.apache.kafka.connect.json.JsonDeserializer in project apache-kafka-on-k8s by banzaicloud.
The class PageViewUntypedDemo, method main.
public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-pageview-untyped");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, JsonTimestampExtractor.class);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    StreamsBuilder builder = new StreamsBuilder();

    // build a JsonNode serde from the Connect JSON serializer/deserializer pair
    final Serializer<JsonNode> jsonSerializer = new JsonSerializer();
    final Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
    final Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);

    final Consumed<String, JsonNode> consumed = Consumed.with(Serdes.String(), jsonSerde);
    KStream<String, JsonNode> views = builder.stream("streams-pageview-input", consumed);
    KTable<String, JsonNode> users = builder.table("streams-userprofile-input", consumed);

    // project the user profile down to just its region field
    KTable<String, String> userRegions = users.mapValues(new ValueMapper<JsonNode, String>() {
        @Override
        public String apply(JsonNode record) {
            return record.get("region").textValue();
        }
    });

    // enrich each view with the user's region, re-key by region, then count views
    // per region over 7-day windows advancing by 1 second
    KStream<JsonNode, JsonNode> regionCount = views
        .leftJoin(userRegions, new ValueJoiner<JsonNode, String, JsonNode>() {
            @Override
            public JsonNode apply(JsonNode view, String region) {
                ObjectNode jNode = JsonNodeFactory.instance.objectNode();
                return jNode.put("user", view.get("user").textValue())
                            .put("page", view.get("page").textValue())
                            .put("region", region == null ? "UNKNOWN" : region);
            }
        })
        .map(new KeyValueMapper<String, JsonNode, KeyValue<String, JsonNode>>() {
            @Override
            public KeyValue<String, JsonNode> apply(String user, JsonNode viewRegion) {
                return new KeyValue<>(viewRegion.get("region").textValue(), viewRegion);
            }
        })
        .groupByKey(Serialized.with(Serdes.String(), jsonSerde))
        .windowedBy(TimeWindows.of(7 * 24 * 60 * 60 * 1000L).advanceBy(1000))
        .count()
        .toStream()
        .map(new KeyValueMapper<Windowed<String>, Long, KeyValue<JsonNode, JsonNode>>() {
            @Override
            public KeyValue<JsonNode, JsonNode> apply(Windowed<String> key, Long value) {
                ObjectNode keyNode = JsonNodeFactory.instance.objectNode();
                keyNode.put("window-start", key.window().start()).put("region", key.key());
                ObjectNode valueNode = JsonNodeFactory.instance.objectNode();
                valueNode.put("count", value);
                return new KeyValue<>((JsonNode) keyNode, (JsonNode) valueNode);
            }
        });

    // write to the result topic
    regionCount.to("streams-pageviewstats-untyped-output", Produced.with(jsonSerde, jsonSerde));

    KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();

    // usually the stream application would be running forever,
    // in this example we just let it run for some time and stop since the input data is finite.
    Thread.sleep(5000L);
    streams.close();
}
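The demo registers JsonTimestampExtractor as the default timestamp extractor but its implementation is not shown here. A minimal sketch of such an extractor, assuming each JSON payload carries a numeric "timestamp" field (the class body and the field name are assumptions, not the project's actual code):

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.processor.TimestampExtractor;
import com.fasterxml.jackson.databind.JsonNode;

// Hypothetical sketch: pull event time out of the JSON payload so the
// windowed count operates on when the view happened, not when it was ingested.
public class JsonTimestampExtractor implements TimestampExtractor {
    @Override
    public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
        if (record.value() instanceof JsonNode) {
            // assumes the producer writes a numeric "timestamp" field
            return ((JsonNode) record.value()).get("timestamp").longValue();
        }
        // fall back to the record's own timestamp for non-JSON values
        return record.timestamp();
    }
}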
Use of org.apache.kafka.connect.json.JsonDeserializer in project kafka by apache.
The class PageViewUntypedDemo, method main. This is the same demo written against the newer Streams API: lambdas instead of anonymous classes, Grouped in place of Serialized, and Duration-based windows via TimeWindows.ofSizeAndGrace.
public static void main(final String[] args) throws Exception {
    final Properties props = new Properties();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-pageview-untyped");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, JsonTimestampExtractor.class);
    props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
    // setting offset reset to earliest so that we can re-run the demo code with the same pre-loaded data
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    final StreamsBuilder builder = new StreamsBuilder();

    final Serializer<JsonNode> jsonSerializer = new JsonSerializer();
    final Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
    final Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);

    final Consumed<String, JsonNode> consumed = Consumed.with(Serdes.String(), jsonSerde);
    final KStream<String, JsonNode> views = builder.stream("streams-pageview-input", consumed);
    final KTable<String, JsonNode> users = builder.table("streams-userprofile-input", consumed);

    final KTable<String, String> userRegions = users.mapValues(record -> record.get("region").textValue());

    final Duration duration24Hours = Duration.ofHours(24);

    // enrich each view with the user's region, re-key by region, then count views
    // per region over 7-day windows (24-hour grace period) advancing by 1 second
    final KStream<JsonNode, JsonNode> regionCount = views
        .leftJoin(userRegions, (view, region) -> {
            final ObjectNode jNode = JsonNodeFactory.instance.objectNode();
            return (JsonNode) jNode.put("user", view.get("user").textValue())
                                   .put("page", view.get("page").textValue())
                                   .put("region", region == null ? "UNKNOWN" : region);
        })
        .map((user, viewRegion) -> new KeyValue<>(viewRegion.get("region").textValue(), viewRegion))
        .groupByKey(Grouped.with(Serdes.String(), jsonSerde))
        .windowedBy(TimeWindows.ofSizeAndGrace(Duration.ofDays(7), duration24Hours).advanceBy(Duration.ofSeconds(1)))
        .count()
        .toStream()
        .map((key, value) -> {
            final ObjectNode keyNode = JsonNodeFactory.instance.objectNode();
            keyNode.put("window-start", key.window().start()).put("region", key.key());
            final ObjectNode valueNode = JsonNodeFactory.instance.objectNode();
            valueNode.put("count", value);
            return new KeyValue<>((JsonNode) keyNode, (JsonNode) valueNode);
        });

    // write to the result topic
    regionCount.to("streams-pageviewstats-untyped-output", Produced.with(jsonSerde, jsonSerde));

    final KafkaStreams streams = new KafkaStreams(builder.build(), props);
    streams.start();

    // usually the stream application would be running forever,
    // in this example we just let it run for some time and stop since the input data is finite.
    Thread.sleep(5000L);
    streams.close();
}
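Both variants assume the input topics are pre-loaded. A hypothetical data loader that seeds them, reusing the Connect JsonSerializer for values and writing the "timestamp" field the extractor is assumed to read (the class name PageViewDataLoader and the record contents are illustrative, not part of either project):

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.connect.json.JsonSerializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class PageViewDataLoader {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

        try (Producer<String, JsonNode> producer = new KafkaProducer<>(props)) {
            // one user profile keyed by user id, consumed by builder.table(...)
            ObjectNode profile = JsonNodeFactory.instance.objectNode();
            profile.put("region", "europe").put("timestamp", System.currentTimeMillis());
            producer.send(new ProducerRecord<>("streams-userprofile-input", "alice", profile));

            // a page view keyed by the same user id, consumed by builder.stream(...),
            // so the stream-table leftJoin matches on the user key
            ObjectNode view = JsonNodeFactory.instance.objectNode();
            view.put("user", "alice").put("page", "index.html").put("timestamp", System.currentTimeMillis());
            producer.send(new ProducerRecord<>("streams-pageview-input", "alice", view));
        }
    }
}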
Use of org.apache.kafka.connect.json.JsonDeserializer in project debezium by debezium.
The class AbstractConnectorTest, method initializeConnectorTestFramework.
@Before
public final void initializeConnectorTestFramework() {
    LoggingContext.forConnector(getClass().getSimpleName(), "", "test");

    keyJsonConverter = new JsonConverter();
    valueJsonConverter = new JsonConverter();
    keyJsonDeserializer = new JsonDeserializer();
    valueJsonDeserializer = new JsonDeserializer();

    Configuration converterConfig = Configuration.create().build();
    Configuration deserializerConfig = Configuration.create().build();

    // the boolean flag marks whether the instance handles record keys (true) or values (false)
    keyJsonConverter.configure(converterConfig.asMap(), true);
    valueJsonConverter.configure(converterConfig.asMap(), false);
    keyJsonDeserializer.configure(deserializerConfig.asMap(), true);
    valueJsonDeserializer.configure(deserializerConfig.asMap(), false);

    resetBeforeEachTest();
    consumedLines = new ArrayBlockingQueue<>(getMaximumEnqueuedRecordCount());
    Testing.Files.delete(OFFSET_STORE_PATH);
    OFFSET_STORE_PATH.getParent().toFile().mkdirs();
}
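Configured this way, a test can round-trip Connect data through the converter/deserializer pair: serialize a value to JSON bytes with the JsonConverter, then parse the bytes back into a JsonNode for assertions. A sketch under the configuration above, using org.apache.kafka.connect.data.Schema, SchemaBuilder, and Struct (the schema, topic name, and values are hypothetical):

// Hypothetical round trip: Connect data -> JSON bytes -> JsonNode
Schema schema = SchemaBuilder.struct()
        .field("id", Schema.INT32_SCHEMA)
        .build();
Struct value = new Struct(schema).put("id", 42);

byte[] bytes = valueJsonConverter.fromConnectData("my-topic", schema, value);
JsonNode parsed = valueJsonDeserializer.deserialize("my-topic", bytes);

// with the default converter config the JSON envelope wraps the data
// in "schema" and "payload" fields
assertEquals(42, parsed.get("payload").get("id").intValue());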