Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class VehicleMovementTracker, method buildTopology.
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    // Event key SerDe (all events use the vehicle ID as the Kafka record key)
    Serde<Integer> intSerde = Serdes.Integer();

    // Event value SerDes
    ObjectMapperSerde<Vehicle> vehicleSerde = new ObjectMapperSerde<>(Vehicle.class);
    ObjectMapperSerde<VehicleMetrics> vehicleMetricsSerde = new ObjectMapperSerde<>(VehicleMetrics.class);
    ObjectMapperSerde<VehicleMoved> vehicleMovedSerde = new ObjectMapperSerde<>(VehicleMoved.class);
    ObjectMapperSerde<VehicleStatus> vehicleStatusSerde = new ObjectMapperSerde<>(VehicleStatus.class);

    // Global table of registered vehicles, materialized as the "vehicles-store" state store
    GlobalKTable<Integer, Vehicle> vehiclesTable = builder.globalTable("vehicle-registered",
            Materialized.<Integer, Vehicle, KeyValueStore<Bytes, byte[]>>as("vehicles-store")
                    .withKeySerde(intSerde)
                    .withValueSerde(vehicleSerde));
    KStream<Integer, VehicleMoved> movementsStream =
            builder.stream("vehicle-moved", Consumed.with(intSerde, vehicleMovedSerde));

    // Join each movement event with the registered vehicle to build the current vehicle status
    KStream<Integer, VehicleStatus> vehicleStatusStream = movementsStream.join(vehiclesTable,
            (vehicleId, vehicleMoved) -> vehicleId,
            (vehicleMoved, vehicle) -> new VehicleStatus(vehicle,
                    vehicleMoved.latitude, vehicleMoved.longitude, vehicleMoved.elevation));

    // Materialize vehicleStatusStream to the "vehicle-status" topic
    vehicleStatusStream.to("vehicle-status", Produced.with(intSerde, vehicleStatusSerde));

    // Aggregate the status events per vehicle into the "vehicle-metrics-store" state store
    vehicleStatusStream.groupByKey().aggregate(VehicleMetrics::new,
            (vehicleId, vehicleStatus, vehicleMetrics) -> vehicleMetrics.update(vehicleStatus),
            Materialized.<Integer, VehicleMetrics, KeyValueStore<Bytes, byte[]>>as("vehicle-metrics-store")
                    .withKeySerde(intSerde)
                    .withValueSerde(vehicleMetricsSerde));
    return builder.build();
}
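Because the aggregate is materialized in the "vehicle-metrics-store" state store, it can be read back with a Kafka Streams interactive query. A minimal sketch, assuming the Quarkus-managed KafkaStreams instance is injected; the injection point and the getMetrics method are illustrative and not part of the original class:

@Inject
KafkaStreams streams;

public VehicleMetrics getMetrics(int vehicleId) {
    // Look up the read-only view of the store created by the aggregate() call above
    ReadOnlyKeyValueStore<Integer, VehicleMetrics> store = streams.store(
            StoreQueryParameters.fromNameAndType("vehicle-metrics-store",
                    QueryableStoreTypes.keyValueStore()));
    return store.get(vehicleId);
}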
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class AmountWasWithdrawnPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<AmountWasWithdrawn> withdrawalEventSerde = new ObjectMapperSerde<>(AmountWasWithdrawn.class);
    lowRiskEventSerde = new ObjectMapperSerde<>(LowRiskWithdrawnWasDetected.class);
    moderateRiskEventSerde = new ObjectMapperSerde<>(ModerateRiskWithdrawnWasDetected.class);
    highRiskEventSerde = new ObjectMapperSerde<>(HighRiskWithdrawnWasDetected.class);

    // Inverse filter: discard withdrawals of 50 or less
    KStream<Long, AmountWasWithdrawn> mainStream = builder
            .stream(AMOUNT_WAS_WITHDRAWN_TOPIC, Consumed.with(Serdes.Long(), withdrawalEventSerde))
            .filterNot((key, withdrawal) -> withdrawal.amount <= 50);

    // Split the stream into low, moderate, and high risk branches
    mainStream.split()
            .branch((key, withdrawal) -> withdrawal.amount > 50 && withdrawal.amount <= 1000,
                    Branched.withConsumer(this::processLowAmountEvents))
            .branch((key, withdrawal) -> withdrawal.amount > 1000 && withdrawal.amount <= 3000,
                    Branched.withConsumer(this::processModerateAmountEvents))
            .branch((key, withdrawal) -> true,
                    Branched.withConsumer(this::processHighAmountEvents));

    // Create the Kafka Streams instance and start it
    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
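This pipeline builds the topology and hands it to a manually managed KafkaStreams instance, so it needs the usual Streams configuration (application ID, bootstrap servers, state directory). The generateStreamConfig() helper is not shown in the snippet; a minimal sketch of what such a method might return, with illustrative values:

private Properties generateStreamConfig() {
    Properties props = new Properties();
    // Illustrative values; the real helper likely reads these from the application configuration
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "amount-was-withdrawn-pipeline");
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(StreamsConfig.STATE_DIR_CONFIG, "/tmp/kafka-streams");
    return props;
}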
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class TransformTemperature, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<TemperatureWasMeasuredInCelsius> temperaturesEventSerde =
            new ObjectMapperSerde<>(TemperatureWasMeasuredInCelsius.class);
    ObjectMapperSerde<TemperatureWasTransformed> temperaturesTransformedEventSerde =
            new ObjectMapperSerde<>(TemperatureWasTransformed.class);

    KStream<String, TemperatureWasMeasuredInCelsius> stream =
            builder.stream(TEMPERATURES_TOPIC, Consumed.with(Serdes.String(), temperaturesEventSerde));

    stream.map((key, measure) -> {
        LOGGER.infov("Transforming {0}ºC to ºF...", measure.measure);
        // Simulate a slow calculation
        try {
            Thread.sleep(CALCULATION_DELAY);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        Double fahrenheit = ((double) measure.measure * 9 / 5) + 32;
        LOGGER.infov("Temp. transformed {0}ºC -> {1}ºF (ID: {2})", measure.measure, fahrenheit, measure.locationId);
        // Re-key the record by location ID and emit the transformed event
        return new KeyValue<>(measure.locationId,
                new TemperatureWasTransformed(measure.locationId, measure.measure, fahrenheit));
    }).to(MEASURED_TEMPERATURES_TOPIC, Produced.with(Serdes.Integer(), temperaturesTransformedEventSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
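Because this KafkaStreams instance is created and started by hand rather than produced as a Topology bean, it should also be closed when the application stops. A minimal sketch of such a shutdown observer, assuming the same streams field; the onStop method is illustrative and not shown in the original class:

void onStop(@Observes ShutdownEvent shutdownEvent) {
    // Close the manually managed Streams instance so state is flushed cleanly
    if (streams != null) {
        streams.close();
    }
}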
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project quarkus by quarkusio.
The class ObjectMapperSerdeTest, method shouldSerializeAndDeserializeEntityWithGivenObjectMapper.
@Test
public void shouldSerializeAndDeserializeEntityWithGivenObjectMapper() throws Exception {
    MyEntity entity = new MyEntity();
    entity.id = 42L;
    entity.name = "Bob";

    ObjectMapper objectMapper = new ObjectMapper();

    try (ObjectMapperSerde<MyEntity> serde = new ObjectMapperSerde<>(MyEntity.class, objectMapper)) {
        byte[] serialized = serde.serializer().serialize("my-topic", entity);
        MyEntity deserialized = serde.deserializer().deserialize("my-topic", serialized);

        assertThat(deserialized.id).isEqualTo(42L);
        assertThat(deserialized.name).isEqualTo("Bob");
    }
}
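The test passes its own Jackson ObjectMapper to the serde; the single-argument constructor used in the earlier snippets falls back to a default mapper. The test entity only needs fields Jackson can map without annotations; a minimal sketch of what MyEntity might look like, assuming plain public fields:

public class MyEntity {

    public Long id;
    public String name;
}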
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project quarkus by quarkusio.
The class KafkaStreamsPipeline, method buildTopology.
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<Category> categorySerde = new ObjectMapperSerde<>(Category.class);
    ObjectMapperSerde<Customer> customerSerde = new ObjectMapperSerde<>(Customer.class);
    ObjectMapperSerde<EnrichedCustomer> enrichedCustomerSerde = new ObjectMapperSerde<>(EnrichedCustomer.class);

    KTable<Integer, Category> categories = builder.table("streams-test-categories",
            Consumed.with(Serdes.Integer(), categorySerde));

    // Re-key customers by category and join them with the categories table
    KStream<Integer, EnrichedCustomer> customers = builder
            .stream("streams-test-customers", Consumed.with(Serdes.Integer(), customerSerde))
            .selectKey((id, customer) -> customer.category)
            .join(categories,
                    (customer, category) -> new EnrichedCustomer(customer.id, customer.name, category),
                    Joined.with(Serdes.Integer(), customerSerde, categorySerde));

    // Count enriched customers per category in an in-memory state store
    KeyValueBytesStoreSupplier storeSupplier = Stores.inMemoryKeyValueStore("countstore");
    customers.groupByKey().count(Materialized.<Integer, Long>as(storeSupplier));

    // Re-key by customer ID and publish the enriched customers
    customers.selectKey((categoryId, customer) -> customer.id)
            .to("streams-test-customers-processed", Produced.with(Serdes.Integer(), enrichedCustomerSerde));

    return builder.build();
}
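If the per-category counts also needed to be published to a topic rather than only materialized in "countstore", the KTable returned by count() could be streamed out. A hedged sketch of that variant; the output topic name is illustrative:

KTable<Integer, Long> countsPerCategory = customers.groupByKey()
        .count(Materialized.<Integer, Long>as(storeSupplier));

// Illustrative: forward the running counts to an output topic
countsPerCategory.toStream()
        .to("streams-test-category-counts", Produced.with(Serdes.Integer(), Serdes.Long()));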