Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class VehiclePositionsStream, method buildTopology.
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();

    // Create a serde to deserialize VehiclePosition messages
    ObjectMapperSerde<VehiclePosition> vehiclePositionSerde = new ObjectMapperSerde<>(VehiclePosition.class);

    // Create the stream from the "vehicle-positions" topic
    KStream<String, VehiclePosition> stream = builder.stream(
            "vehicle-positions", Consumed.with(Serdes.String(), vehiclePositionSerde));

    // Print the stream values
    stream.foreach((key, value) -> System.out.println("Received vehicle position: " + value));

    // Map positions to elevations in feet
    // and send the stream to the "vehicle-feet-elevations" topic
    stream.map((key, value) -> {
        Double feet = value.elevation * 3.28084;
        return KeyValue.pair(value.vehicleId, feet);
    }).to("vehicle-feet-elevations", Produced.with(Serdes.Integer(), Serdes.Double()));

    // Group positions by vehicle ID
    KGroupedStream<Integer, VehiclePosition> positionsByVehicle = stream.groupBy(
            (key, value) -> value.vehicleId, Grouped.with(Serdes.Integer(), vehiclePositionSerde));

    // Count positions by vehicle
    KTable<Integer, Long> countsByVehicle = positionsByVehicle.count();

    // Print the count values
    countsByVehicle.toStream().foreach((vehicleId, count) ->
            System.out.println("Vehicle: " + vehicleId + " Positions reported: " + count + "\n"));

    return builder.build();
}
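The topology above assumes a VehiclePosition event type with at least a vehicleId and an elevation field, which are used for the key mapping and the feet conversion. A minimal sketch of such a class, assuming Jackson-friendly public fields as ObjectMapperSerde expects; the real AD482-apps class may differ:

// Hypothetical event class inferred from its usage in the topology above
public class VehiclePosition {
    public Integer vehicleId;  // becomes the record key after mapping and grouping
    public Double elevation;   // elevation in meters, converted to feet in the topology

    public VehiclePosition() {
        // no-args constructor required for Jackson deserialization
    }
}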
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class NotifyAboutLowProfitMarginPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<WindTurbineProfitMarginWasCalculated> profitEventSerde = new ObjectMapperSerde<>(WindTurbineProfitMarginWasCalculated.class);
    ObjectMapperSerde<LowProfitMarginWasDetected> alertsEventSerde = new ObjectMapperSerde<>(LowProfitMarginWasDetected.class);

    // Build the stream topology: read the profit margins, keep those below 10%,
    // and publish an alert event for each of them
    builder.stream(WIND_TURBINE_PROFIT_MARGINS_TOPIC, Consumed.with(Serdes.Integer(), profitEventSerde))
            .filter((key, profit) -> profit.profitMargin < 0.10)
            .map((key, profit) -> {
                logLowProfitMarginAlert(key, profit.profitMargin);
                return new KeyValue<>(key, new LowProfitMarginWasDetected(key, profit.profitMargin));
            })
            .to(LOW_PROFIT_MARGIN_TOPIC, Produced.with(Serdes.Integer(), alertsEventSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());

    // Starting from a clean state
    streams.cleanUp();
    streams.start();
}
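generateStreamConfig() is not shown in this snippet. A minimal sketch of what such a helper typically returns for a Kafka Streams application; the application ID and bootstrap server values are assumptions, not the course's actual configuration:

// Hypothetical configuration helper; the real AD482-apps code may read
// these values from the Quarkus configuration instead of hardcoding them.
private Properties generateStreamConfig() {
    Properties props = new Properties();
    // Unique ID for this Kafka Streams application (assumed name)
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "low-profit-margin-pipeline");
    // Kafka broker to connect to (assumed local broker)
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    return props;
}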
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class WindTurbineProfitMarginsPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<WindTurbineEarningWasAdded> earningEventSerde = new ObjectMapperSerde<>(WindTurbineEarningWasAdded.class);
    ObjectMapperSerde<WindTurbineExpenseWasAdded> expenseEventSerde = new ObjectMapperSerde<>(WindTurbineExpenseWasAdded.class);
    ObjectMapperSerde<AverageData> averageDataSerde = new ObjectMapperSerde<>(AverageData.class);
    ObjectMapperSerde<WindTurbineProfitMarginWasCalculated> profitEventsSerde = new ObjectMapperSerde<>(WindTurbineProfitMarginWasCalculated.class);

    // Create a KStream for the earning events
    KStream<Integer, WindTurbineEarningWasAdded> earningsStream =
            builder.stream(WIND_TURBINE_EARNINGS_TOPIC, Consumed.with(Serdes.Integer(), earningEventSerde));

    // Aggregate the earnings
    KTable<Integer, AverageData> aggregatedEarnings = earningsStream.groupByKey().aggregate(
            AverageData::new,
            (key, value, aggregate) -> {
                aggregate.increaseCount(1);
                aggregate.increaseSum(value.amount);
                return aggregate;
            },
            Materialized.<Integer, AverageData, KeyValueStore<Bytes, byte[]>>as(AGGREGATED_EARNINGS_STORE)
                    .withKeySerde(Serdes.Integer()).withValueSerde(averageDataSerde));

    // Calculate the average earnings
    KTable<Integer, Double> averageEarningsTable = aggregatedEarnings.mapValues(
            value -> value.sum / value.count,
            Materialized.<Integer, Double, KeyValueStore<Bytes, byte[]>>as(AVERAGE_EARNINGS_STORE)
                    .withKeySerde(Serdes.Integer()).withValueSerde(Serdes.Double()));

    // Create a KStream for the expense events
    KStream<Integer, WindTurbineExpenseWasAdded> expensesStream =
            builder.stream(WIND_TURBINE_EXPENSES_TOPIC, Consumed.with(Serdes.Integer(), expenseEventSerde));

    // Aggregate the expenses
    KTable<Integer, AverageData> aggregatedExpenses = expensesStream.groupByKey().aggregate(
            AverageData::new,
            (key, value, aggregate) -> {
                aggregate.increaseCount(1);
                aggregate.increaseSum(value.amount);
                return aggregate;
            },
            Materialized.<Integer, AverageData, KeyValueStore<Bytes, byte[]>>as(AGGREGATED_EXPENSES_STORE)
                    .withKeySerde(Serdes.Integer()).withValueSerde(averageDataSerde));

    // Calculate the average expenses
    KTable<Integer, Double> averageExpensesTable = aggregatedExpenses.mapValues(
            value -> value.sum / value.count,
            Materialized.<Integer, Double, KeyValueStore<Bytes, byte[]>>as(AVERAGE_EXPENSES_STORE)
                    .withKeySerde(Serdes.Integer()).withValueSerde(Serdes.Double()));

    // Calculate the profit margins
    averageEarningsTable.join(averageExpensesTable, WindTurbineProfitMarginWasCalculated::new)
            .toStream()
            .to(WIND_TURBINE_PROFIT_MARGINS_TOPIC, Produced.with(Serdes.Integer(), profitEventsSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());

    // Starting from a clean state
    streams.cleanUp();
    streams.start();
}
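The aggregation steps rely on an AverageData accumulator with a count, a sum, and the increaseCount and increaseSum methods used above. A minimal sketch consistent with that usage; the actual AD482-apps class may differ:

// Hypothetical accumulator class inferred from its usage in the aggregations above
public class AverageData {
    public int count = 0;    // number of events aggregated so far
    public double sum = 0.0; // running sum of the event amounts

    public void increaseCount(int delta) {
        count += delta;
    }

    public void increaseSum(double amount) {
        sum += amount;
    }
}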
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project AD482-apps by RedHatTraining.
The class RepartitionStream, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<TemperatureWasMeasuredInCelsius> temperaturesEventSerde = new ObjectMapperSerde<>(TemperatureWasMeasuredInCelsius.class);

    KStream<String, TemperatureWasMeasuredInCelsius> stream =
            builder.stream(TEMPERATURES_TOPIC, Consumed.with(Serdes.String(), temperaturesEventSerde));

    // Re-key each measurement by its location ID so that the target topic
    // is partitioned by location
    stream.map((key, measure) -> {
        LOGGER.infov("Repartitioning ID {0}, {1}ºC ...", measure.locationId, measure.measure);
        return new KeyValue<>(measure.locationId, measure);
    }).to(TEMPERATURES_REPARTITIONED_TOPIC, Produced.with(Serdes.Integer(), temperaturesEventSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
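The map step re-keys each record by locationId, so the repartitioned topic ends up partitioned by location. The event class itself is not shown; a minimal sketch consistent with the fields accessed above (the numeric type of measure is an assumption):

// Hypothetical event class inferred from its usage in the map step above
public class TemperatureWasMeasuredInCelsius {
    public Integer locationId; // becomes the record key in the repartitioned topic
    public Integer measure;    // temperature in degrees Celsius (numeric type assumed)

    public TemperatureWasMeasuredInCelsius() {
        // no-args constructor required for Jackson deserialization
    }
}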
Use of io.quarkus.kafka.client.serialization.ObjectMapperSerde in project quarkus-quickstarts by quarkusio.
The class TopologyProducer, method buildTopology.
@Produces
public Topology buildTopology() {
    StreamsBuilder builder = new StreamsBuilder();
    ObjectMapperSerde<WeatherStation> weatherStationSerde = new ObjectMapperSerde<>(WeatherStation.class);
    ObjectMapperSerde<Aggregation> aggregationSerde = new ObjectMapperSerde<>(Aggregation.class);
    KeyValueBytesStoreSupplier storeSupplier = Stores.persistentKeyValueStore(WEATHER_STATIONS_STORE);

    // Global table with the weather station master data
    GlobalKTable<Integer, WeatherStation> stations =
            builder.globalTable(WEATHER_STATIONS_TOPIC, Consumed.with(Serdes.Integer(), weatherStationSerde));

    builder.stream(TEMPERATURE_VALUES_TOPIC, Consumed.with(Serdes.Integer(), Serdes.String()))
            // Enrich each "timestamp;value" record with the station's data
            .join(stations, (stationId, timestampAndValue) -> stationId, (timestampAndValue, station) -> {
                String[] parts = timestampAndValue.split(";");
                return new TemperatureMeasurement(station.id, station.name, Instant.parse(parts[0]), Double.valueOf(parts[1]));
            })
            // Aggregate the measurements per station in a persistent state store
            .groupByKey()
            .aggregate(Aggregation::new, (stationId, value, aggregation) -> aggregation.updateFrom(value),
                    Materialized.<Integer, Aggregation>as(storeSupplier)
                            .withKeySerde(Serdes.Integer()).withValueSerde(aggregationSerde))
            .toStream()
            .to(TEMPERATURES_AGGREGATED_TOPIC, Produced.with(Serdes.Integer(), aggregationSerde));
    return builder.build();
}
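Because the aggregation is materialized in the named persistent store, the results can also be read back through Kafka Streams interactive queries. A minimal sketch, assuming a running KafkaStreams instance is available; the quickstart itself exposes this through its own query and REST layer:

// Hypothetical read-side helper: look up the current aggregation for one station
Aggregation getAggregation(KafkaStreams streams, int stationId) {
    ReadOnlyKeyValueStore<Integer, Aggregation> store = streams.store(
            StoreQueryParameters.fromNameAndType(WEATHER_STATIONS_STORE, QueryableStoreTypes.keyValueStore()));
    return store.get(stationId); // null if no measurement has been aggregated for this station yet
}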