Use of io.quarkus.runtime.StartupEvent in project microcks by microcks.
The class AsyncMinionApp, method onStart.
/**
* Application startup method.
*/
void onStart(@Observes StartupEvent ev) {
    // We need to retrieve the Keycloak server from the Microcks config.
    KeycloakConfig config = microcksAPIConnector.getKeycloakConfig();
    logger.infof("Microcks Keycloak server url {%s} and realm {%s}", config.getAuthServerUrl(), config.getRealm());

    String keycloakEndpoint = config.getAuthServerUrl() + "/realms/" + config.getRealm() + "/protocol/openid-connect/token";
    if (!keycloakAuthURL.isEmpty() && keycloakAuthURL.get().length() > 0) {
        logger.infof("Using locally defined Keycloak Auth URL: %s", keycloakAuthURL.get());
        keycloakEndpoint = keycloakAuthURL.get() + "/realms/" + config.getRealm() + "/protocol/openid-connect/token";
    }

    try {
        // First retrieve an authentication token before fetching async messages to publish.
        String oauthToken;
        if (config.isEnabled()) {
            // We've got a full Keycloak config, attempt an authentication.
            oauthToken = keycloakConnector.connectAndGetOAuthToken(keycloakEndpoint);
            logger.info("Authentication to Keycloak server succeeded!");
        } else {
            // No realm config, probably a dev mode - use a fake token.
            oauthToken = "<anonymous-admin-token>";
            logger.info("Keycloak protection is not enabled, using a fake token");
        }

        // Page through all services exposed by the Microcks API.
        int page = 0;
        boolean fetchServices = true;
        while (fetchServices) {
            List<Service> services = microcksAPIConnector.listServices("Bearer " + oauthToken, page, SERVICES_FETCH_SIZE);
            for (Service service : services) {
                logger.debug("Found service " + service.getName() + " - " + service.getVersion());
                if (service.getType().equals(ServiceType.EVENT)) {
                    // Find the operations matching this minion's constraints.
                    List<Operation> operations = service.getOperations().stream()
                            .filter(o -> Arrays.asList(restrictedFrequencies).contains(o.getDefaultDelay()))
                            .filter(o -> o.getBindings().keySet().stream().anyMatch(Arrays.asList(supportedBindings)::contains))
                            .collect(Collectors.toList());

                    if (!operations.isEmpty()) {
                        logger.info("Found " + operations.size() + " candidate operations in " + service.getName() + " - " + service.getVersion());
                        ServiceView serviceView = microcksAPIConnector.getService("Bearer " + oauthToken, service.getId(), true);
                        for (Operation operation : operations) {
                            // Keep only the unidirectional event messages attached to this operation.
                            AsyncMockDefinition mockDefinition = new AsyncMockDefinition(serviceView.getService(), operation,
                                    serviceView.getMessagesMap().get(operation.getName()).stream()
                                            .filter(e -> e instanceof UnidirectionalEvent)
                                            .map(e -> ((UnidirectionalEvent) e).getEventMessage())
                                            .collect(Collectors.toList()));
                            mockRepository.storeMockDefinition(mockDefinition);
                            schemaRegistry.updateRegistryForService(mockDefinition.getOwnerService());
                        }
                    }
                }
            }
            if (services.size() < SERVICES_FETCH_SIZE) {
                fetchServices = false;
            }
            page++;
        }

        logger.info("Starting scheduling of all producer jobs...");
        producerScheduler.scheduleAllProducerJobs();
    } catch (ConnectorException ce) {
        logger.error("Cannot authenticate to Keycloak server and thus cannot call the Microcks API to get Async APIs to mock...", ce);
        throw new RuntimeException("Unable to start the Minion due to connection exception");
    } catch (IOException ioe) {
        logger.error("IOException while communicating with Keycloak or Microcks API", ioe);
        throw new RuntimeException("Unable to start the Minion due to IO exception");
    }
}
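All of the usages collected on this page follow the same Quarkus pattern: a CDI bean observes io.quarkus.runtime.StartupEvent and runs its initialization logic once the application has booted. The following minimal sketch shows that pattern in isolation; the bean name and logger are illustrative only and do not come from the projects above, and on Quarkus 3 the javax.enterprise imports become jakarta.enterprise.

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.event.Observes;

import org.jboss.logging.Logger;

import io.quarkus.runtime.StartupEvent;

@ApplicationScoped
public class StartupInitializer {

    private static final Logger logger = Logger.getLogger(StartupInitializer.class);

    // Quarkus fires StartupEvent once the application is fully started;
    // any @Observes method on a CDI bean can react to it.
    void onStart(@Observes StartupEvent ev) {
        logger.info("Application started, running one-time initialization...");
    }
}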
Use of io.quarkus.runtime.StartupEvent in project AD482-apps by RedHatTraining.
The class AmountWasDepositedPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<AmountWasDeposited> depositEventSerde = new ObjectMapperSerde<>(AmountWasDeposited.class);
    ObjectMapperSerde<HighValueDepositWasDetected> highValueEventSerde = new ObjectMapperSerde<>(HighValueDepositWasDetected.class);

    // Build the stream topology: keep deposits above 1000 and emit high-value alerts.
    builder.stream(AMOUNT_WAS_DEPOSITED_TOPIC, Consumed.with(Serdes.Long(), depositEventSerde))
            .filter((key, deposit) -> deposit.amount > 1000)
            .map((key, deposit) -> {
                logHighValueDepositAlert(deposit.bankAccountId, deposit.amount);
                return new KeyValue<>(deposit.bankAccountId, new HighValueDepositWasDetected(deposit.bankAccountId, deposit.amount));
            })
            .to(HIGH_VALUE_DEPOSIT_TOPIC, Produced.with(Serdes.Long(), highValueEventSerde));

    // Create the Kafka Streams instance and start it.
    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
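The generateStreamConfig() helper called at the end is not part of the snippet. A plausible sketch of what such a helper returns is shown below; the application id, bootstrap servers, and default serdes are assumptions, not the actual AD482 values.

import java.util.Properties;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

// Hypothetical sketch of the helper used above; the real course code may differ.
private Properties generateStreamConfig() {
    Properties props = new Properties();
    // Unique id of this Kafka Streams application (drives the consumer group and state dirs).
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, "amount-was-deposited-pipeline");
    // Kafka brokers to connect to, typically read from application.properties.
    props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    // Fallback serdes for operations that do not declare them explicitly.
    props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Long().getClass());
    props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
    return props;
}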
Use of io.quarkus.runtime.StartupEvent in project AD482-apps by RedHatTraining.
The class NotifyAboutLowProfitMarginPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<WindTurbineProfitMarginWasCalculated> profitEventSerde = new ObjectMapperSerde<>(WindTurbineProfitMarginWasCalculated.class);
    ObjectMapperSerde<LowProfitMarginWasDetected> alertsEventSerde = new ObjectMapperSerde<>(LowProfitMarginWasDetected.class);

    // Build the stream topology: keep profit margins below 10% and emit low-margin alerts.
    builder.stream(WIND_TURBINE_PROFIT_MARGINS_TOPIC, Consumed.with(Serdes.Integer(), profitEventSerde))
            .filter((key, profit) -> profit.profitMargin < 0.10)
            .map((key, profit) -> {
                logLowProfitMarginAlert(key, profit.profitMargin);
                return new KeyValue<>(key, new LowProfitMarginWasDetected(key, profit.profitMargin));
            })
            .to(LOW_PROFIT_MARGIN_TOPIC, Produced.with(Serdes.Integer(), alertsEventSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    // Start from a clean local state.
    streams.cleanUp();
    streams.start();
}
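None of these onStart methods closes the KafkaStreams client; a common companion, assumed here rather than shown in the project, is a ShutdownEvent observer in the same bean that releases it when Quarkus stops.

import javax.enterprise.event.Observes;

import io.quarkus.runtime.ShutdownEvent;

// Assumed companion to the onStart method above: close the Kafka Streams
// client cleanly when the Quarkus application shuts down.
void onStop(@Observes ShutdownEvent shutdownEvent) {
    if (streams != null) {
        streams.close();
    }
}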
Use of io.quarkus.runtime.StartupEvent in project AD482-apps by RedHatTraining.
The class RepartitionStream, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<TemperatureWasMeasuredInCelsius> temperaturesEventSerde = new ObjectMapperSerde<>(TemperatureWasMeasuredInCelsius.class);

    KStream<String, TemperatureWasMeasuredInCelsius> stream =
            builder.stream(TEMPERATURES_TOPIC, Consumed.with(Serdes.String(), temperaturesEventSerde));

    // Repartition the stream: rekey each measurement by its location ID.
    stream.map((key, measure) -> {
        LOGGER.infov("Repartitioning ID {0}, {1}°C ...", measure.locationId, measure.measure);
        return new KeyValue<>(measure.locationId, measure);
    }).to(TEMPERATURES_REPARTITIONED_TOPIC, Produced.with(Serdes.Integer(), temperaturesEventSerde));

    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
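Rekeying by locationId is what makes per-location stateful processing possible downstream: after the repartition, all measurements for a location land on the same partition. The short sketch below, which consumes the repartitioned topic and counts measurements per location, is an illustration only; the follow-up topology and the store name are assumptions, not part of the course code.

// Hypothetical downstream use of the repartitioned topic (requires the
// org.apache.kafka.streams.kstream.KTable and Materialized imports).
KStream<Integer, TemperatureWasMeasuredInCelsius> repartitioned =
        builder.stream(TEMPERATURES_REPARTITIONED_TOPIC, Consumed.with(Serdes.Integer(), temperaturesEventSerde));

// Records sharing a locationId are now co-partitioned, so grouping and
// counting by key needs no further shuffle.
KTable<Integer, Long> measurementsPerLocation = repartitioned
        .groupByKey()
        .count(Materialized.as("measurements-per-location"));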
Use of io.quarkus.runtime.StartupEvent in project AD482-apps by RedHatTraining.
The class AmountWasWithdrawnPipeline, method onStart.
void onStart(@Observes StartupEvent startupEvent) {
    StreamsBuilder builder = new StreamsBuilder();

    ObjectMapperSerde<AmountWasWithdrawn> withdrawalEventSerde = new ObjectMapperSerde<>(AmountWasWithdrawn.class);
    lowRiskEventSerde = new ObjectMapperSerde<>(LowRiskWithdrawnWasDetected.class);
    moderateRiskEventSerde = new ObjectMapperSerde<>(ModerateRiskWithdrawnWasDetected.class);
    highRiskEventSerde = new ObjectMapperSerde<>(HighRiskWithdrawnWasDetected.class);

    // Inverse filter: discard withdrawals of 50 or less.
    KStream<Long, AmountWasWithdrawn> mainStream = builder
            .stream(AMOUNT_WAS_WITHDRAWN_TOPIC, Consumed.with(Serdes.Long(), withdrawalEventSerde))
            .filterNot((key, withdrawal) -> withdrawal.amount <= 50);

    // Split the stream into low, moderate, and high risk branches.
    mainStream.split()
            .branch((key, withdrawal) -> withdrawal.amount > 50 && withdrawal.amount <= 1000,
                    Branched.withConsumer(this::processLowAmountEvents))
            .branch((key, withdrawal) -> withdrawal.amount > 1000 && withdrawal.amount <= 3000,
                    Branched.withConsumer(this::processModerateAmountEvents))
            .branch((key, withdrawal) -> true,
                    Branched.withConsumer(this::processHighAmountEvents));

    // Create the Kafka Streams instance and start it.
    streams = new KafkaStreams(builder.build(), generateStreamConfig());
    streams.start();
}
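The three processXxxAmountEvents methods passed to Branched.withConsumer are not included in the snippet. The sketch below shows a plausible shape for the low-risk branch; the target topic constant and the LowRiskWithdrawnWasDetected constructor arguments are assumptions.

// Hypothetical branch consumer; the real AD482 implementation may differ.
private void processLowAmountEvents(KStream<Long, AmountWasWithdrawn> lowAmountStream) {
    lowAmountStream
            .map((key, withdrawal) -> new KeyValue<>(key, new LowRiskWithdrawnWasDetected(key, withdrawal.amount)))
            .to(LOW_RISK_WITHDRAWN_TOPIC, Produced.with(Serdes.Long(), lowRiskEventSerde)); // assumed topic constant
}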