Use of com.redhat.cloud.notifications.ingress.Action in project notifications-backend by RedHatInsights.
The class EmailSender, method sendEmail.
public Optional<NotificationHistory> sendEmail(User user, Event event, TemplateInstance subject, TemplateInstance body) {
    final HttpRequest<Buffer> bopRequest = this.buildBOPHttpRequest();
    LocalDateTime start = LocalDateTime.now(UTC);
    Action action = event.getAction();
    Timer.Sample processedTimer = Timer.start(registry);
    // uses canonical EmailSubscription
    try {
        Endpoint endpoint = endpointRepository.getOrCreateDefaultEmailSubscription(action.getAccountId());
        Notification notification = new Notification(event, endpoint);
        // TODO Add recipients processing from policies-notifications processing (failed recipients)
        //      by checking the NotificationHistory's details section (if missing payload - fix in WebhookTypeProcessor)
        // TODO If the call fails - we should probably rollback Kafka topic (if BOP is down for example)
        //      also add metrics for these failures
        NotificationHistory history = webhookSender.doHttpRequest(notification, bopRequest, getPayload(user, action, subject, body));
        processedTimer.stop(registry.timer("processor.email.processed", "bundle", action.getBundle(), "application", action.getApplication()));
        processTime.record(Duration.between(start, LocalDateTime.now(UTC)));
        return Optional.of(history);
    } catch (Exception e) {
        logger.info("Email sending failed", e);
        return Optional.empty();
    }
}
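The processedTimer above follows Micrometer's Timer.Sample pattern: the sample is started before the work, and only stopped against a timer once the dynamic tag values (bundle and application from the parsed Action) are known. Below is a minimal, self-contained sketch of that pattern using a SimpleMeterRegistry and made-up tag values rather than the project's injected registry and payload.

import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Timer;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;

public class TimerSampleSketch {

    public static void main(String[] args) throws InterruptedException {
        MeterRegistry registry = new SimpleMeterRegistry();

        // Start the sample before the tag values are known.
        Timer.Sample sample = Timer.start(registry);

        Thread.sleep(50); // stand-in for the BOP HTTP call

        // Resolve the timer only at stop time, with dynamic tags (hypothetical values).
        sample.stop(registry.timer("processor.email.processed",
                "bundle", "rhel", "application", "policies"));

        Timer timer = registry.timer("processor.email.processed",
                "bundle", "rhel", "application", "policies");
        System.out.println("count=" + timer.count());
    }
}

Resolving the timer at stop time is what lets the metric tags depend on the content of the message being processed.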
Use of com.redhat.cloud.notifications.ingress.Action in project notifications-backend by RedHatInsights.
The class EmailSubscriptionTypeProcessor, method process.
@Override
public List<NotificationHistory> process(Event event, List<Endpoint> endpoints) {
    if (endpoints == null || endpoints.isEmpty()) {
        return Collections.emptyList();
    } else {
        Action action = event.getAction();
        final EmailTemplate template = emailTemplateFactory.get(action.getBundle(), action.getApplication());
        boolean shouldSaveAggregation;
        if (useTemplatesFromDb) {
            shouldSaveAggregation = templateRepository.isEmailAggregationSupported(action.getBundle(), action.getApplication(), NON_INSTANT_SUBSCRIPTION_TYPES);
        } else {
            shouldSaveAggregation = NON_INSTANT_SUBSCRIPTION_TYPES.stream().anyMatch(emailSubscriptionType -> template.isSupported(action.getEventType(), emailSubscriptionType));
        }
        if (shouldSaveAggregation) {
            EmailAggregation aggregation = new EmailAggregation();
            aggregation.setAccountId(action.getAccountId());
            aggregation.setApplicationName(action.getApplication());
            aggregation.setBundleName(action.getBundle());
            JsonObject transformedAction = baseTransformer.transform(action);
            aggregation.setPayload(transformedAction);
            emailAggregationRepository.addEmailAggregation(aggregation);
        }
        return sendEmail(event, Set.copyOf(endpoints), template);
    }
}
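The aggregation payload saved above is the Action flattened into a Vert.x JsonObject by baseTransformer.transform. The project's transformer is not reproduced here; the sketch below only illustrates the idea, and the JSON field names it writes (bundle, application, event_type, account_id) are assumptions for illustration, not the project's canonical transformed format.

import com.redhat.cloud.notifications.ingress.Action;
import io.vertx.core.json.JsonObject;

public class ActionToJsonSketch {

    // Hypothetical transformer: copies a few Action fields into a JsonObject payload.
    public static JsonObject transform(Action action) {
        JsonObject json = new JsonObject();
        json.put("bundle", action.getBundle());
        json.put("application", action.getApplication());
        json.put("event_type", action.getEventType());
        json.put("account_id", action.getAccountId());
        return json;
    }

    public static void main(String[] args) {
        Action action = new Action();
        action.setBundle("rhel");              // hypothetical values
        action.setApplication("policies");
        action.setEventType("policy-triggered");
        action.setAccountId("123456");
        System.out.println(transform(action).encodePrettily());
    }
}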
Use of com.redhat.cloud.notifications.ingress.Action in project notifications-backend by RedHatInsights.
The class EmailSubscriptionTypeProcessor, method processAggregateEmailsByAggregationKey.
private void processAggregateEmailsByAggregationKey(EmailAggregationKey aggregationKey, LocalDateTime startTime, LocalDateTime endTime, EmailSubscriptionType emailSubscriptionType, boolean delete) {
    final EmailTemplate emailTemplate = emailTemplateFactory.get(aggregationKey.getBundle(), aggregationKey.getApplication());
    TemplateInstance subject;
    TemplateInstance body;
    if (useTemplatesFromDb) {
        Optional<AggregationEmailTemplate> aggregationEmailTemplate = templateRepository.findAggregationEmailTemplate(aggregationKey.getBundle(), aggregationKey.getApplication(), emailSubscriptionType);
        if (aggregationEmailTemplate.isEmpty()) {
            if (delete) {
                emailAggregationRepository.purgeOldAggregation(aggregationKey, endTime);
            }
            return;
        } else {
            String subjectData = aggregationEmailTemplate.get().getSubjectTemplate().getData();
            subject = templateService.compileTemplate(subjectData, "subject");
            String bodyData = aggregationEmailTemplate.get().getBodyTemplate().getData();
            body = templateService.compileTemplate(bodyData, "body");
        }
    } else {
        if (!emailTemplate.isEmailSubscriptionSupported(emailSubscriptionType)) {
            if (delete) {
                emailAggregationRepository.purgeOldAggregation(aggregationKey, endTime);
            }
            return;
        }
        subject = emailTemplate.getTitle(null, emailSubscriptionType);
        body = emailTemplate.getBody(null, emailSubscriptionType);
    }
    if (subject == null || body == null) {
        if (delete) {
            emailAggregationRepository.purgeOldAggregation(aggregationKey, endTime);
        }
        return;
    }
    try {
        for (Map.Entry<User, Map<String, Object>> aggregation : emailAggregator.getAggregated(aggregationKey, emailSubscriptionType, startTime, endTime).entrySet()) {
            Context.ContextBuilder contextBuilder = new Context.ContextBuilder();
            aggregation.getValue().forEach(contextBuilder::withAdditionalProperty);
            Action action = new Action();
            action.setContext(contextBuilder.build());
            action.setEvents(List.of());
            action.setAccountId(aggregationKey.getAccountId());
            action.setApplication(aggregationKey.getApplication());
            action.setBundle(aggregationKey.getBundle());
            // We don't have an event type as this aggregates over multiple event types
            action.setEventType(null);
            action.setTimestamp(LocalDateTime.now(ZoneOffset.UTC));
            Event event = new Event();
            event.setId(UUID.randomUUID());
            event.setAction(action);
            emailSender.sendEmail(aggregation.getKey(), event, subject, body);
        }
    } finally {
        if (delete) {
            emailAggregationRepository.purgeOldAggregation(aggregationKey, endTime);
        }
    }
}
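templateService.compileTemplate and the TemplateInstance values used for subject and body above are backed by Quarkus Qute. The sketch below shows only the underlying Qute compile-and-render pattern, with a made-up template string and data key; it is not the project's TemplateService.

import io.quarkus.qute.Engine;
import io.quarkus.qute.Template;

public class QuteSketch {

    public static void main(String[] args) {
        Engine engine = Engine.builder().addDefaults().build();

        // Hypothetical subject template; real templates live in the DB or in resources.
        Template subject = engine.parse("Daily digest for {application}");

        String rendered = subject.instance()
                .data("application", "policies") // hypothetical data key/value
                .render();

        System.out.println(rendered);
    }
}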
Use of com.redhat.cloud.notifications.ingress.Action in project notifications-backend by RedHatInsights.
The class EventConsumer, method process.
@Incoming(INGRESS_CHANNEL)
@Acknowledgment(PRE_PROCESSING)
@Blocking
public CompletionStage<Void> process(Message<String> message) {
    // This timer will have dynamic tag values based on the action parsed from the received message.
    Timer.Sample consumedTimer = Timer.start(registry);
    String payload = message.getPayload();
    // The two following variables have to be final or effectively final. That is why their type is
    // String[] instead of String (see the sketch after this listing).
    String[] bundleName = new String[1];
    String[] appName = new String[1];
    /*
     * Step 1
     * The payload (JSON) is parsed into an Action.
     */
    try {
        Action action;
        try {
            action = actionParser.fromJsonString(payload);
        } catch (Exception e) {
            /*
             * An exception (most likely UncheckedIOException) was thrown during the payload parsing. The message
             * is therefore considered rejected.
             */
            rejectedCounter.increment();
            throw e;
        }
        /*
         * The payload was successfully parsed. The resulting Action contains a bundle/app/eventType triplet which
         * is logged.
         */
        bundleName[0] = action.getBundle();
        appName[0] = action.getApplication();
        String eventTypeName = action.getEventType();
        LOGGER.infof("Processing received action: (%s) %s/%s/%s", action.getAccountId(), bundleName[0], appName[0], eventTypeName);
        /*
         * Step 2
         * The message ID is extracted from the Kafka message headers. It can be null for now to give the onboarded
         * apps time to change their integration and start sending the new header. The message ID may become
         * mandatory later. If so, we may want to throw an exception when it is null.
         */
        UUID messageId = kafkaMessageDeduplicator.findMessageId(bundleName[0], appName[0], message);
        statelessSessionFactory.withSession(statelessSession -> {
            /*
             * Step 3
             * It's time to check if the message ID is already known. For now, messages without an ID
             * (messageId == null) are always considered new.
             */
            if (kafkaMessageDeduplicator.isDuplicate(messageId)) {
                /*
                 * The message ID is already known which means we already processed the current
                 * message and sent notifications. The message is therefore ignored.
                 */
                duplicateCounter.increment();
            } else {
                /*
                 * Step 4
                 * The message ID is new. Let's persist it. The current message will never be processed again as
                 * long as its ID stays in the DB.
                 */
                kafkaMessageDeduplicator.registerMessageId(messageId);
                /*
                 * Step 5
                 * We need to retrieve an EventType from the DB using the bundle/app/eventType triplet from the
                 * parsed Action.
                 */
                EventType eventType;
                try {
                    eventType = eventTypeRepository.getEventType(bundleName[0], appName[0], eventTypeName);
                } catch (NoResultException e) {
                    /*
                     * A NoResultException was thrown because no EventType was found. The message is therefore
                     * considered rejected.
                     */
                    rejectedCounter.increment();
                    throw new NoResultException(String.format(EVENT_TYPE_NOT_FOUND_MSG, bundleName[0], appName[0], eventTypeName));
                }
                /*
                 * Step 6
                 * The EventType was found. It's time to create an Event from the current message and persist it.
                 */
                Event event = new Event(eventType, payload, action);
                if (event.getId() == null) {
                    // NOTIF-499 If there is no ID provided whatsoever we create one.
                    if (messageId != null) {
                        event.setId(messageId);
                    } else {
                        LOGGER.infof("NOID: Event with %s/%s/%s did not have an incoming id or messageId", bundleName[0], appName[0], eventTypeName);
                        event.setId(UUID.randomUUID());
                    }
                }
                eventRepository.create(event);
                /*
                 * Step 7
                 * The Event and the Action it contains are processed by all relevant endpoint processors.
                 */
                try {
                    endpointProcessor.process(event);
                } catch (Exception e) {
                    /*
                     * The Event processing failed.
                     */
                    processingErrorCounter.increment();
                    throw e;
                }
            }
        });
    } catch (Exception e) {
        /*
         * An exception was thrown at some point during the Kafka message processing.
         * It is logged and added to the exception counter metric.
         */
        processingExceptionCounter.increment();
        LOGGER.infof(e, "Could not process the payload: %s", payload);
    } finally {
        // bundleName[0] and appName[0] are null when the action parsing failed.
        String bundle = bundleName[0] == null ? "" : bundleName[0];
        String application = appName[0] == null ? "" : appName[0];
        consumedTimer.stop(registry.timer(CONSUMED_TIMER_NAME, "bundle", bundle, "application", application));
    }
    return message.ack();
}
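The String[1] workaround commented near the top of process exists because Java lambdas, such as the statelessSessionFactory.withSession callback, can only capture local variables that are final or effectively final. Here is a standalone sketch of the constraint and the workaround, independent of the project's classes; the variable names and values are made up.

import java.util.function.Supplier;

public class EffectivelyFinalSketch {

    public static void main(String[] args) {
        // String bundle = null;
        // bundle = "rhel";                     // reassigned -> not effectively final
        // Supplier<String> s = () -> bundle;   // would not compile: local variables referenced
        //                                      // from a lambda must be final or effectively final

        // Workaround used in EventConsumer: the array reference itself is effectively final,
        // while its single element can still be reassigned before or inside the lambda.
        String[] bundle = new String[1];
        bundle[0] = "rhel"; // hypothetical value
        Supplier<String> supplier = () -> bundle[0];
        System.out.println(supplier.get());
    }
}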