Use of org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER in project mod-circulation-storage by folio-org: the publishEvent method of the class PubSubPublishingService.
public CompletableFuture<Boolean> publishEvent(String eventType, String payload) {
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventType(eventType)
    .withEventPayload(payload)
    .withEventMetadata(new EventMetadata()
      .withPublishedBy(PubSubClientUtils.getModuleId())
      .withTenantId(okapiHeaders.get(OKAPI_TENANT_HEADER))
      .withEventTTL(1));

  final CompletableFuture<Boolean> publishResult = new CompletableFuture<>();

  OkapiConnectionParams params = new OkapiConnectionParams();
  params.setOkapiUrl(okapiHeaders.get(OKAPI_URL_HEADER));
  params.setTenantId(okapiHeaders.get(OKAPI_TENANT_HEADER));
  params.setToken(okapiHeaders.get(OKAPI_TOKEN_HEADER));

  context.runOnContext(v -> PubSubClientUtils.sendEventMessage(event, params)
    .whenComplete((result, throwable) -> {
      if (Boolean.TRUE.equals(result)) {
        logger.debug("Event published successfully. ID: {}, type: {}, payload: {}",
          event.getId(), event.getEventType(), event.getEventPayload());
        publishResult.complete(true);
      } else {
        logger.error("Failed to publish event. ID: {}, type: {}, payload: {}",
          event.getId(), event.getEventType(), event.getEventPayload(), throwable);
        if (throwable == null) {
          publishResult.complete(false);
        } else {
          publishResult.completeExceptionally(throwable);
        }
      }
    }));

  return publishResult;
}
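A minimal sketch of how a caller might consume the CompletableFuture<Boolean> returned by publishEvent. Only the publishEvent(String, String) signature comes from the snippet above; the service instance, the event type and the helper method are illustrative assumptions.
static void publishLoanEvent(PubSubPublishingService service, String payload) {
  // "LOAN_DUE_DATE_CHANGED" is an example event type, not taken from the snippet above.
  service.publishEvent("LOAN_DUE_DATE_CHANGED", payload)
    .thenAccept(published -> {
      if (Boolean.TRUE.equals(published)) {
        // mod-pubsub acknowledged the event
      } else {
        // sendEventMessage resolved without a positive result and no error was raised
      }
    })
    .exceptionally(throwable -> {
      // the future was completed exceptionally with the publishing error
      return null;
    });
}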
Use of org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER in project mod-circulation by folio-org: the publishEvent method of the class PubSubPublishingService.
public CompletableFuture<Boolean> publishEvent(String eventType, String payload) {
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventType(eventType)
    .withEventPayload(payload)
    .withEventMetadata(new EventMetadata()
      .withPublishedBy(PubSubClientUtils.getModuleId())
      .withTenantId(okapiHeaders.get(OKAPI_TENANT_HEADER))
      .withEventTTL(1));

  final CompletableFuture<Boolean> publishResult = new CompletableFuture<>();

  OkapiConnectionParams params = new OkapiConnectionParams();
  params.setOkapiUrl(okapiHeaders.get(OKAPI_URL_HEADER));
  params.setTenantId(okapiHeaders.get(OKAPI_TENANT_HEADER));
  params.setToken(okapiHeaders.get(OKAPI_TOKEN_HEADER));

  vertxContext.runOnContext(v -> PubSubClientUtils.sendEventMessage(event, params)
    .whenComplete((result, throwable) -> {
      if (Boolean.TRUE.equals(result)) {
        logger.info("Event published successfully. ID: {}, type: {}, payload: {}",
          event.getId(), event.getEventType(), event.getEventPayload());
        publishResult.complete(true);
      } else {
        logger.error("Failed to publish event. ID: {}, type: {}, payload: {}, cause: {}",
          event.getId(), event.getEventType(), event.getEventPayload(), throwable);
        if (throwable == null) {
          publishResult.complete(false);
        } else {
          publishResult.completeExceptionally(throwable);
        }
      }
    }));

  return publishResult;
}
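Aside from the log levels, the main difference between the two publishEvent variants is how the failure is logged: the mod-circulation version binds the throwable to a dedicated "cause: {}" placeholder, so it is rendered via toString(), whereas passing the throwable as an extra trailing argument logs its full stack trace. A small illustration of that distinction, assuming a Log4j2 logger (the logger field in the snippets may come from a different facade):
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class ThrowableLoggingSketch {
  private static final Logger LOGGER = LogManager.getLogger(ThrowableLoggingSketch.class);

  static void demo(Throwable throwable) {
    // Throwable bound to a placeholder: rendered with toString(), no stack trace.
    LOGGER.error("Failed to publish event. ID: {}, cause: {}", "example-id", throwable);

    // Throwable passed as an extra trailing argument: logged together with its stack trace.
    LOGGER.error("Failed to publish event. ID: {}", "example-id", throwable);
  }
}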
Use of org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER in project mod-inventory by folio-org: the handle method of the class MarcBibInstanceHridSetKafkaHandler.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> promise = Promise.promise();
    Event event = OBJECT_MAPPER.readValue(record.value(), Event.class);
    @SuppressWarnings("unchecked")
    HashMap<String, String> eventPayload = OBJECT_MAPPER.readValue(event.getEventPayload(), HashMap.class);
    Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = headersMap.get(RECORD_ID_HEADER);
    String chunkId = headersMap.get(CHUNK_ID_HEADER);
    String jobExecutionId = eventPayload.get(JOB_EXECUTION_ID_HEADER);

    LOGGER.info("Event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}",
      event.getEventType(), recordId, jobExecutionId, chunkId);

    if (isEmpty(eventPayload.get(MARC_KEY))) {
      String message = format("Event payload does not contain required data to update Instance with event type: '%s', recordId: '%s' by jobExecution: '%s' and chunkId: '%s'",
        event.getEventType(), recordId, jobExecutionId, chunkId);
      LOGGER.error(message);
      return Future.failedFuture(message);
    }

    Context context = EventHandlingUtil.constructContext(headersMap.get(OKAPI_TENANT_HEADER),
      headersMap.get(OKAPI_TOKEN_HEADER), headersMap.get(OKAPI_URL_HEADER));
    Record marcRecord = new JsonObject(eventPayload.get(MARC_KEY)).mapTo(Record.class);

    mappingMetadataCache.get(jobExecutionId, context)
      .map(metadataOptional -> metadataOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId))))
      .onSuccess(mappingMetadataDto -> ensureEventPayloadWithMappingMetadata(eventPayload, mappingMetadataDto))
      .compose(v -> instanceUpdateDelegate.handle(eventPayload, marcRecord, context))
      .onComplete(ar -> {
        if (ar.succeeded()) {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          promise.complete(record.key());
        } else {
          if (ar.cause() instanceof OptimisticLockingException) {
            processOLError(record, promise, eventPayload, ar);
          } else {
            eventPayload.remove(CURRENT_RETRY_NUMBER);
            LOGGER.error("Failed to set MarcBib Hrid by jobExecutionId {}:{}", jobExecutionId, ar.cause());
            promise.fail(ar.cause());
          }
        }
      });

    return promise.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s", record.topic()), e);
    return Future.failedFuture(e);
  }
}
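The handler deserializes in two passes: the Kafka record value is read into the Event envelope, and the envelope's eventPayload string is then read into a map of string key/value pairs. A self-contained sketch of that pattern with Jackson; EventEnvelope is a simplified stand-in for the FOLIO Event DTO, not the real class:
import java.util.Map;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

// Simplified stand-in for the FOLIO Event DTO, for illustration only.
class EventEnvelope {
  public String eventType;
  public String eventPayload; // itself a serialized JSON object
}

class PayloadParsingSketch {
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  static Map<String, String> parse(String kafkaRecordValue) throws Exception {
    // First pass: the Kafka record value is the event envelope.
    EventEnvelope event = OBJECT_MAPPER.readValue(kafkaRecordValue, EventEnvelope.class);
    // Second pass: the payload is a flat JSON object, matching the
    // HashMap<String, String> read in the handler above; a TypeReference avoids
    // the unchecked cast that the handler suppresses.
    return OBJECT_MAPPER.readValue(event.eventPayload, new TypeReference<Map<String, String>>() { });
  }
}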
Use of org.folio.rest.util.OkapiConnectionParams.OKAPI_URL_HEADER in project mod-inventory by folio-org: the handle method of the class MarcHoldingsRecordHridSetKafkaHandler.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> promise = Promise.promise();
    Event event = OBJECT_MAPPER.readValue(record.value(), Event.class);
    @SuppressWarnings("unchecked")
    HashMap<String, String> eventPayload = OBJECT_MAPPER.readValue(event.getEventPayload(), HashMap.class);
    Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = headersMap.get(RECORD_ID_HEADER);
    String chunkId = headersMap.get(CHUNK_ID_HEADER);
    String jobExecutionId = eventPayload.get(JOB_EXECUTION_ID_HEADER);

    LOGGER.info("Event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}",
      event.getEventType(), recordId, jobExecutionId, chunkId);

    if (isEmpty(eventPayload.get(MARC_KEY))) {
      String message = String.format("Event payload does not contain required data to update Holdings with event type: '%s', recordId: '%s' by jobExecution: '%s' and chunkId: '%s'",
        event.getEventType(), recordId, jobExecutionId, chunkId);
      LOGGER.error(message);
      return Future.failedFuture(message);
    }

    Context context = constructContext(headersMap.get(OKAPI_TENANT_HEADER),
      headersMap.get(OKAPI_TOKEN_HEADER), headersMap.get(OKAPI_URL_HEADER));
    Record marcRecord = Json.decodeValue(eventPayload.get(MARC_KEY), Record.class);

    mappingMetadataCache.get(jobExecutionId, context)
      .map(metadataOptional -> metadataOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId))))
      .onSuccess(mappingMetadataDto -> ensureEventPayloadWithMappingMetadata(eventPayload, mappingMetadataDto))
      .compose(v -> holdingsRecordUpdateDelegate.handle(eventPayload, marcRecord, context))
      .onComplete(ar -> {
        if (ar.succeeded()) {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          promise.complete(record.key());
        } else {
          if (ar.cause() instanceof OptimisticLockingException) {
            processOLError(record, promise, eventPayload, ar);
          } else {
            eventPayload.remove(CURRENT_RETRY_NUMBER);
            LOGGER.error("Failed to process data import event payload ", ar.cause());
            promise.fail(ar.cause());
          }
        }
      });

    return promise.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s ", record.topic()), e);
    return Future.failedFuture(e);
  }
}
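The two Kafka handlers decode the MARC record from the payload in slightly different ways: the instance handler wraps the JSON string in a JsonObject and calls mapTo, while the holdings handler uses Json.decodeValue. Both rely on Vert.x's Jackson-based JSON codec and yield equivalent objects; a minimal comparison, with MarcPayload as a hypothetical stand-in for the FOLIO Record class:
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;

// Hypothetical stand-in for the FOLIO Record class, for illustration only.
class MarcPayload {
  public String id;
  public String content;
}

class DecodeStylesSketch {
  static void demo(String marcJson) {
    // Style used in MarcBibInstanceHridSetKafkaHandler: wrap in a JsonObject, then map.
    MarcPayload viaJsonObject = new JsonObject(marcJson).mapTo(MarcPayload.class);

    // Style used in MarcHoldingsRecordHridSetKafkaHandler: decode directly.
    MarcPayload viaDecodeValue = Json.decodeValue(marcJson, MarcPayload.class);
  }
}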