Usage of org.folio.inventory.common.Context in project mod-inventory by folio-org.
Example: the handle method of the DataImportKafkaHandler class.
/**
 * Consumes a data import Kafka record: decodes the event payload, enriches its
 * context with the record/chunk identifiers from the Kafka headers, resolves the
 * job profile snapshot from the cache and delegates processing to the EventManager.
 *
 * @param record Kafka consumer record whose value is a JSON-encoded {@link Event}
 * @return future completed with the record key on success; failed when the snapshot
 *         is missing, processing fails, or the processed payload is a DI_ERROR event
 */
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> result = Promise.promise();
    Event event = Json.decodeValue(record.value(), Event.class);
    DataImportEventPayload payload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);

    Map<String, String> kafkaHeaders = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = kafkaHeaders.get(RECORD_ID_HEADER);
    String chunkId = kafkaHeaders.get(CHUNK_ID_HEADER);
    String jobExecutionId = payload.getJobExecutionId();
    LOGGER.info("Data import event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}",
      payload.getEventType(), recordId, jobExecutionId, chunkId);

    // Propagate Kafka header values into the payload context for downstream handlers.
    payload.getContext().put(RECORD_ID_HEADER, recordId);
    payload.getContext().put(CHUNK_ID_HEADER, chunkId);

    Context context = EventHandlingUtil.constructContext(payload.getTenant(), payload.getToken(), payload.getOkapiUrl());
    String jobProfileSnapshotId = payload.getContext().get(PROFILE_SNAPSHOT_ID_KEY);

    profileSnapshotCache.get(jobProfileSnapshotId, context)
      .toCompletionStage()
      .thenCompose(snapshotOptional -> snapshotOptional
        .map(profileSnapshot -> EventManager.handleEvent(payload, profileSnapshot))
        .orElse(CompletableFuture.failedFuture(new EventProcessingException(
          format("Job profile snapshot with id '%s' does not exist", jobProfileSnapshotId)))))
      .whenComplete((processedPayload, throwable) -> {
        if (throwable != null) {
          result.fail(throwable);
        } else if (DI_ERROR.value().equals(processedPayload.getEventType())) {
          // A DI_ERROR result means processing failed downstream; surface it as a failure.
          result.fail("Failed to process data import event payload");
        } else {
          result.complete(record.key());
        }
      });
    return result.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s", record.topic()), e);
    return Future.failedFuture(e);
  }
}
Usage of org.folio.inventory.common.Context in project mod-inventory by folio-org.
Example: the handle method of the MarcBibInstanceHridSetKafkaHandler class.
/**
 * Consumes a MARC-Bib instance HRID set event: decodes the event payload, validates
 * that it contains the MARC record, loads mapping metadata for the job execution and
 * delegates the instance update to {@code instanceUpdateDelegate}. Optimistic-locking
 * failures are retried via {@code processOLError}.
 *
 * @param record Kafka consumer record whose value is a JSON-encoded {@link Event}
 * @return future completed with the record key on success, failed otherwise
 */
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> promise = Promise.promise();
    Event event = OBJECT_MAPPER.readValue(record.value(), Event.class);
    @SuppressWarnings("unchecked")
    HashMap<String, String> eventPayload = OBJECT_MAPPER.readValue(event.getEventPayload(), HashMap.class);
    Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = headersMap.get(RECORD_ID_HEADER);
    String chunkId = headersMap.get(CHUNK_ID_HEADER);
    String jobExecutionId = eventPayload.get(JOB_EXECUTION_ID_HEADER);
    LOGGER.info("Event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}", event.getEventType(), recordId, jobExecutionId, chunkId);

    // Without the MARC record there is nothing to map the Instance update from.
    if (isEmpty(eventPayload.get(MARC_KEY))) {
      String message = format("Event payload does not contain required data to update Instance with event type: '%s', recordId: '%s' by jobExecution: '%s' and chunkId: '%s'", event.getEventType(), recordId, jobExecutionId, chunkId);
      LOGGER.error(message);
      return Future.failedFuture(message);
    }

    Context context = EventHandlingUtil.constructContext(headersMap.get(OKAPI_TENANT_HEADER), headersMap.get(OKAPI_TOKEN_HEADER), headersMap.get(OKAPI_URL_HEADER));
    Record marcRecord = new JsonObject(eventPayload.get(MARC_KEY)).mapTo(Record.class);

    mappingMetadataCache.get(jobExecutionId, context)
      .map(metadataOptional -> metadataOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId))))
      .onSuccess(mappingMetadataDto -> ensureEventPayloadWithMappingMetadata(eventPayload, mappingMetadataDto))
      .compose(v -> instanceUpdateDelegate.handle(eventPayload, marcRecord, context))
      .onComplete(ar -> {
        if (ar.succeeded()) {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          promise.complete(record.key());
        } else {
          if (ar.cause() instanceof OptimisticLockingException) {
            // Retry on optimistic-locking conflicts; retry counter stays in the payload.
            processOLError(record, promise, eventPayload, ar);
          } else {
            eventPayload.remove(CURRENT_RETRY_NUMBER);
            // Fix: pass the cause as the trailing throwable argument so the stack trace
            // is logged; previously it was consumed by a '{}' placeholder (toString only).
            LOGGER.error("Failed to set MarcBib Hrid by jobExecutionId {}", jobExecutionId, ar.cause());
            promise.fail(ar.cause());
          }
        }
      });
    return promise.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s", record.topic()), e);
    return Future.failedFuture(e);
  }
}
Usage of org.folio.inventory.common.Context in project mod-inventory by folio-org.
Example: the handle method of the MarcHoldingsRecordHridSetKafkaHandler class.
/**
 * Consumes a MARC-Holdings record HRID set event: decodes the event payload, validates
 * that it contains the MARC record, loads mapping metadata for the job execution and
 * delegates the holdings update to {@code holdingsRecordUpdateDelegate}.
 * Optimistic-locking failures are retried via {@code processOLError}.
 *
 * @param record Kafka consumer record whose value is a JSON-encoded {@link Event}
 * @return future completed with the record key on success, failed otherwise
 */
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> promise = Promise.promise();
    Event event = OBJECT_MAPPER.readValue(record.value(), Event.class);
    @SuppressWarnings("unchecked")
    HashMap<String, String> eventPayload = OBJECT_MAPPER.readValue(event.getEventPayload(), HashMap.class);
    Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = headersMap.get(RECORD_ID_HEADER);
    String chunkId = headersMap.get(CHUNK_ID_HEADER);
    String jobExecutionId = eventPayload.get(JOB_EXECUTION_ID_HEADER);
    LOGGER.info("Event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}", event.getEventType(), recordId, jobExecutionId, chunkId);

    // Without the MARC record there is nothing to map the Holdings update from.
    if (isEmpty(eventPayload.get(MARC_KEY))) {
      // Consistency fix: use the statically imported format(), matching the rest of this class.
      String message = format("Event payload does not contain required data to update Holdings with event type: '%s', recordId: '%s' by jobExecution: '%s' and chunkId: '%s'", event.getEventType(), recordId, jobExecutionId, chunkId);
      LOGGER.error(message);
      return Future.failedFuture(message);
    }

    Context context = constructContext(headersMap.get(OKAPI_TENANT_HEADER), headersMap.get(OKAPI_TOKEN_HEADER), headersMap.get(OKAPI_URL_HEADER));
    Record marcRecord = Json.decodeValue(eventPayload.get(MARC_KEY), Record.class);

    mappingMetadataCache.get(jobExecutionId, context)
      .map(metadataOptional -> metadataOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId))))
      .onSuccess(mappingMetadataDto -> ensureEventPayloadWithMappingMetadata(eventPayload, mappingMetadataDto))
      .compose(v -> holdingsRecordUpdateDelegate.handle(eventPayload, marcRecord, context))
      .onComplete(ar -> {
        if (ar.succeeded()) {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          promise.complete(record.key());
        } else {
          if (ar.cause() instanceof OptimisticLockingException) {
            // Retry on optimistic-locking conflicts; retry counter stays in the payload.
            processOLError(record, promise, eventPayload, ar);
          } else {
            eventPayload.remove(CURRENT_RETRY_NUMBER);
            LOGGER.error("Failed to process data import event payload ", ar.cause());
            promise.fail(ar.cause());
          }
        }
      });
    return promise.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s ", record.topic()), e);
    return Future.failedFuture(e);
  }
}
Usage of org.folio.inventory.common.Context in project mod-inventory by folio-org.
Example: the handle method of the CreateItemEventHandler class.
// Handles an item-creation data import event: maps the incoming MARC bibliographic
// payload to an Item, validates required fields, enforces barcode uniqueness, and
// stores the item. Completes the returned future with the (mutated) payload on
// success, or exceptionally on any validation/mapping/storage failure.
@Override
public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload dataImportEventPayload) {
CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
try {
// The outgoing event type is set up-front; note this happens BEFORE the event type
// is pushed onto the events chain below, so the chain records the NEW type.
// NOTE(review): confirm this ordering is intentional vs. recording the incoming type.
dataImportEventPayload.setEventType(DI_INVENTORY_ITEM_CREATED.value());
HashMap<String, String> payloadContext = dataImportEventPayload.getContext();
// Fail fast when the payload carries no MARC bibliographic source record.
if (payloadContext == null || isBlank(payloadContext.get(EntityType.MARC_BIBLIOGRAPHIC.value()))) {
LOG.error(PAYLOAD_HAS_NO_DATA_MSG);
return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
}
// The action profile must have a child mapping profile to drive field mapping.
if (dataImportEventPayload.getCurrentNode().getChildSnapshotWrappers().isEmpty()) {
LOG.error(ACTION_HAS_NO_MAPPING_MSG);
return CompletableFuture.failedFuture(new EventProcessingException(ACTION_HAS_NO_MAPPING_MSG));
}
dataImportEventPayload.getEventsChain().add(dataImportEventPayload.getEventType());
// Descend to the mapping profile node and seed an empty ITEM entry to be mapped into.
dataImportEventPayload.setCurrentNode(dataImportEventPayload.getCurrentNode().getChildSnapshotWrappers().get(0));
dataImportEventPayload.getContext().put(ITEM.value(), new JsonObject().encode());
String jobExecutionId = dataImportEventPayload.getJobExecutionId();
String recordId = dataImportEventPayload.getContext().get(RECORD_ID_HEADER);
String chunkId = dataImportEventPayload.getContext().get(CHUNK_ID_HEADER);
// Persist the recordId -> (new random) itemId relationship before creating the item.
Future<RecordToEntity> recordToItemFuture = idStorageService.store(recordId, UUID.randomUUID().toString(), dataImportEventPayload.getTenant());
recordToItemFuture.onSuccess(res -> {
String itemId = res.getEntityId();
Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
ItemCollection itemCollection = storage.getItemCollection(context);
// Mapping metadata is fetched per jobExecution from the cache; absence is an error.
mappingMetadataCache.get(jobExecutionId, context).map(parametersOptional -> parametersOptional.orElseThrow(() -> new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId, recordId, chunkId)))).map(mappingMetadataDto -> {
MappingParameters mappingParameters = Json.decodeValue(mappingMetadataDto.getMappingParams(), MappingParameters.class);
// MappingManager mutates the payload context in place, filling the ITEM entry.
MappingManager.map(dataImportEventPayload, new MappingContext().withMappingParameters(mappingParameters));
return processMappingResult(dataImportEventPayload, itemId);
}).compose(mappedItemJson -> {
// Validate required fields on the mapped item before touching storage.
List<String> errors = validateItem(mappedItemJson, requiredFields);
if (!errors.isEmpty()) {
String msg = format("Mapped Item is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ", errors, jobExecutionId, recordId, chunkId);
LOG.error(msg);
return Future.failedFuture(msg);
}
Item mappedItem = ItemUtil.jsonToItem(mappedItemJson);
// Enforce barcode uniqueness, then create the item in storage.
return isItemBarcodeUnique(mappedItemJson.getString("barcode"), itemCollection).compose(isUnique -> isUnique ? addItem(mappedItem, itemCollection) : Future.failedFuture(format("Barcode must be unique, %s is already assigned to another item", mappedItemJson.getString("barcode"))));
}).onComplete(ar -> {
if (ar.succeeded()) {
// Replace the seeded ITEM placeholder with the created item's JSON.
dataImportEventPayload.getContext().put(ITEM.value(), Json.encode(ar.result()));
future.complete(dataImportEventPayload);
} else {
// Duplicate events are expected on redelivery; don't log them as errors.
if (!(ar.cause() instanceof DuplicateEventException)) {
LOG.error("Error creating inventory Item by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' ", jobExecutionId, recordId, chunkId, ar.cause());
}
future.completeExceptionally(ar.cause());
}
});
}).onFailure(failure -> {
LOG.error("Error creating inventory recordId and itemId relationship by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' ", jobExecutionId, recordId, chunkId, failure);
future.completeExceptionally(failure);
});
} catch (Exception e) {
LOG.error("Error creating inventory Item", e);
future.completeExceptionally(e);
}
return future;
}
Usage of org.folio.inventory.common.Context in project mod-inventory by folio-org.
Example: the handle method of the InstanceUpdateDelegate class.
/**
 * Maps the parsed MARC record to an Instance using the mapping rules and parameters
 * carried in the event payload, then updates the existing Instance in storage.
 *
 * @param eventPayload payload containing mapping rules/parameters under
 *                     MAPPING_RULES_KEY and MAPPING_PARAMS_KEY
 * @param marcRecord   source record holding the parsed content and the target instance id
 * @param context      Okapi tenant/token/url context used to access storage
 * @return future with the updated Instance, or a failed future on any error
 */
public Future<Instance> handle(Map<String, String> eventPayload, Record marcRecord, Context context) {
  try {
    JsonObject rules = new JsonObject(eventPayload.get(MAPPING_RULES_KEY));
    MappingParameters params = new JsonObject(eventPayload.get(MAPPING_PARAMS_KEY)).mapTo(MappingParameters.class);
    JsonObject parsedContent = retrieveParsedContent(marcRecord.getParsedRecord());
    String instanceId = marcRecord.getExternalIdsHolder().getInstanceId();

    RecordMapper<org.folio.Instance> mapper = RecordMapperBuilder.buildMapper(MARC_FORMAT);
    org.folio.Instance mappedInstance = mapper.mapRecord(parsedContent, params, rules);

    InstanceCollection instances = storage.getInstanceCollection(context);
    return getInstanceById(instanceId, instances)
      .onSuccess(existing -> fillVersion(existing, eventPayload))
      .compose(existing -> updateInstance(existing, mappedInstance))
      .compose(updated -> updateInstanceInStorage(updated, instances));
  } catch (Exception e) {
    LOGGER.error("Error updating inventory instance", e);
    return Future.failedFuture(e);
  }
}
Aggregations