Use of org.folio.ActionProfile.FolioRecord.INSTANCE in project mod-inventory by folio-org.
Class CreateInstanceEventHandler, method handle:
@Override
public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload dataImportEventPayload) {
  CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
  try {
    dataImportEventPayload.setEventType(DI_INVENTORY_INSTANCE_CREATED.value());

    // Fail fast when the payload carries no MARC bibliographic record.
    HashMap<String, String> payloadContext = dataImportEventPayload.getContext();
    if (payloadContext == null || payloadContext.isEmpty()
        || isEmpty(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()))) {
      LOGGER.error(PAYLOAD_HAS_NO_DATA_MSG);
      return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
    }

    String jobExecutionId = dataImportEventPayload.getJobExecutionId();
    String recordId = dataImportEventPayload.getContext().get(RECORD_ID_HEADER);
    // An action profile without a child mapping profile cannot produce an Instance.
    if (dataImportEventPayload.getCurrentNode().getChildSnapshotWrappers().isEmpty()) {
      LOGGER.error(ACTION_HAS_NO_MAPPING_MSG);
      return CompletableFuture.failedFuture(
        new EventProcessingException(format(ACTION_HAS_NO_MAPPING_MSG, jobExecutionId, recordId)));
    }

    Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(),
      dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
    Record targetRecord = Json.decodeValue(payloadContext.get(EntityType.MARC_BIBLIOGRAPHIC.value()), Record.class);
    String chunkId = dataImportEventPayload.getContext().get(CHUNK_ID_HEADER);

    // Persist the recordId -> instanceId relationship before mapping the record.
    Future<RecordToEntity> recordToInstanceFuture = idStorageService.store(targetRecord.getId(),
      UUID.randomUUID().toString(), dataImportEventPayload.getTenant());
    recordToInstanceFuture.onSuccess(res -> {
      String instanceId = res.getEntityId();
      // Map the MARC record to an Instance using the cached mapping rules and parameters.
      mappingMetadataCache.get(jobExecutionId, context)
        .compose(parametersOptional -> parametersOptional
          .map(mappingMetadata -> prepareAndExecuteMapping(dataImportEventPayload,
            new JsonObject(mappingMetadata.getMappingRules()),
            Json.decodeValue(mappingMetadata.getMappingParams(), MappingParameters.class)))
          .orElseGet(() -> Future.failedFuture(format(MAPPING_PARAMETERS_NOT_FOUND_MSG, jobExecutionId, recordId, chunkId))))
        .compose(v -> {
          InstanceCollection instanceCollection = storage.getInstanceCollection(context);
          JsonObject instanceAsJson = prepareInstance(dataImportEventPayload, instanceId, jobExecutionId);
          List<String> errors = EventHandlingUtil.validateJsonByRequiredFields(instanceAsJson, requiredFields);
          if (!errors.isEmpty()) {
            String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ",
              errors, jobExecutionId, recordId, chunkId);
            LOGGER.warn(msg);
            return Future.failedFuture(msg);
          }
          // Create the Instance and its preceding/succeeding titles.
          Instance mappedInstance = Instance.fromJson(instanceAsJson);
          return addInstance(mappedInstance, instanceCollection)
            .compose(createdInstance -> precedingSucceedingTitlesHelper
              .createPrecedingSucceedingTitles(mappedInstance, context)
              .map(createdInstance));
        })
        .onSuccess(ar -> {
          // Publish the created Instance in the payload context under the INSTANCE key.
          dataImportEventPayload.getContext().put(INSTANCE.value(), Json.encode(ar));
          future.complete(dataImportEventPayload);
        })
        .onFailure(e -> {
          if (!(e instanceof DuplicateEventException)) {
            LOGGER.error("Error creating inventory Instance by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' ",
              jobExecutionId, recordId, chunkId, e);
          }
          future.completeExceptionally(e);
        });
    }).onFailure(failure -> {
      LOGGER.error("Error creating inventory recordId and instanceId relationship by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}' ",
        jobExecutionId, recordId, chunkId, failure);
      future.completeExceptionally(failure);
    });
  } catch (Exception e) {
    LOGGER.error("Error creating inventory Instance", e);
    future.completeExceptionally(e);
  }
  return future;
}
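In this handler, INSTANCE is used only as the context key under which the newly created Instance is serialized before the future completes. A minimal sketch of how a caller could read that result back out of the returned payload, assuming a prepared handler and payload (the variable names are illustrative, not from the source):

// Illustrative sketch: consuming the result of CreateInstanceEventHandler.handle.
// 'createInstanceEventHandler' and 'payload' are assumed to be set up elsewhere.
createInstanceEventHandler.handle(payload)
  .thenAccept(resultPayload -> {
    String instanceJson = resultPayload.getContext().get(INSTANCE.value());
    Instance createdInstance = Instance.fromJson(new JsonObject(instanceJson));
    LOGGER.info("Created Instance with id: {}", createdInstance.getId());
  });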
Use of org.folio.ActionProfile.FolioRecord.INSTANCE in project mod-inventory by folio-org.
Class MarcBibModifiedPostProcessingEventHandler, method handle:
@Override
public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload dataImportEventPayload) {
  CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
  try {
    HashMap<String, String> payloadContext = dataImportEventPayload.getContext();
    if (isNull(payloadContext) || isBlank(payloadContext.get(MARC_BIBLIOGRAPHIC.value()))) {
      LOGGER.error(PAYLOAD_HAS_NO_DATA_MSG);
      return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
    }
    LOGGER.info("Processing MarcBibModifiedPostProcessingEventHandler starting with jobExecutionId: {}.", dataImportEventPayload.getJobExecutionId());

    // Take the linked instance id from the additional 'i' subfield of the parsed record.
    Record record = new JsonObject(payloadContext.get(MARC_BIBLIOGRAPHIC.value())).mapTo(Record.class);
    String instanceId = ParsedRecordUtil.getAdditionalSubfieldValue(record.getParsedRecord(), ParsedRecordUtil.AdditionalSubfields.I);
    if (isBlank(instanceId)) {
      // Nothing to update: the modified record is not linked to an Instance yet.
      return CompletableFuture.completedFuture(dataImportEventPayload);
    }
    record.setExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId));

    Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(),
      dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
    Promise<Instance> instanceUpdatePromise = Promise.promise();

    // Update the Instance from the modified record, then replace its preceding/succeeding titles.
    mappingMetadataCache.get(dataImportEventPayload.getJobExecutionId(), context)
      .map(parametersOptional -> parametersOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, dataImportEventPayload.getJobExecutionId()))))
      .map(mappingMetadataDto -> buildPayloadForInstanceUpdate(dataImportEventPayload, mappingMetadataDto))
      .compose(payloadForUpdate -> instanceUpdateDelegate.handle(payloadForUpdate, record, context))
      .onSuccess(instanceUpdatePromise::complete)
      .compose(updatedInstance -> precedingSucceedingTitlesHelper.getExistingPrecedingSucceedingTitles(updatedInstance, context))
      .map(precedingSucceedingTitles -> precedingSucceedingTitles.stream()
        .map(titleJson -> titleJson.getString("id"))
        .collect(Collectors.toSet()))
      .compose(precedingSucceedingTitles -> precedingSucceedingTitlesHelper.deletePrecedingSucceedingTitles(precedingSucceedingTitles, context))
      .compose(ar -> precedingSucceedingTitlesHelper.createPrecedingSucceedingTitles(instanceUpdatePromise.future().result(), context))
      .onComplete(updateAr -> {
        if (updateAr.succeeded()) {
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          Instance resultedInstance = instanceUpdatePromise.future().result();
          // Bump the optimistic-locking version so the payload reflects the state after the update.
          if (resultedInstance.getVersion() != null) {
            int currentVersion = Integer.parseInt(resultedInstance.getVersion());
            int incrementedVersion = currentVersion + 1;
            resultedInstance.setVersion(String.valueOf(incrementedVersion));
          }
          dataImportEventPayload.getContext().put(INSTANCE.value(), Json.encode(resultedInstance));
          future.complete(dataImportEventPayload);
        } else if (updateAr.cause() instanceof OptimisticLockingException) {
          processOLError(dataImportEventPayload, future, updateAr);
        } else {
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          LOGGER.error("Error updating inventory instance by id: '{}' by jobExecutionId: '{}'",
            instanceId, dataImportEventPayload.getJobExecutionId(), updateAr.cause());
          future.completeExceptionally(updateAr.cause());
        }
      });
  } catch (Exception e) {
    dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
    LOGGER.error("Error updating inventory instance", e);
    future.completeExceptionally(e);
  }
  return future;
}
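The version bump above means the Instance placed back into the context already carries the next optimistic-locking version expected by a subsequent update. A hypothetical sketch of a downstream step reading that value (the surrounding setup is assumed, not shown in the source):

// Hypothetical downstream step: read the updated Instance published under the INSTANCE key.
String updatedJson = dataImportEventPayload.getContext().get(INSTANCE.value());
Instance updated = Instance.fromJson(new JsonObject(updatedJson));
// getVersion() returns a String in this model; parse it the same way the handler does.
int nextVersion = Integer.parseInt(updated.getVersion());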
Use of org.folio.ActionProfile.FolioRecord.INSTANCE in project mod-inventory by folio-org.
Class ReplaceInstanceEventHandler, method prepareTargetInstance:
private JsonObject prepareTargetInstance(DataImportEventPayload dataImportEventPayload, Instance instanceToUpdate) {
  JsonObject instanceAsJson = new JsonObject(dataImportEventPayload.getContext().get(INSTANCE.value()));
  if (instanceAsJson.getJsonObject(INSTANCE_PATH) != null) {
    instanceAsJson = instanceAsJson.getJsonObject(INSTANCE_PATH);
  }
  Set<String> precedingSucceedingIds = new HashSet<>();
  precedingSucceedingIds.addAll(instanceToUpdate.getPrecedingTitles().stream()
    .filter(pr -> isNotEmpty(pr.id))
    .map(pr -> pr.id)
    .collect(Collectors.toList()));
  precedingSucceedingIds.addAll(instanceToUpdate.getSucceedingTitles().stream()
    .filter(pr -> isNotEmpty(pr.id))
    .map(pr -> pr.id)
    .collect(Collectors.toList()));
  instanceAsJson.put("id", instanceToUpdate.getId());
  instanceAsJson.put(HRID_KEY, instanceToUpdate.getHrid());
  instanceAsJson.put(SOURCE_KEY, MARC_FORMAT);
  instanceAsJson.put(METADATA_KEY, instanceToUpdate.getMetadata());
  return instanceAsJson;
}
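Roughly speaking, the returned JSON is the mapped instance data with the identity fields inherited from the instance being replaced. A hypothetical call illustrating that effect (the payload and instanceToUpdate setup is assumed, not shown here):

// Hypothetical usage: identity fields come from the instance being replaced,
// everything else comes from the mapped instance stored under the INSTANCE key.
JsonObject target = prepareTargetInstance(dataImportEventPayload, instanceToUpdate);
String id = target.getString("id");       // instanceToUpdate.getId()
String hrid = target.getString(HRID_KEY); // instanceToUpdate.getHrid()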
Use of org.folio.ActionProfile.FolioRecord.INSTANCE in project mod-inventory by folio-org.
Class ReplaceInstanceEventHandler, method handle:
@Override
public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload dataImportEventPayload) { // NOSONAR
  CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
  try {
    dataImportEventPayload.setEventType(DI_INVENTORY_INSTANCE_UPDATED.value());

    // Fail fast when the payload lacks the MARC record or the Instance to be replaced.
    HashMap<String, String> payloadContext = dataImportEventPayload.getContext();
    if (payloadContext == null || payloadContext.isEmpty()
        || isEmpty(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()))
        || isEmpty(dataImportEventPayload.getContext().get(INSTANCE.value()))) {
      LOGGER.error(PAYLOAD_HAS_NO_DATA_MSG);
      return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
    }
    if (dataImportEventPayload.getCurrentNode().getChildSnapshotWrappers().isEmpty()) {
      LOGGER.error(ACTION_HAS_NO_MAPPING_MSG);
      return CompletableFuture.failedFuture(new EventProcessingException(ACTION_HAS_NO_MAPPING_MSG));
    }
    LOGGER.info("Processing ReplaceInstanceEventHandler starting with jobExecutionId: {}.", dataImportEventPayload.getJobExecutionId());

    Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(),
      dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
    Instance instanceToUpdate = Instance.fromJson(new JsonObject(dataImportEventPayload.getContext().get(INSTANCE.value())));
    prepareEvent(dataImportEventPayload);

    String jobExecutionId = dataImportEventPayload.getJobExecutionId();
    String recordId = dataImportEventPayload.getContext().get(RECORD_ID_HEADER);
    String chunkId = dataImportEventPayload.getContext().get(CHUNK_ID_HEADER);

    // Map the incoming record onto the existing Instance using the cached mapping rules and parameters.
    mappingMetadataCache.get(jobExecutionId, context)
      .compose(parametersOptional -> parametersOptional
        .map(mappingMetadata -> prepareAndExecuteMapping(dataImportEventPayload,
          new JsonObject(mappingMetadata.getMappingRules()),
          new JsonObject(mappingMetadata.getMappingParams()).mapTo(MappingParameters.class),
          instanceToUpdate))
        .orElseGet(() -> Future.failedFuture(format(MAPPING_PARAMETERS_NOT_FOUND_MSG, jobExecutionId, recordId, chunkId))))
      .compose(e -> {
        JsonObject instanceAsJson = prepareTargetInstance(dataImportEventPayload, instanceToUpdate);
        InstanceCollection instanceCollection = storage.getInstanceCollection(context);
        List<String> errors = EventHandlingUtil.validateJsonByRequiredFields(instanceAsJson, requiredFields);
        if (!errors.isEmpty()) {
          String msg = format("Mapped Instance is invalid: %s, by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s' ",
            errors, jobExecutionId, recordId, chunkId);
          LOGGER.warn(msg);
          return Future.failedFuture(msg);
        }
        // Update the Instance (retrying on optimistic-locking conflicts) and rebuild its
        // preceding/succeeding titles from the mapped data.
        Instance mappedInstance = Instance.fromJson(instanceAsJson);
        return updateInstanceAndRetryIfOlExists(mappedInstance, instanceCollection, dataImportEventPayload)
          .compose(updatedInstance -> precedingSucceedingTitlesHelper.getExistingPrecedingSucceedingTitles(mappedInstance, context))
          .map(precedingSucceedingTitles -> precedingSucceedingTitles.stream()
            .map(titleJson -> titleJson.getString("id"))
            .collect(Collectors.toSet()))
          .compose(titlesIds -> precedingSucceedingTitlesHelper.deletePrecedingSucceedingTitles(titlesIds, context))
          .compose(ar -> precedingSucceedingTitlesHelper.createPrecedingSucceedingTitles(mappedInstance, context))
          .map(instanceAsJson);
      })
      .onComplete(ar -> {
        if (ar.succeeded()) {
          // Publish the updated Instance in the payload context under the INSTANCE key.
          dataImportEventPayload.getContext().put(INSTANCE.value(), ar.result().encode());
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          future.complete(dataImportEventPayload);
        } else {
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          String errMessage = format("Error updating inventory Instance by jobExecutionId: '%s' and recordId: '%s' and chunkId: '%s': %s ",
            jobExecutionId, recordId, chunkId, ar.cause());
          LOGGER.error(errMessage);
          future.completeExceptionally(ar.cause());
        }
      });
  } catch (Exception e) {
    LOGGER.error("Error updating inventory Instance", e);
    future.completeExceptionally(e);
  }
  return future;
}
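Across all of these handlers the INSTANCE FolioRecord value plays the same role: it names the context key under which the handler publishes its resulting Instance before completing the future. A minimal sketch of that shared pattern, with an illustrative variable name:

// Shared pattern (names are illustrative): serialize the resulting Instance into the
// payload context under the INSTANCE key, then complete the future with the payload.
dataImportEventPayload.getContext().put(INSTANCE.value(), Json.encode(resultingInstance));
future.complete(dataImportEventPayload);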