Use of org.folio.inventory.dataimport.exceptions.OptimisticLockingException in project mod-inventory by folio-org.
Class HoldingsUpdateDelegate, method updateHoldingsRecord.
private Future<HoldingsRecord> updateHoldingsRecord(HoldingsRecord holdingsRecord, HoldingsRecordCollection holdingsRecordCollection) {
  Promise<HoldingsRecord> promise = Promise.promise();
  holdingsRecordCollection.update(holdingsRecord, success -> promise.complete(holdingsRecord),
    failure -> {
      if (failure.getStatusCode() == HttpStatus.SC_CONFLICT) {
        promise.fail(new OptimisticLockingException(failure.getReason()));
      } else {
        LOGGER.error(format("Error updating Holdings - %s, status code %s", failure.getReason(), failure.getStatusCode()));
        promise.fail(failure.getReason());
      }
    });
  return promise.future();
}
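The delegate turns an HTTP 409 (conflict) response from holdings storage into a dedicated exception type so that callers can tell an optimistic-locking failure apart from any other update error. For orientation, a minimal sketch of what such an exception class could look like is shown below; the actual class in mod-inventory may declare more than this.

// Minimal sketch (assumption): an unchecked exception carrying the storage
// failure reason, used to signal an optimistic-locking conflict (HTTP 409).
package org.folio.inventory.dataimport.exceptions;

public class OptimisticLockingException extends RuntimeException {
  public OptimisticLockingException(String message) {
    super(message);
  }
}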
Use of org.folio.inventory.dataimport.exceptions.OptimisticLockingException in project mod-inventory by folio-org.
Class InstanceUpdateDelegate, method updateInstanceInStorage.
private Future<Instance> updateInstanceInStorage(Instance instance, InstanceCollection instanceCollection) {
  Promise<Instance> promise = Promise.promise();
  instanceCollection.update(instance, success -> promise.complete(instance),
    failure -> {
      if (failure.getStatusCode() == HttpStatus.SC_CONFLICT) {
        promise.fail(new OptimisticLockingException(failure.getReason()));
      } else {
        LOGGER.error(format("Error updating Instance - %s, status code %s", failure.getReason(), failure.getStatusCode()));
        promise.fail(failure.getReason());
      }
    });
  return promise.future();
}
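Both delegates follow the same pattern: a 409 from storage becomes an OptimisticLockingException, while every other failure is propagated as a plain error reason. A caller can therefore branch on the exception type, for example to re-read the record (picking up its current version) and retry the update once. The sketch below illustrates that idea; updateWithSingleRetry and refetchInstance are hypothetical helpers, not part of the module's API.

// Hypothetical retry wrapper around the delegate method above (sketch only).
private Future<Instance> updateWithSingleRetry(Instance instance, InstanceCollection instanceCollection) {
  return updateInstanceInStorage(instance, instanceCollection)
    .recover(cause -> {
      if (cause instanceof OptimisticLockingException) {
        // Re-read the instance to obtain its current version, then try once more.
        return refetchInstance(instance.getId(), instanceCollection)   // assumed helper
          .compose(fresh -> updateInstanceInStorage(fresh, instanceCollection));
      }
      return Future.failedFuture(cause);
    });
}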
Use of org.folio.inventory.dataimport.exceptions.OptimisticLockingException in project mod-inventory by folio-org.
Class MarcBibModifiedPostProcessingEventHandler, method handle.
@Override
public CompletableFuture<DataImportEventPayload> handle(DataImportEventPayload dataImportEventPayload) {
  CompletableFuture<DataImportEventPayload> future = new CompletableFuture<>();
  try {
    HashMap<String, String> payloadContext = dataImportEventPayload.getContext();
    if (isNull(payloadContext) || isBlank(payloadContext.get(MARC_BIBLIOGRAPHIC.value()))) {
      LOGGER.error(PAYLOAD_HAS_NO_DATA_MSG);
      return CompletableFuture.failedFuture(new EventProcessingException(PAYLOAD_HAS_NO_DATA_MSG));
    }
    LOGGER.info("Processing ReplaceInstanceEventHandler starting with jobExecutionId: {}.", dataImportEventPayload.getJobExecutionId());

    Record record = new JsonObject(payloadContext.get(MARC_BIBLIOGRAPHIC.value())).mapTo(Record.class);
    String instanceId = ParsedRecordUtil.getAdditionalSubfieldValue(record.getParsedRecord(), ParsedRecordUtil.AdditionalSubfields.I);
    if (isBlank(instanceId)) {
      return CompletableFuture.completedFuture(dataImportEventPayload);
    }
    record.setExternalIdsHolder(new ExternalIdsHolder().withInstanceId(instanceId));
    Context context = EventHandlingUtil.constructContext(dataImportEventPayload.getTenant(), dataImportEventPayload.getToken(), dataImportEventPayload.getOkapiUrl());
    Promise<Instance> instanceUpdatePromise = Promise.promise();

    mappingMetadataCache.get(dataImportEventPayload.getJobExecutionId(), context)
      .map(parametersOptional -> parametersOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, dataImportEventPayload.getJobExecutionId()))))
      .map(mappingMetadataDto -> buildPayloadForInstanceUpdate(dataImportEventPayload, mappingMetadataDto))
      .compose(payloadForUpdate -> instanceUpdateDelegate.handle(payloadForUpdate, record, context))
      .onSuccess(instanceUpdatePromise::complete)
      .compose(updatedInstance -> precedingSucceedingTitlesHelper.getExistingPrecedingSucceedingTitles(updatedInstance, context))
      .map(precedingSucceedingTitles -> precedingSucceedingTitles.stream()
        .map(titleJson -> titleJson.getString("id"))
        .collect(Collectors.toSet()))
      .compose(precedingSucceedingTitles -> precedingSucceedingTitlesHelper.deletePrecedingSucceedingTitles(precedingSucceedingTitles, context))
      .compose(ar -> precedingSucceedingTitlesHelper.createPrecedingSucceedingTitles(instanceUpdatePromise.future().result(), context))
      .onComplete(updateAr -> {
        if (updateAr.succeeded()) {
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          Instance resultedInstance = instanceUpdatePromise.future().result();
          if (resultedInstance.getVersion() != null) {
            int currentVersion = Integer.parseInt(resultedInstance.getVersion());
            int incrementedVersion = currentVersion + 1;
            resultedInstance.setVersion(String.valueOf(incrementedVersion));
          }
          dataImportEventPayload.getContext().put(INSTANCE.value(), Json.encode(resultedInstance));
          future.complete(dataImportEventPayload);
        } else if (updateAr.cause() instanceof OptimisticLockingException) {
          processOLError(dataImportEventPayload, future, updateAr);
        } else {
          dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
          LOGGER.error("Error updating inventory instance by id: '{}' by jobExecutionId: '{}'", instanceId, dataImportEventPayload.getJobExecutionId(), updateAr.cause());
          future.completeExceptionally(updateAr.cause());
        }
      });
  } catch (Exception e) {
    dataImportEventPayload.getContext().remove(CURRENT_RETRY_NUMBER);
    LOGGER.error("Error updating inventory instance", e);
    future.completeExceptionally(e);
  }
  return future;
}
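On an optimistic-locking failure this handler delegates to processOLError(...), whose body is not part of this excerpt. Judging from the retry logic of the Kafka handlers shown below, it plausibly keeps a retry counter in the payload context and re-runs handle(...) until a maximum is reached; the sketch below is an assumption along those lines, not the module's actual method.

// Sketch only - assumed shape of processOLError in this event handler.
private void processOLError(DataImportEventPayload payload, CompletableFuture<DataImportEventPayload> future, AsyncResult<?> ar) {
  String retryValue = payload.getContext().get(CURRENT_RETRY_NUMBER);
  int currentRetryNumber = retryValue == null ? 0 : Integer.parseInt(retryValue);
  if (currentRetryNumber < MAX_RETRIES_COUNT) {        // MAX_RETRIES_COUNT: assumed constant
    payload.getContext().put(CURRENT_RETRY_NUMBER, String.valueOf(currentRetryNumber + 1));
    LOGGER.warn("OL error updating Instance - {}. Retrying...", ar.cause().getMessage());
    handle(payload).whenComplete((res, err) -> {
      if (err == null) {
        future.complete(res);
      } else {
        future.completeExceptionally(err);
      }
    });
  } else {
    payload.getContext().remove(CURRENT_RETRY_NUMBER);
    future.completeExceptionally(new OptimisticLockingException(
      format("Current retry number %s exceeded given number %s for the Instance update", currentRetryNumber, MAX_RETRIES_COUNT)));
  }
}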
Use of org.folio.inventory.dataimport.exceptions.OptimisticLockingException in project mod-inventory by folio-org.
Class MarcHoldingsRecordHridSetKafkaHandler, method processOLError.
private void processOLError(KafkaConsumerRecord<String, String> value, Promise<String> promise, HashMap<String, String> eventPayload, AsyncResult<HoldingsRecord> ar) {
  int currentRetryNumber = eventPayload.get(CURRENT_RETRY_NUMBER) == null ? 0 : Integer.parseInt(eventPayload.get(CURRENT_RETRY_NUMBER));
  if (currentRetryNumber < MAX_RETRIES_COUNT) {
    eventPayload.put(CURRENT_RETRY_NUMBER, String.valueOf(currentRetryNumber + 1));
    LOGGER.warn("Error updating Holding - {}. Retry MarcHoldingsRecordHridSetKafkaHandler handler...", ar.cause().getMessage());
    handle(value).onComplete(res -> {
      if (res.succeeded()) {
        promise.complete(value.key());
      } else {
        promise.fail(res.cause());
      }
    });
  } else {
    eventPayload.remove(CURRENT_RETRY_NUMBER);
    String errMessage = format("Current retry number %s exceeded given number %s for the Holding update", currentRetryNumber, MAX_RETRIES_COUNT);
    LOGGER.error(errMessage);
    promise.fail(new OptimisticLockingException(errMessage));
  }
}
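The retry bookkeeping relies on two members of the handler that the excerpt does not show. Plausible declarations are given below; the key name and the retry limit are assumptions.

// Assumed declarations (not shown in the excerpt above).
private static final String CURRENT_RETRY_NUMBER = "CURRENT_RETRY_NUMBER";
private static final int MAX_RETRIES_COUNT = 2;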
Use of org.folio.inventory.dataimport.exceptions.OptimisticLockingException in project mod-inventory by folio-org.
Class MarcBibInstanceHridSetKafkaHandler, method handle.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> promise = Promise.promise();
    Event event = OBJECT_MAPPER.readValue(record.value(), Event.class);
    @SuppressWarnings("unchecked")
    HashMap<String, String> eventPayload = OBJECT_MAPPER.readValue(event.getEventPayload(), HashMap.class);
    Map<String, String> headersMap = KafkaHeaderUtils.kafkaHeadersToMap(record.headers());
    String recordId = headersMap.get(RECORD_ID_HEADER);
    String chunkId = headersMap.get(CHUNK_ID_HEADER);
    String jobExecutionId = eventPayload.get(JOB_EXECUTION_ID_HEADER);
    LOGGER.info("Event payload has been received with event type: {}, recordId: {} by jobExecution: {} and chunkId: {}", event.getEventType(), recordId, jobExecutionId, chunkId);

    if (isEmpty(eventPayload.get(MARC_KEY))) {
      String message = format("Event payload does not contain required data to update Instance with event type: '%s', recordId: '%s' by jobExecution: '%s' and chunkId: '%s'", event.getEventType(), recordId, jobExecutionId, chunkId);
      LOGGER.error(message);
      return Future.failedFuture(message);
    }

    Context context = EventHandlingUtil.constructContext(headersMap.get(OKAPI_TENANT_HEADER), headersMap.get(OKAPI_TOKEN_HEADER), headersMap.get(OKAPI_URL_HEADER));
    Record marcRecord = new JsonObject(eventPayload.get(MARC_KEY)).mapTo(Record.class);

    mappingMetadataCache.get(jobExecutionId, context)
      .map(metadataOptional -> metadataOptional.orElseThrow(() ->
        new EventProcessingException(format(MAPPING_METADATA_NOT_FOUND_MSG, jobExecutionId))))
      .onSuccess(mappingMetadataDto -> ensureEventPayloadWithMappingMetadata(eventPayload, mappingMetadataDto))
      .compose(v -> instanceUpdateDelegate.handle(eventPayload, marcRecord, context))
      .onComplete(ar -> {
        if (ar.succeeded()) {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          promise.complete(record.key());
        } else if (ar.cause() instanceof OptimisticLockingException) {
          processOLError(record, promise, eventPayload, ar);
        } else {
          eventPayload.remove(CURRENT_RETRY_NUMBER);
          LOGGER.error("Failed to set MarcBib Hrid by jobExecutionId {}:{}", jobExecutionId, ar.cause());
          promise.fail(ar.cause());
        }
      });
    return promise.future();
  } catch (Exception e) {
    LOGGER.error(format("Failed to process data import kafka record from topic %s", record.topic()), e);
    return Future.failedFuture(e);
  }
}
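In every snippet above the OptimisticLockingException ultimately originates from the record version that the storage module checks on update: when the version sent with the PUT no longer matches the stored one, storage answers HTTP 409 and the update delegates translate that into the exception. The stand-alone illustration below walks through such a conflict; it is not mod-inventory code, and while the _version field name follows FOLIO's optimistic-locking convention, the payload shape here is an assumption.

import io.vertx.core.json.JsonObject;

// Illustration only (not mod-inventory code). Two clients read the same instance
// and both try to write it back. Storage accepts the first update and bumps the
// version; the second update still carries the stale version, so storage answers
// HTTP 409, which the update delegates wrap into OptimisticLockingException.
public class VersionConflictIllustration {
  public static void main(String[] args) {
    JsonObject stored = new JsonObject()
      .put("id", "00000000-0000-0000-0000-000000000001")
      .put("_version", 2);

    JsonObject updateA = stored.copy().put("title", "Title A"); // carries _version = 2
    JsonObject updateB = stored.copy().put("title", "Title B"); // carries _version = 2

    stored = updateA.copy().put("_version", 3); // client A wins; storage increments the version

    boolean conflict = !updateB.getInteger("_version").equals(stored.getInteger("_version"));
    System.out.println(conflict ? "409 Conflict -> OptimisticLockingException" : "update accepted");
  }
}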