Use of org.folio.dataimport.util.OkapiConnectionParams in project mod-source-record-storage by folio-org.
The class DataImportKafkaHandler, method handle.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
  String recordId = extractValueFromHeaders(targetRecord.headers(), RECORD_ID_HEADER);
  String chunkId = extractValueFromHeaders(targetRecord.headers(), CHUNK_ID_HEADER);
  try {
    Promise<String> promise = Promise.promise();
    Event event = ObjectMapperTool.getMapper().readValue(targetRecord.value(), Event.class);
    DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
    LOGGER.debug("Data import event payload has been received with event type: '{}' by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}'",
      eventPayload.getEventType(), eventPayload.getJobExecutionId(), recordId, chunkId);
    // Propagate the correlation identifiers so downstream handlers can log them
    eventPayload.getContext().put(RECORD_ID_HEADER, recordId);
    eventPayload.getContext().put(CHUNK_ID_HEADER, chunkId);
    OkapiConnectionParams params = RestUtil.retrieveOkapiConnectionParams(eventPayload, vertx);
    String jobProfileSnapshotId = eventPayload.getContext().get(PROFILE_SNAPSHOT_ID_KEY);
    // Resolve the job profile snapshot from the cache and hand the payload to the EventManager;
    // a missing snapshot fails the resulting future immediately
    profileSnapshotCache.get(jobProfileSnapshotId, params)
      .toCompletionStage()
      .thenCompose(snapshotOptional -> snapshotOptional
        .map(profileSnapshot -> EventManager.handleEvent(eventPayload, profileSnapshot))
        .orElse(CompletableFuture.failedFuture(new EventProcessingException(
          format("Job profile snapshot with id '%s' does not exist", jobProfileSnapshotId)))))
      .whenComplete((processedPayload, throwable) -> {
        if (throwable != null) {
          promise.fail(throwable);
        } else if (DI_ERROR.value().equals(processedPayload.getEventType())) {
          promise.fail(format("Failed to process data import event payload from topic '%s' by jobExecutionId: '%s' with recordId: '%s' and chunkId: '%s'",
            targetRecord.topic(), eventPayload.getJobExecutionId(), recordId, chunkId));
        } else {
          promise.complete(targetRecord.key());
        }
      });
    return promise.future();
  } catch (Exception e) {
    LOGGER.error("Failed to process data import kafka record from topic '{}' with recordId: '{}' and chunkId: '{}'", targetRecord.topic(), recordId, chunkId, e);
    return Future.failedFuture(e);
  }
}
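The extractValueFromHeaders helper used above is private to the handler and its body is not shown on this page. A minimal sketch of what it plausibly does, assuming it returns null when the header is absent (the actual implementation may differ):

import java.util.List;
import io.vertx.kafka.client.producer.KafkaHeader;

private String extractValueFromHeaders(List<KafkaHeader> headers, String key) {
  // Find the first Kafka header whose key matches and decode its Buffer value
  return headers.stream()
    .filter(header -> header.key().equals(key))
    .findFirst()
    .map(header -> header.value().toString())
    .orElse(null);
}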
Use of org.folio.dataimport.util.OkapiConnectionParams in project mod-source-record-storage by folio-org.
The class ParsedRecordChunksKafkaHandler, method handle.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
  Event event = Json.decodeValue(targetRecord.value(), Event.class);
  RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
  List<KafkaHeader> kafkaHeaders = targetRecord.headers();
  // Okapi connection parameters (URL, tenant, token) travel in the Kafka headers
  OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
  String tenantId = okapiConnectionParams.getTenantId();
  String recordId = extractValueFromHeaders(targetRecord.headers(), RECORD_ID_HEADER);
  String chunkId = extractValueFromHeaders(targetRecord.headers(), CHUNK_ID_HEADER);
  String key = targetRecord.key();
  int chunkNumber = chunkCounter.incrementAndGet();
  DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
  try {
    LOGGER.debug("RecordCollection has been received with event: '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}' with recordId: '{}'",
      eventPayload.getEventType(), chunkId, chunkNumber, key, recordId);
    // Persist the chunk of parsed records, then publish the batch response back to Kafka
    return recordService.saveRecords(recordCollection, tenantId)
      .compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, eventPayload.getEventType(), targetRecord));
  } catch (Exception e) {
    LOGGER.error("RecordCollection processing has failed with errors with event: '{}', chunkId: '{}', chunkNumber '{}'-'{}' with recordId: '{}'",
      eventPayload.getEventType(), chunkId, chunkNumber, key, recordId, e);
    return Future.failedFuture(e);
  }
}
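KafkaHeaderUtils.kafkaHeadersToMap flattens the Vert.x Kafka headers into the plain String map that the OkapiConnectionParams constructor expects. A minimal sketch of that conversion, assuming the last occurrence wins when a key is duplicated (the real utility may resolve duplicates differently):

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import io.vertx.kafka.client.producer.KafkaHeader;

static Map<String, String> kafkaHeadersToMap(List<KafkaHeader> headers) {
  return headers.stream()
    .collect(Collectors.toMap(
      KafkaHeader::key,                     // header name, e.g. x-okapi-tenant
      header -> header.value().toString(),  // header value decoded from the Buffer
      (first, second) -> second));          // keep the last value on duplicate keys
}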
Use of org.folio.dataimport.util.OkapiConnectionParams in project mod-source-record-manager by folio-org.
The class EventDrivenChunkProcessingServiceImpl, method processRawRecordsChunk.
@Override
protected Future<Boolean> processRawRecordsChunk(RawRecordsDto incomingChunk, JobExecutionSourceChunk sourceChunk, String jobExecutionId, OkapiConnectionParams params) {
  LOGGER.debug("Starting to process raw records chunk with id: {} for jobExecutionId: {}. Chunk size: {}.", sourceChunk.getId(), jobExecutionId, sourceChunk.getChunkSize());
  Promise<Boolean> promise = Promise.promise();
  // Initialize progress tracking, move the job execution to PARSING_IN_PROGRESS,
  // parse the chunk, and finally check whether all source chunks were marked as errored
  initializeJobExecutionProgressIfNecessary(jobExecutionId, incomingChunk, params.getTenantId())
    .compose(ar -> checkAndUpdateJobExecutionStatusIfNecessary(jobExecutionId, new StatusDto().withStatus(StatusDto.Status.PARSING_IN_PROGRESS), params))
    .compose(jobExec -> changeEngineService.parseRawRecordsChunkForJobExecution(incomingChunk, jobExec, sourceChunk.getId(), params))
    .onComplete(sendEventsAr -> updateJobExecutionIfAllSourceChunksMarkedAsError(jobExecutionId, params)
      .onComplete(updateAr -> promise.handle(sendEventsAr.map(true))));
  return promise.future();
}
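For context, this method relies on the short-circuit behaviour of Vert.x Future.compose: each stage runs only if the previous future succeeded, and a failure anywhere skips straight to the terminal handler. A self-contained illustration, where stepOne and stepTwo are hypothetical placeholders rather than methods from the module:

import io.vertx.core.Future;

// Hypothetical placeholders, only to illustrate the short-circuit behaviour
static Future<Integer> stepOne() { return Future.succeededFuture(42); }
static Future<String> stepTwo(Integer n) { return Future.succeededFuture("chunk-" + n); }

static Future<String> pipeline() {
  return stepOne()
    .compose(n -> stepTwo(n))   // runs only if stepOne succeeded
    .onComplete(ar -> {
      if (ar.failed()) {
        // a failure at any stage skips later compose stages and lands here exactly once
        System.err.println("pipeline failed: " + ar.cause());
      }
    });
}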
Use of org.folio.dataimport.util.OkapiConnectionParams in project mod-source-record-manager by folio-org.
The class JobExecutionServiceImpl, method updateSnapshotStatus.
private Future<JobExecution> updateSnapshotStatus(JobExecution jobExecution, OkapiConnectionParams params) {
  Promise<JobExecution> promise = Promise.promise();
  // Mirror the job execution status onto the snapshot stored in mod-source-record-storage
  Snapshot snapshot = new Snapshot()
    .withJobExecutionId(jobExecution.getId())
    .withStatus(Snapshot.Status.fromValue(jobExecution.getStatus().name()));
  SourceStorageSnapshotsClient client = new SourceStorageSnapshotsClient(params.getOkapiUrl(), params.getTenantId(), params.getToken());
  try {
    client.putSourceStorageSnapshotsByJobExecutionId(jobExecution.getId(), null, snapshot, response -> {
      if (response.result().statusCode() == HttpStatus.HTTP_OK.toInt()) {
        promise.complete(jobExecution);
      } else {
        // The snapshot update failed, so mark the job execution itself as errored
        jobExecutionDao.updateBlocking(jobExecution.getId(), jobExec -> {
          Promise<JobExecution> jobExecutionPromise = Promise.promise();
          jobExec.setErrorStatus(JobExecution.ErrorStatus.SNAPSHOT_UPDATE_ERROR);
          jobExec.setStatus(JobExecution.Status.ERROR);
          jobExec.setUiStatus(JobExecution.UiStatus.ERROR);
          jobExec.setCompletedDate(new Date());
          jobExecutionPromise.complete(jobExec);
          return jobExecutionPromise.future();
        }, params.getTenantId()).onComplete(jobExecutionUpdate -> {
          String message = "Couldn't update snapshot status for jobExecution with id " + jobExecution.getId();
          LOGGER.error(message);
          promise.fail(message);
        });
      }
    });
  } catch (Exception e) {
    LOGGER.error("Error during update for Snapshot with id {}", jobExecution.getId(), e);
    promise.fail(e);
  }
  return promise.future();
}
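Judging by its name, the generated putSourceStorageSnapshotsByJobExecutionId wraps mod-source-record-storage's PUT /source-storage/snapshots/{jobExecutionId} endpoint. A hedged usage sketch mirroring the call above; okapiUrl, tenantId, token, and jobExecutionId are placeholders, and COMMITTED stands in for any valid Snapshot.Status value:

SourceStorageSnapshotsClient client = new SourceStorageSnapshotsClient(okapiUrl, tenantId, token);
Snapshot snapshot = new Snapshot()
  .withJobExecutionId(jobExecutionId)
  .withStatus(Snapshot.Status.COMMITTED);
client.putSourceStorageSnapshotsByJobExecutionId(jobExecutionId, null, snapshot, response -> {
  // The callback receives an AsyncResult; checking succeeded() before calling
  // response.result() avoids a NullPointerException when the HTTP call itself failed
  if (response.succeeded() && response.result().statusCode() == 200) {
    // snapshot status updated
  }
});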
Use of org.folio.dataimport.util.OkapiConnectionParams in project mod-source-record-manager by folio-org.
The class DataImportKafkaHandler, method handle.
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> record) {
  try {
    Promise<String> result = Promise.promise();
    List<KafkaHeader> kafkaHeaders = record.headers();
    OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
    String recordId = okapiConnectionParams.getHeaders().get(RECORD_ID_HEADER);
    Event event = Json.decodeValue(record.value(), Event.class);
    String jobExecutionId = extractJobExecutionId(kafkaHeaders);
    LOGGER.info("Event was received with recordId: '{}' event type: '{}' with jobExecutionId: '{}'", recordId, event.getEventType(), jobExecutionId);
    // Events without a recordId are not subject to deduplication and are handled directly
    if (StringUtils.isBlank(recordId)) {
      handleLocalEvent(result, okapiConnectionParams, event);
      return result.future();
    }
    // Record the event id for deduplication; a DuplicateEventException means this event
    // was already processed, so it is acknowledged without reprocessing
    eventProcessedService.collectData(DATA_IMPORT_KAFKA_HANDLER_UUID, event.getId(), okapiConnectionParams.getTenantId())
      .onSuccess(res -> handleLocalEvent(result, okapiConnectionParams, event))
      .onFailure(e -> {
        if (e instanceof DuplicateEventException) {
          LOGGER.info(e.getMessage());
          result.complete();
        } else {
          LOGGER.error("Error with database during collecting of deduplication info for handlerId: {}, eventId: {}.", DATA_IMPORT_KAFKA_HANDLER_UUID, event.getId(), e);
          result.fail(e);
        }
      });
    return result.future();
  } catch (Exception e) {
    LOGGER.error("Error during processing data-import result", e);
    return Future.failedFuture(e);
  }
}
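A sketch of the idempotent-insert idea that plausibly backs eventProcessedService.collectData. The table name "events_processed", its columns, and the use of a plain PgPool are assumptions for illustration, not the module's actual schema or data-access layer:

import io.vertx.core.Future;
import io.vertx.pgclient.PgPool;
import io.vertx.sqlclient.Tuple;

// Hypothetical deduplication insert: a unique constraint over (handler_id, event_id)
// makes redelivery detectable, because the second insert of the same pair fails with
// a unique-violation error, which the real service surfaces as DuplicateEventException
static Future<Void> collectData(PgPool pool, String handlerId, String eventId) {
  return pool
    .preparedQuery("INSERT INTO events_processed (handler_id, event_id) VALUES ($1, $2)")
    .execute(Tuple.of(handlerId, eventId))
    .mapEmpty();
}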