use of org.folio.rest.jaxrs.model.DataImportEventPayload in project mod-source-record-storage by folio-org.
the class DataImportConsumersVerticleTest method shouldDeleteMarcAuthorityRecord.
@Test
public void shouldDeleteMarcAuthorityRecord() throws InterruptedException {
  // given
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile()
      .withId(UUID.randomUUID().toString())
      .withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(List.of(new ProfileSnapshotWrapper()
      .withId(UUID.randomUUID().toString())
      .withContentType(ACTION_PROFILE)
      .withOrder(0)
      .withContent(JsonObject.mapFrom(new ActionProfile()
        .withId(UUID.randomUUID().toString())
        .withAction(DELETE)
        .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY)).getMap())));

  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));

  HashMap<String, String> payloadContext = new HashMap<>();
  payloadContext.put("MATCHED_MARC_AUTHORITY", Json.encode(record));
  payloadContext.put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());

  var eventPayload = new DataImportEventPayload()
    .withContext(payloadContext)
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

  String topic = getTopicName(DI_MARC_FOR_DELETE_RECEIVED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  var request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = getTopicName(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.name());
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());

  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  var resultPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);

  assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.value(), resultPayload.getEventType());
  assertEquals(record.getExternalIdsHolder().getAuthorityId(), resultPayload.getContext().get("AUTHORITY_RECORD_ID"));
  assertEquals(ACTION_PROFILE, resultPayload.getCurrentNode().getContentType());
}
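The buildKafkaRecord helper is not shown in this excerpt. Judging from the observe side, which decodes an Event envelope before reading the DataImportEventPayload, the helper presumably wraps the encoded payload in such an envelope. A minimal sketch, assuming kafka-junit's KeyValue and FOLIO's Event model (the actual helper in DataImportConsumersVerticleTest may differ):

// A sketch only: the real helper may set additional Event fields or use a different Kafka key.
private KeyValue<String, String> buildKafkaRecord(DataImportEventPayload eventPayload) {
  // wrap the serialized payload in the Event envelope that the observing code above decodes
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventPayload(Json.encode(eventPayload));
  // kafka-junit's KeyValue carries the Kafka key and the JSON-encoded Event as the value
  return new KeyValue<>(UUID.randomUUID().toString(), Json.encode(event));
}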
use of org.folio.rest.jaxrs.model.DataImportEventPayload in project mod-source-record-storage by folio-org.
the class ParsedRecordChunkConsumersVerticleTest method check_DI_ERROR_eventsSent.
private void check_DI_ERROR_eventsSent(String jobExecutionId, List<Record> records, String... errorMessages) throws InterruptedException {
  List<DataImportEventPayload> testedEventsPayLoads = new ArrayList<>();
  String observeTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_ERROR.value());
  List<String> observedValues = cluster.readValues(ReadKeyValues.from(observeTopic).build());
  if (CollectionUtils.isEmpty(observedValues)) {
    observedValues = cluster.observeValues(ObserveKeyValues.on(observeTopic, records.size())
      .observeFor(30, TimeUnit.SECONDS)
      .build());
  }
  for (String observedValue : observedValues) {
    Event obtainedEvent = Json.decodeValue(observedValue, Event.class);
    DataImportEventPayload eventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
    if (jobExecutionId.equals(eventPayload.getJobExecutionId())) {
      testedEventsPayLoads.add(eventPayload);
    }
  }
  assertEquals(EXPECTED_ERROR_EVENTS_NUMBER, testedEventsPayLoads.size());
  for (DataImportEventPayload eventPayload : testedEventsPayLoads) {
    String recordId = eventPayload.getContext().get(ParsedRecordChunksErrorHandler.RECORD_ID_HEADER);
    String error = eventPayload.getContext().get(ParsedRecordChunksErrorHandler.ERROR_KEY);
    assertEquals(DI_ERROR.value(), eventPayload.getEventType());
    assertEquals(TENANT_ID, eventPayload.getTenant());
    assertTrue(StringUtils.isNotBlank(recordId));
    for (String errorMessage : errorMessages) {
      assertTrue(error.contains(errorMessage));
    }
    assertFalse(eventPayload.getEventsChain().isEmpty());
    assertEquals(DI_LOG_SRS_MARC_BIB_RECORD_CREATED.value(), eventPayload.getEventsChain().get(0));
  }
}
use of org.folio.rest.jaxrs.model.DataImportEventPayload in project mod-source-record-manager by folio-org.
the class RecordsPublishingServiceImpl method sendRecords.
private Future<Boolean> sendRecords(List<Record> createdRecords, JobExecution jobExecution, OkapiConnectionParams params, String eventType) {
  Promise<Boolean> promise = Promise.promise();
  List<Future<Boolean>> futures = new ArrayList<>();
  List<Record> failedRecords = new ArrayList<>();
  ProfileSnapshotWrapper profileSnapshotWrapper =
    new ObjectMapper().convertValue(jobExecution.getJobProfileSnapshotWrapper(), ProfileSnapshotWrapper.class);

  for (Record record : createdRecords) {
    String key = String.valueOf(indexer.incrementAndGet() % maxDistributionNum);
    try {
      if (isRecordReadyToSend(record)) {
        DataImportEventPayload payload = prepareEventPayload(record, profileSnapshotWrapper, params, eventType);
        params.getHeaders().set(RECORD_ID_HEADER, record.getId());
        params.getHeaders().set(USER_ID_HEADER, jobExecution.getUserId());
        futures.add(sendEventToKafka(params.getTenantId(), Json.encode(payload), eventType,
          KafkaHeaderUtils.kafkaHeadersFromMultiMap(params.getHeaders()), kafkaConfig, key));
      }
    } catch (Exception e) {
      LOGGER.error("Error publishing event with record id: {}", record.getId(), e);
      record.setErrorRecord(new ErrorRecord()
        .withContent(record.getRawRecord())
        .withDescription(e.getMessage()));
      failedRecords.add(record);
    }
  }
  if (CollectionUtils.isNotEmpty(failedRecords)) {
    futures.add(Future.failedFuture(new RecordsPublishingException(
      String.format("Failed to process %s records", failedRecords.size()), failedRecords)));
  }
  GenericCompositeFuture.join(futures).onComplete(ar -> {
    if (ar.failed()) {
      LOGGER.error("Error publishing events with records", ar.cause());
      promise.fail(ar.cause());
      return;
    }
    promise.complete(true);
  });
  return promise.future();
}
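prepareEventPayload is not included in this excerpt. Based on how DataImportEventPayload instances are assembled in the other snippets on this page, a plausible sketch follows; the context key, the use of record.getSnapshotId() as the job execution id, and withProfileSnapshot are assumptions rather than the module's confirmed implementation:

// A hedged sketch only: the real method in RecordsPublishingServiceImpl may populate
// the context and attach the profile snapshot differently.
private DataImportEventPayload prepareEventPayload(Record record, ProfileSnapshotWrapper profileSnapshotWrapper,
                                                   OkapiConnectionParams params, String eventType) {
  HashMap<String, String> context = new HashMap<>();
  // the record is serialized under its entity-type key so downstream handlers can locate it
  context.put(RecordConversionUtil.getEntityType(record).value(), Json.encode(record));
  return new DataImportEventPayload()
    .withEventType(eventType)
    .withJobExecutionId(record.getSnapshotId())
    .withOkapiUrl(params.getOkapiUrl())
    .withTenant(params.getTenantId())
    .withToken(params.getToken())
    .withContext(context)
    .withProfileSnapshot(profileSnapshotWrapper);
}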
use of org.folio.rest.jaxrs.model.DataImportEventPayload in project mod-source-record-manager by folio-org.
the class RawMarcChunkConsumersVerticleTest method checkDiErrorEventsSent.
private void checkDiErrorEventsSent(String jobExecutionId, String errorMessage) throws InterruptedException {
  String observeTopic = formatToKafkaTopicName(DI_ERROR.value());
  List<String> observedValues = kafkaCluster.readValues(ReadKeyValues.from(observeTopic).build());
  if (CollectionUtils.isEmpty(observedValues)) {
    observedValues = kafkaCluster.observeValues(ObserveKeyValues.on(observeTopic, 1)
      .observeFor(60, TimeUnit.SECONDS)
      .build());
  }
  List<DataImportEventPayload> testedEventsPayLoads = filterObservedValues(jobExecutionId, observedValues);
  assertEquals(1, testedEventsPayLoads.size());
  for (DataImportEventPayload payload : testedEventsPayLoads) {
    String actualErrorMessage = payload.getContext().get(RawMarcChunksErrorHandler.ERROR_KEY);
    assertTrue(actualErrorMessage.contains(errorMessage));
  }
}
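filterObservedValues is not shown here; its behavior can be inferred from the equivalent decoding loop in check_DI_ERROR_eventsSent above. A minimal sketch under that assumption (the exact signature in RawMarcChunkConsumersVerticleTest may differ):

// Sketch based on the Event-decoding loop shown earlier on this page.
private List<DataImportEventPayload> filterObservedValues(String jobExecutionId, List<String> observedValues) {
  List<DataImportEventPayload> payloads = new ArrayList<>();
  for (String observedValue : observedValues) {
    Event event = Json.decodeValue(observedValue, Event.class);
    DataImportEventPayload payload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
    // keep only events emitted for the job execution under test
    if (jobExecutionId.equals(payload.getJobExecutionId())) {
      payloads.add(payload);
    }
  }
  return payloads;
}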
use of org.folio.rest.jaxrs.model.DataImportEventPayload in project mod-source-record-manager by folio-org.
the class StoredRecordChunksErrorHandler method sendDiErrorForRecord.
private void sendDiErrorForRecord(String jobExecutionId, Record targetRecord, OkapiConnectionParams okapiParams, String errorMsg) {
  DataImportEventPayload errorPayload = new DataImportEventPayload()
    .withEventType(DI_ERROR.value())
    .withJobExecutionId(jobExecutionId)
    .withOkapiUrl(okapiParams.getOkapiUrl())
    .withTenant(okapiParams.getTenantId())
    .withToken(okapiParams.getToken())
    .withContext(new HashMap<>() {
      {
        put(RecordConversionUtil.getEntityType(targetRecord).value(), Json.encode(targetRecord));
        put(ERROR_KEY, errorMsg);
      }
    });

  okapiParams.getHeaders().set(RECORD_ID_HEADER, targetRecord.getId());
  String chunkNumber = okapiParams.getHeaders().get(CHUNK_NUMBER);

  sendEventToKafka(okapiParams.getTenantId(), Json.encode(errorPayload), DI_ERROR.value(),
    KafkaHeaderUtils.kafkaHeadersFromMultiMap(okapiParams.getHeaders()), kafkaConfig, null)
    .onFailure(th -> LOGGER.error("Error publishing DI_ERROR event for jobExecutionId: {} , recordId: {}, chunkNumber: {}",
      errorPayload.getJobExecutionId(), targetRecord.getId(), chunkNumber, th));
}
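RecordConversionUtil.getEntityType determines the context key under which the failed record is serialized. Its implementation is not part of this excerpt; a plausible sketch of such a mapping, assumed rather than confirmed, is:

// Assumed mapping from the source-record type to the data-import entity type;
// the real RecordConversionUtil likely covers more record types (e.g. EDIFACT).
public static EntityType getEntityType(Record record) {
  switch (record.getRecordType()) {
    case MARC_BIB:
      return EntityType.MARC_BIBLIOGRAPHIC;
    case MARC_AUTHORITY:
      return EntityType.MARC_AUTHORITY;
    case MARC_HOLDING:
      return EntityType.MARC_HOLDINGS;
    default:
      throw new IllegalArgumentException("Unsupported record type: " + record.getRecordType());
  }
}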