use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.
the class ParsedRecordChunksErrorHandler method handle.
@Override
public void handle(Throwable throwable, KafkaConsumerRecord<String, String> record) {
  Event event = Json.decodeValue(record.value(), Event.class);
  RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
  List<KafkaHeader> kafkaHeaders = record.headers();
  OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
  String jobExecutionId = okapiConnectionParams.getHeaders().get(JOB_EXECUTION_ID_HEADER);
  String correlationId = okapiConnectionParams.getHeaders().get(CORRELATION_ID_HEADER);
  String tenantId = okapiConnectionParams.getTenantId();
  if (throwable instanceof DuplicateEventException) {
    LOGGER.warn("Duplicate event received, skipping processing for jobExecutionId: {} , tenantId: {}, correlationId:{}, totalRecords: {}, cause: {}",
      jobExecutionId, tenantId, correlationId, recordCollection.getTotalRecords(), throwable.getMessage());
  } else {
    sendErrorRecordsSavingEvents(recordCollection, throwable.getMessage(), kafkaHeaders, jobExecutionId, tenantId);
  }
}
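The handler relies on a two-step decode: the Kafka record value is a JSON-encoded Event envelope, and the Event payload is itself a JSON-encoded RecordCollection. A minimal sketch of just that step, using io.vertx.core.json.Json and the generated org.folio.rest.jaxrs.model types; the method name decodeChunk and the parameter rawValue are hypothetical and stand in for record.value():

// Sketch only, not part of the module: unwrap the Event envelope, then decode its payload.
private RecordCollection decodeChunk(String rawValue) {
  Event envelope = Json.decodeValue(rawValue, Event.class);
  return Json.decodeValue(envelope.getEventPayload(), RecordCollection.class);
}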
use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.
the class EventHandlingUtil method createProducerRecord.
public static KafkaProducerRecord<String, String> createProducerRecord(String eventPayload, String eventType,
    String key, String tenantId, List<KafkaHeader> kafkaHeaders, KafkaConfig kafkaConfig) {
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventType(eventType)
    .withEventPayload(eventPayload)
    .withEventMetadata(new EventMetadata().withTenantId(tenantId).withEventTTL(1).withPublishedBy(constructModuleName()));
  String topicName = createTopicName(eventType, tenantId, kafkaConfig);
  KafkaProducerRecord<String, String> record = KafkaProducerRecord.create(topicName, key, Json.encode(event));
  record.addHeaders(kafkaHeaders);
  return record;
}
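A hypothetical caller could look like the following. The event type and the local variables (payloadJson, recordKey, tenantId, kafkaHeaders, kafkaConfig) are illustrative and assumed to be in scope; sending the record through a Vert.x KafkaProducer is omitted:

// Illustrative only; "DI_EXAMPLE_EVENT" is not a real event type in the module.
KafkaProducerRecord<String, String> producerRecord = EventHandlingUtil.createProducerRecord(
  payloadJson,        // JSON-encoded event body, stored as Event.eventPayload
  "DI_EXAMPLE_EVENT", // event type, also used to derive the target topic name
  recordKey,
  tenantId,
  kafkaHeaders,
  kafkaConfig);
// producerRecord now targets the tenant- and event-type-specific topic and carries the supplied headers.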
use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.
the class DataImportConsumersVerticleTest method shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode.
@Test
public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode() throws InterruptedException {
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile().withId(UUID.randomUUID().toString()).withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
      .withContentType(ACTION_PROFILE)
      .withContent(JsonObject.mapFrom(new ActionProfile().withId(UUID.randomUUID().toString()).withAction(MODIFY).withFolioRecord(ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC)).getMap())
      .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
        .withContentType(MAPPING_PROFILE)
        .withContent(JsonObject.mapFrom(new MappingProfile()
          .withId(UUID.randomUUID().toString())
          .withIncomingRecordType(MARC_BIBLIOGRAPHIC)
          .withExistingRecordType(MARC_BIBLIOGRAPHIC)
          .withMappingDetails(new MappingDetail()
            .withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY)
            .withMarcMappingDetails(List.of(marcMappingDetail)))).getMap())))));
  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
  String expectedParsedContent = "{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
  DataImportEventPayload eventPayload = new DataImportEventPayload()
    .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withContext(new HashMap<>() {
      {
        put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
        put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
      }
    });
  String topic = getTopicName(DI_SRS_MARC_BIB_RECORD_CREATED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  SendKeyValues<String, String> request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();
  // when
  cluster.send(request);
  // then
  var value = DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value();
  String observeTopic = getTopicName(value);
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  DataImportEventPayload dataImportEventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value(), dataImportEventPayload.getEventType());
  Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
  assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
  assertEquals(Record.State.ACTUAL, actualRecord.getState());
  assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
  assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
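buildKafkaRecord(eventPayload) is a helper defined elsewhere in this test class. Based on the pattern the other tests in this file follow, a plausible sketch (not the actual helper) is shown below; the key "test-key" is hypothetical:

// Sketch only; the real helper may differ. Wrap the payload in an Event envelope and encode it.
private KeyValue<String, String> buildKafkaRecord(DataImportEventPayload eventPayload) {
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventType(eventPayload.getEventType())
    .withEventPayload(Json.encode(eventPayload));
  return new KeyValue<>("test-key", Json.encode(event));
}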
use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.
the class ParsedRecordChunkConsumersVerticleTest method sendRecordsToKafka.
private void sendRecordsToKafka(String jobExecutionId, List<Record> records) throws InterruptedException {
  RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
  String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
  Event event = new Event().withEventPayload(Json.encode(recordCollection));
  KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
  record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
  record.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, Charset.defaultCharset());
  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
  cluster.send(request);
}
use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.
the class ParsedRecordChunkConsumersVerticleTest method sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent.
private void sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent(RecordType recordType, RawRecord rawRecord, ParsedRecord parsedRecord) throws InterruptedException {
  List<Record> records = new ArrayList<>();
  records.add(new Record()
    .withId(recordId)
    .withMatchedId(recordId)
    .withSnapshotId(snapshotId)
    .withGeneration(0)
    .withRecordType(recordType)
    .withRawRecord(rawRecord)
    .withParsedRecord(parsedRecord));
  RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
  String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
  Event event = new Event().withEventPayload(Json.encode(recordCollection));
  KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
  record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
  cluster.send(request);
  String observeTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_PARSED_RECORDS_CHUNK_SAVED.value());
  cluster.observeValues(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
}
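If a test also needs to inspect what was saved, the observed value can be decoded the same way the producer encoded it. A hedged sketch, assuming the DI_PARSED_RECORDS_CHUNK_SAVED payload is again a JSON-encoded RecordCollection and that a single record was sent, as in the method above:

// Hypothetical follow-up: decode the first observed value and check the saved chunk.
List<String> observedValues = cluster.observeValues(
  ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
Event savedEvent = Json.decodeValue(observedValues.get(0), Event.class);
RecordCollection savedCollection = Json.decodeValue(savedEvent.getEventPayload(), RecordCollection.class);
assertEquals(1, savedCollection.getRecords().size());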