
Example 86 with Event

Use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

The class ParsedRecordChunksErrorHandler, method handle:

@Override
public void handle(Throwable throwable, KafkaConsumerRecord<String, String> record) {
    // Unwrap the Event envelope to recover the record chunk that failed
    Event event = Json.decodeValue(record.value(), Event.class);
    RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
    List<KafkaHeader> kafkaHeaders = record.headers();
    OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
    String jobExecutionId = okapiConnectionParams.getHeaders().get(JOB_EXECUTION_ID_HEADER);
    String correlationId = okapiConnectionParams.getHeaders().get(CORRELATION_ID_HEADER);
    String tenantId = okapiConnectionParams.getTenantId();
    if (throwable instanceof DuplicateEventException) {
        // The chunk was already processed once; log and skip instead of raising errors
        LOGGER.warn("Duplicate event received, skipping processing for jobExecutionId: {}, tenantId: {}, correlationId: {}, totalRecords: {}, cause: {}", jobExecutionId, tenantId, correlationId, recordCollection.getTotalRecords(), throwable.getMessage());
    } else {
        // Genuine failure: publish error events for every record in the chunk
        sendErrorRecordsSavingEvents(recordCollection, throwable.getMessage(), kafkaHeaders, jobExecutionId, tenantId);
    }
}
Also used: DuplicateEventException(org.folio.kafka.exception.DuplicateEventException) RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event) OkapiConnectionParams(org.folio.dataimport.util.OkapiConnectionParams) KafkaHeader(io.vertx.kafka.client.producer.KafkaHeader)
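
For context, this handler is invoked after record processing fails. A hand-rolled wiring sketch with a plain Vert.x KafkaConsumer (the names consumerConfig, topicName, processChunk, and errorHandler are illustrative, not from the repository; in production the folio-kafka consumer wrapper performs this dispatch):

KafkaConsumer<String, String> consumer = KafkaConsumer.create(vertx, consumerConfig);
consumer.subscribe(topicName);
consumer.handler(consumerRecord -> {
    try {
        processChunk(consumerRecord); // happy path: decode the Event, save the records
    } catch (Exception e) {
        // DuplicateEventException is merely logged; any other failure sends
        // error events for every record in the chunk
        errorHandler.handle(e, consumerRecord);
    }
});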

Example 87 with Event

Use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

The class EventHandlingUtil, method createProducerRecord:

public static KafkaProducerRecord<String, String> createProducerRecord(String eventPayload, String eventType, String key, String tenantId, List<KafkaHeader> kafkaHeaders, KafkaConfig kafkaConfig) {
    Event event = new Event()
        .withId(UUID.randomUUID().toString())
        .withEventType(eventType)
        .withEventPayload(eventPayload)
        .withEventMetadata(new EventMetadata()
            .withTenantId(tenantId)
            .withEventTTL(1)
            .withPublishedBy(constructModuleName()));
    String topicName = createTopicName(eventType, tenantId, kafkaConfig);
    KafkaProducerRecord<String, String> record = KafkaProducerRecord.create(topicName, key, Json.encode(event));
    record.addHeaders(kafkaHeaders);
    return record;
}
Also used: Event(org.folio.rest.jaxrs.model.Event) EventMetadata(org.folio.rest.jaxrs.model.EventMetadata)
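
A typical caller hands the assembled record to a shared producer. A minimal usage sketch, assuming Vert.x 4's Future-returning send, folio-kafka's KafkaConfig.getProducerProps(), and illustrative eventPayload, eventType, key, and LOGGER values:

KafkaProducerRecord<String, String> producerRecord =
    createProducerRecord(eventPayload, eventType, key, tenantId, kafkaHeaders, kafkaConfig);
KafkaProducer<String, String> producer =
    KafkaProducer.createShared(vertx, eventType + "_Producer", kafkaConfig.getProducerProps());
producer.send(producerRecord)
    .onSuccess(metadata -> LOGGER.info("Event published to topic {}", metadata.getTopic()))
    .onFailure(cause -> LOGGER.error("Failed to publish event", cause));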

Example 88 with Event

Use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

The class DataImportConsumersVerticleTest, method shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode:

@Test
public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode() throws InterruptedException {
    ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
        .withId(UUID.randomUUID().toString())
        .withContentType(JOB_PROFILE)
        .withContent(JsonObject.mapFrom(new JobProfile()
            .withId(UUID.randomUUID().toString())
            .withDataType(JobProfile.DataType.MARC)).getMap())
        .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
            .withContentType(ACTION_PROFILE)
            .withContent(JsonObject.mapFrom(new ActionProfile()
                .withId(UUID.randomUUID().toString())
                .withAction(MODIFY)
                .withFolioRecord(ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC)).getMap())
            .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
                .withContentType(MAPPING_PROFILE)
                .withContent(JsonObject.mapFrom(new MappingProfile()
                    .withId(UUID.randomUUID().toString())
                    .withIncomingRecordType(MARC_BIBLIOGRAPHIC)
                    .withExistingRecordType(MARC_BIBLIOGRAPHIC)
                    .withMappingDetails(new MappingDetail()
                        .withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY)
                        .withMarcMappingDetails(List.of(marcMappingDetail))))
                    .getMap())))));
    WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true)).willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
    String expectedParsedContent = "{\"leader\":\"00107nam  22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
    DataImportEventPayload eventPayload = new DataImportEventPayload()
        .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
        .withJobExecutionId(snapshotId)
        .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
        .withOkapiUrl(mockServer.baseUrl())
        .withTenant(TENANT_ID)
        .withToken(TOKEN)
        .withContext(new HashMap<>() {
            {
                put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
                put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
            }
        });
    String topic = getTopicName(DI_SRS_MARC_BIB_RECORD_CREATED.value());
    KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
    kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
    kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
    SendKeyValues<String, String> request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();
    // when
    cluster.send(request);
    // then
    var value = DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value();
    String observeTopic = getTopicName(value);
    List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
    Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
    DataImportEventPayload dataImportEventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
    assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value(), dataImportEventPayload.getEventType());
    Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
    assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
    assertEquals(Record.State.ACTUAL, actualRecord.getState());
    assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
    assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
Also used: KeyValue(net.mguenther.kafka.junit.KeyValue) RegexPattern(com.github.tomakehurst.wiremock.matching.RegexPattern) JobProfile(org.folio.JobProfile) ProfileSnapshotWrapper(org.folio.rest.jaxrs.model.ProfileSnapshotWrapper) DataImportEventPayload(org.folio.rest.jaxrs.model.DataImportEventPayload) MappingProfile(org.folio.MappingProfile) MarcMappingDetail(org.folio.rest.jaxrs.model.MarcMappingDetail) MappingDetail(org.folio.rest.jaxrs.model.MappingDetail) UrlPathPattern(com.github.tomakehurst.wiremock.matching.UrlPathPattern) Event(org.folio.rest.jaxrs.model.Event) ParsedRecord(org.folio.rest.jaxrs.model.ParsedRecord) RawRecord(org.folio.rest.jaxrs.model.RawRecord) Record(org.folio.rest.jaxrs.model.Record) ActionProfile(org.folio.ActionProfile) Test(org.junit.Test) AbstractLBServiceTest(org.folio.services.AbstractLBServiceTest)
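
The helper buildKafkaRecord is not shown in this snippet; a plausible sketch, assuming it wraps the payload in the same Event envelope used throughout these examples (the key "test-key" is illustrative):

private KeyValue<String, String> buildKafkaRecord(DataImportEventPayload eventPayload) {
    // Wrap the payload in the standard Event envelope before serializing
    Event event = new Event()
        .withId(UUID.randomUUID().toString())
        .withEventType(eventPayload.getEventType())
        .withEventPayload(Json.encode(eventPayload));
    return new KeyValue<>("test-key", Json.encode(event));
}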

Example 89 with Event

Use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

The class ParsedRecordChunkConsumersVerticleTest, method sendRecordsToKafka:

private void sendRecordsToKafka(String jobExecutionId, List<Record> records) throws InterruptedException {
    RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
    String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
    Event event = new Event().withEventPayload(Json.encode(recordCollection));
    KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
    record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
    record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
    record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
    record.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, Charset.defaultCharset());
    SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
    cluster.send(request);
}
Also used: KeyValue(net.mguenther.kafka.junit.KeyValue) RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event)
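
A caller builds the records it wants the consumer verticle to persist. A minimal sketch using the same builders as the next example (rawContent, parsedContent, and the MARC_BIB record type value are assumptions for illustration):

Record record = new Record()
    .withId(UUID.randomUUID().toString())
    .withSnapshotId(jobExecutionId)
    .withRecordType(Record.RecordType.MARC_BIB)
    .withRawRecord(new RawRecord().withContent(rawContent))           // raw MARC payload
    .withParsedRecord(new ParsedRecord().withContent(parsedContent)); // parsed MARC JSON
sendRecordsToKafka(jobExecutionId, List.of(record));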

Example 90 with Event

Use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

The class ParsedRecordChunkConsumersVerticleTest, method sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent:

private void sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent(RecordType recordType, RawRecord rawRecord, ParsedRecord parsedRecord) throws InterruptedException {
    List<Record> records = new ArrayList<>();
    records.add(new Record()
        .withId(recordId)
        .withMatchedId(recordId)
        .withSnapshotId(snapshotId)
        .withGeneration(0)
        .withRecordType(recordType)
        .withRawRecord(rawRecord)
        .withParsedRecord(parsedRecord));
    RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
    String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
    Event event = new Event().withEventPayload(Json.encode(recordCollection));
    KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
    record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
    record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
    record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
    SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
    cluster.send(request);
    String observeTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_PARSED_RECORDS_CHUNK_SAVED.value());
    cluster.observeValues(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
}
Also used: KeyValue(net.mguenther.kafka.junit.KeyValue) ArrayList(java.util.ArrayList) RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event) ParsedRecord(org.folio.rest.jaxrs.model.ParsedRecord) RawRecord(org.folio.rest.jaxrs.model.RawRecord) Record(org.folio.rest.jaxrs.model.Record)
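
The observeValues call at the end of the method discards the observed messages; when a test needs to inspect them, the same two-step envelope decode as in Example 88 applies. A sketch, assuming the DI_PARSED_RECORDS_CHUNK_SAVED payload is a RecordsBatchResponse (consistent with the Aggregations list below):

List<String> observedValues = cluster.observeValues(
    ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
Event savedEvent = Json.decodeValue(observedValues.get(0), Event.class);
RecordsBatchResponse savedBatch = Json.decodeValue(savedEvent.getEventPayload(), RecordsBatchResponse.class);
assertEquals(1, savedBatch.getTotalRecords().intValue());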

Aggregations

Event (org.folio.rest.jaxrs.model.Event) 90
Test (org.junit.Test) 41
HashMap (java.util.HashMap) 32
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString) 28
DataImportEventPayload (org.folio.DataImportEventPayload) 27
KeyValue (net.mguenther.kafka.junit.KeyValue) 22
Async (io.vertx.ext.unit.Async) 21
KafkaConsumerRecord (io.vertx.kafka.client.consumer.KafkaConsumerRecord) 16
OkapiConnectionParams (org.folio.dataimport.util.OkapiConnectionParams) 16
Record (org.folio.rest.jaxrs.model.Record) 14
LogManager (org.apache.logging.log4j.LogManager) 12
Logger (org.apache.logging.log4j.Logger) 12
ProfileSnapshotWrapper (org.folio.rest.jaxrs.model.ProfileSnapshotWrapper) 12
Test (org.junit.jupiter.api.Test) 12
KafkaHeader (io.vertx.kafka.client.producer.KafkaHeader) 11
DuplicateEventException (org.folio.kafka.exception.DuplicateEventException) 10
AbstractRestTest (org.folio.rest.impl.AbstractRestTest) 10
RecordCollection (org.folio.rest.jaxrs.model.RecordCollection) 10
RecordsBatchResponse (org.folio.rest.jaxrs.model.RecordsBatchResponse) 10
Future (io.vertx.core.Future) 9