Search in sources :

Example 21 with KeyValue

Use of net.mguenther.kafka.junit.KeyValue in the mod-inventory project by folio-org.

From the class DataImportConsumerVerticleTest, the method shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSucceeded.

@Test
public void shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSucceeded() throws InterruptedException {
    // given: a DI event payload that references the stubbed job profile snapshot
    DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
        .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
        .withTenant(TENANT_ID)
        .withOkapiUrl(mockServer.baseUrl())
        .withToken("test-token")
        .withJobExecutionId(UUID.randomUUID().toString())
        .withContext(new HashMap<>(Map.of("JOB_PROFILE_SNAPSHOT_ID", profileSnapshotWrapper.getId())));
    Event event = new Event().withId("01").withEventPayload(Json.encode(dataImportEventPayload));
    // wrap the event into a Kafka record carrying the tracing headers the consumer propagates
    KeyValue<String, String> kafkaRecord = new KeyValue<>("test-key", Json.encode(event));
    kafkaRecord.addHeader(RECORD_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
    kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
    String topic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_NAME, getDefaultNameSpace(), TENANT_ID, dataImportEventPayload.getEventType());
    // when: the record is published to the incoming topic
    cluster.send(SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults());
    // then: exactly one DI_COMPLETED event appears and still carries the record-id header
    String observeTopic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_NAME, getDefaultNameSpace(), TENANT_ID, DI_COMPLETED.value());
    List<KeyValue<String, String>> observedValues = cluster.observe(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
    assertEquals(1, observedValues.size());
    assertNotNull(observedValues.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
Also used : KeyValue(net.mguenther.kafka.junit.KeyValue) Event(org.folio.rest.jaxrs.model.Event) DataImportEventPayload(org.folio.DataImportEventPayload) Test(org.junit.Test)

Example 22 with KeyValue

Use of net.mguenther.kafka.junit.KeyValue in the mod-source-record-storage project by folio-org.

From the class DataImportConsumersVerticleTest, the method shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode.

@Test
public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode() throws InterruptedException {
    // given: a JOB -> ACTION(MODIFY) -> MAPPING profile snapshot tree, built bottom-up
    ProfileSnapshotWrapper mappingWrapper = new ProfileSnapshotWrapper()
        .withContentType(MAPPING_PROFILE)
        .withContent(JsonObject.mapFrom(new MappingProfile()
            .withId(UUID.randomUUID().toString())
            .withIncomingRecordType(MARC_BIBLIOGRAPHIC)
            .withExistingRecordType(MARC_BIBLIOGRAPHIC)
            .withMappingDetails(new MappingDetail()
                .withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY)
                .withMarcMappingDetails(List.of(marcMappingDetail)))).getMap());
    ProfileSnapshotWrapper actionWrapper = new ProfileSnapshotWrapper()
        .withContentType(ACTION_PROFILE)
        .withContent(JsonObject.mapFrom(new ActionProfile()
            .withId(UUID.randomUUID().toString())
            .withAction(MODIFY)
            .withFolioRecord(ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC)).getMap())
        .withChildSnapshotWrappers(singletonList(mappingWrapper));
    ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
        .withId(UUID.randomUUID().toString())
        .withContentType(JOB_PROFILE)
        .withContent(JsonObject.mapFrom(new JobProfile()
            .withId(UUID.randomUUID().toString())
            .withDataType(JobProfile.DataType.MARC)).getMap())
        .withChildSnapshotWrappers(singletonList(actionWrapper));
    // stub the profile-snapshot lookup so the consumer resolves the tree above
    WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
        .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
    String expectedParsedContent = "{\"leader\":\"00107nam  22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
    HashMap<String, String> context = new HashMap<>();
    context.put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
    context.put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
    DataImportEventPayload eventPayload = new DataImportEventPayload()
        .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
        .withJobExecutionId(snapshotId)
        .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
        .withOkapiUrl(mockServer.baseUrl())
        .withTenant(TENANT_ID)
        .withToken(TOKEN)
        .withContext(context);
    KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
    kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
    kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
    String topic = getTopicName(DI_SRS_MARC_BIB_RECORD_CREATED.value());
    // when
    cluster.send(SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults());
    // then: the MODIFY action produced the expected parsed content and kept the tracing header
    String observeTopic = getTopicName(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value());
    List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
    Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
    DataImportEventPayload dataImportEventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
    assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value(), dataImportEventPayload.getEventType());
    Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
    assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
    assertEquals(Record.State.ACTUAL, actualRecord.getState());
    assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
    assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
Also used : KeyValue(net.mguenther.kafka.junit.KeyValue) RegexPattern(com.github.tomakehurst.wiremock.matching.RegexPattern) JobProfile(org.folio.JobProfile) ProfileSnapshotWrapper(org.folio.rest.jaxrs.model.ProfileSnapshotWrapper) DataImportEventPayload(org.folio.rest.jaxrs.model.DataImportEventPayload) MappingProfile(org.folio.MappingProfile) MarcMappingDetail(org.folio.rest.jaxrs.model.MarcMappingDetail) MappingDetail(org.folio.rest.jaxrs.model.MappingDetail) UrlPathPattern(com.github.tomakehurst.wiremock.matching.UrlPathPattern) Event(org.folio.rest.jaxrs.model.Event) ParsedRecord(org.folio.rest.jaxrs.model.ParsedRecord) RawRecord(org.folio.rest.jaxrs.model.RawRecord) Record(org.folio.rest.jaxrs.model.Record) ActionProfile(org.folio.ActionProfile) Test(org.junit.Test) AbstractLBServiceTest(org.folio.services.AbstractLBServiceTest)

Example 23 with KeyValue

Use of net.mguenther.kafka.junit.KeyValue in the mod-source-record-storage project by folio-org.

From the class ParsedRecordChunkConsumersVerticleTest, the method sendRecordsToKafka.

private void sendRecordsToKafka(String jobExecutionId, List<Record> records) throws InterruptedException {
    // Wrap the records in a DI_RAW_RECORDS_CHUNK_PARSED event and publish it to the embedded cluster.
    RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
    Event event = new Event().withEventPayload(Json.encode(recordCollection));
    KeyValue<String, String> kafkaRecord = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
    // Okapi connection headers plus the job-execution correlation id
    Charset headerCharset = Charset.defaultCharset();
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, headerCharset);
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, headerCharset);
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, headerCharset);
    kafkaRecord.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, headerCharset);
    String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
    cluster.send(SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults());
}
Also used : KeyValue(net.mguenther.kafka.junit.KeyValue) RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event)

Example 24 with KeyValue

Use of net.mguenther.kafka.junit.KeyValue in the mod-source-record-storage project by folio-org.

From the class ParsedRecordChunkConsumersVerticleTest, the method sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent.

private void sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent(RecordType recordType, RawRecord rawRecord, ParsedRecord parsedRecord) throws InterruptedException {
    // Publish a single-record DI_RAW_RECORDS_CHUNK_PARSED chunk and block until the
    // consumer emits the corresponding DI_PARSED_RECORDS_CHUNK_SAVED event.
    Record marcRecord = new Record()
        .withId(recordId)
        .withMatchedId(recordId)
        .withSnapshotId(snapshotId)
        .withGeneration(0)
        .withRecordType(recordType)
        .withRawRecord(rawRecord)
        .withParsedRecord(parsedRecord);
    List<Record> records = Collections.singletonList(marcRecord);
    RecordCollection recordCollection = new RecordCollection().withRecords(records).withTotalRecords(records.size());
    Event event = new Event().withEventPayload(Json.encode(recordCollection));
    KeyValue<String, String> kafkaRecord = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
    Charset headerCharset = Charset.defaultCharset();
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, headerCharset);
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, headerCharset);
    kafkaRecord.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, headerCharset);
    String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
    cluster.send(SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults());
    // wait (up to 30s) for the saved-chunk event on the outgoing topic
    String observeTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_PARSED_RECORDS_CHUNK_SAVED.value());
    cluster.observeValues(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
}
Also used : KeyValue(net.mguenther.kafka.junit.KeyValue) ArrayList(java.util.ArrayList) RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event) ParsedRecord(org.folio.rest.jaxrs.model.ParsedRecord) RawRecord(org.folio.rest.jaxrs.model.RawRecord) Record(org.folio.rest.jaxrs.model.Record)

Aggregations

KeyValue (net.mguenther.kafka.junit.KeyValue)24 Event (org.folio.rest.jaxrs.model.Event)22 DataImportEventPayload (org.folio.DataImportEventPayload)13 HashMap (java.util.HashMap)12 ProfileSnapshotWrapper (org.folio.rest.jaxrs.model.ProfileSnapshotWrapper)12 Test (org.junit.jupiter.api.Test)12 ParsedRecord (org.folio.ParsedRecord)9 Record (org.folio.Record)9 InvoiceLineCollection (org.folio.rest.jaxrs.model.InvoiceLineCollection)8 Invoice (org.folio.rest.jaxrs.model.Invoice)6 PoLine (org.folio.rest.acq.model.orders.PoLine)5 PoLineCollection (org.folio.rest.acq.model.orders.PoLineCollection)5 Test (org.junit.Test)4 KafkaConfig (com.bakdata.quick.common.config.KafkaConfig)2 QuickTopicData (com.bakdata.quick.common.type.QuickTopicData)2 KafkaIngestService (com.bakdata.quick.gateway.ingest.KafkaIngestService)2 RegexPattern (com.github.tomakehurst.wiremock.matching.RegexPattern)2 UrlPathPattern (com.github.tomakehurst.wiremock.matching.UrlPathPattern)2 DataFetchingEnvironment (graphql.schema.DataFetchingEnvironment)2 ArrayList (java.util.ArrayList)2