Use of net.mguenther.kafka.junit.KeyValue in project mod-inventory by folio-org.
The class DataImportConsumerVerticleTest, method shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSucceeded.
@Test
public void shouldSendEventWithProcessedEventPayloadWhenProcessingCoreHandlerSucceeded() throws InterruptedException {
  // given
  DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
    .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
    .withTenant(TENANT_ID)
    .withOkapiUrl(mockServer.baseUrl())
    .withToken("test-token")
    .withJobExecutionId(UUID.randomUUID().toString())
    .withContext(new HashMap<>(Map.of("JOB_PROFILE_SNAPSHOT_ID", profileSnapshotWrapper.getId())));

  String topic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_NAME, getDefaultNameSpace(), TENANT_ID, dataImportEventPayload.getEventType());
  Event event = new Event().withId("01").withEventPayload(Json.encode(dataImportEventPayload));

  KeyValue<String, String> record = new KeyValue<>("test-key", Json.encode(event));
  record.addHeader(RECORD_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  record.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_NAME, getDefaultNameSpace(), TENANT_ID, DI_COMPLETED.value());
  List<KeyValue<String, String>> observedValues = cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());

  assertEquals(1, observedValues.size());
  assertNotNull(observedValues.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
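This test follows a pattern that recurs throughout these examples: wrap the payload in an Event, attach tracing headers to the KeyValue, publish it with SendKeyValues, and block on ObserveKeyValues until the downstream event appears. Below is a minimal sketch of that pattern distilled into a reusable helper; the helper name sendAndObserve and the EmbeddedKafkaCluster field named cluster are assumptions for illustration, and only kafka-junit calls already used in the test appear.

// Sketch only: a generic send-and-observe helper built from the kafka-junit calls
// used in the test above. The field "cluster" (an EmbeddedKafkaCluster) and the
// helper name are assumed for illustration.
private List<KeyValue<String, String>> sendAndObserve(String sendTopic, String observeTopic,
    KeyValue<String, String> kafkaRecord) throws InterruptedException {
  SendKeyValues<String, String> request = SendKeyValues.to(sendTopic, Collections.singletonList(kafkaRecord)).useDefaults();
  cluster.send(request);
  // Wait up to 30 seconds for exactly one record on the downstream topic.
  return cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());
}

With such a helper, the // when and // then sections of a test reduce to a single call followed by assertions.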
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class DataImportConsumersVerticleTest, method shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode.
@Test
public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode() throws InterruptedException {
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile()
      .withId(UUID.randomUUID().toString()).withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
      .withContentType(ACTION_PROFILE)
      .withContent(JsonObject.mapFrom(new ActionProfile()
        .withId(UUID.randomUUID().toString())
        .withAction(MODIFY)
        .withFolioRecord(ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC)).getMap())
      .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
        .withContentType(MAPPING_PROFILE)
        .withContent(JsonObject.mapFrom(new MappingProfile()
          .withId(UUID.randomUUID().toString())
          .withIncomingRecordType(MARC_BIBLIOGRAPHIC)
          .withExistingRecordType(MARC_BIBLIOGRAPHIC)
          .withMappingDetails(new MappingDetail()
            .withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY)
            .withMarcMappingDetails(List.of(marcMappingDetail)))).getMap())))));

  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
  String expectedParsedContent = "{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";

  DataImportEventPayload eventPayload = new DataImportEventPayload()
    .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withContext(new HashMap<>() {
      {
        put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
        put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
      }
    });

  String topic = getTopicName(DI_SRS_MARC_BIB_RECORD_CREATED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  SendKeyValues<String, String> request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = getTopicName(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value());
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  DataImportEventPayload dataImportEventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value(), dataImportEventPayload.getEventType());
  Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
  assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
  assertEquals(Record.State.ACTUAL, actualRecord.getState());
  assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
  assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
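Both tests above only assert that RECORD_ID_HEADER survived the pipeline. If a test also needed the propagated value, the observed KeyValue exposes the standard Kafka headers object. A hedged sketch follows; org.apache.kafka.common.header.Header is the type returned by lastHeader, and the equality assertion against record.getId() is hypothetical, not part of the original test.

// Sketch: read the propagated header value back from the observed record.
Header recordIdHeader = observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER);
String propagatedRecordId = new String(recordIdHeader.value(), UTF_8);
assertEquals(record.getId(), propagatedRecordId); // hypothetical assertion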
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class ParsedRecordChunkConsumersVerticleTest, method sendRecordsToKafka.
private void sendRecordsToKafka(String jobExecutionId, List<Record> records) throws InterruptedException {
  RecordCollection recordCollection = new RecordCollection()
    .withRecords(records)
    .withTotalRecords(records.size());
  String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
  Event event = new Event().withEventPayload(Json.encode(recordCollection));

  KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
  record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
  record.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, Charset.defaultCharset());

  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
  cluster.send(request);
}
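A hypothetical caller of this helper, assuming the record fields (recordId, snapshotId, rawRecord, parsedRecord, recordType) are prepared elsewhere in the test class as in the surrounding examples:

// Sketch only: build one record and push it through the helper above.
List<Record> records = List.of(new Record()
  .withId(recordId)
  .withSnapshotId(snapshotId)
  .withRecordType(recordType)
  .withRawRecord(rawRecord)
  .withParsedRecord(parsedRecord));
sendRecordsToKafka(UUID.randomUUID().toString(), records);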
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class ParsedRecordChunkConsumersVerticleTest, method sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent.
private void sendEventWithSavedMarcRecordCollectionPayloadAfterProcessingParsedRecordEvent(RecordType recordType, RawRecord rawRecord, ParsedRecord parsedRecord) throws InterruptedException {
  List<Record> records = new ArrayList<>();
  records.add(new Record()
    .withId(recordId)
    .withMatchedId(recordId)
    .withSnapshotId(snapshotId)
    .withGeneration(0)
    .withRecordType(recordType)
    .withRawRecord(rawRecord)
    .withParsedRecord(parsedRecord));
  RecordCollection recordCollection = new RecordCollection()
    .withRecords(records)
    .withTotalRecords(records.size());

  String topic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_RAW_RECORDS_CHUNK_PARSED.value());
  Event event = new Event().withEventPayload(Json.encode(recordCollection));

  KeyValue<String, String> record = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
  record.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
  record.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());

  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(record)).useDefaults();
  cluster.send(request);

  String observeTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_PARSED_RECORDS_CHUNK_SAVED.value());
  cluster.observeValues(ObserveKeyValues.on(observeTopic, 1).observeFor(30, TimeUnit.SECONDS).build());
}
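The helper discards the result of observeValues, using it only as a synchronization point. If the saved chunk itself were needed, the final call could instead capture and decode the observed values; a sketch, assuming the value is a JSON-encoded Event as in the other examples (the shape of its payload is not asserted here):

// Sketch: capture and decode the observed value instead of discarding it.
List<String> observedValues = cluster.observeValues(ObserveKeyValues.on(observeTopic, 1)
  .observeFor(30, TimeUnit.SECONDS)
  .build());
Event savedEvent = Json.decodeValue(observedValues.get(0), Event.class);
assertNotNull(savedEvent.getEventPayload());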