Use of net.mguenther.kafka.junit.KeyValue in project mod-invoice by folio-org.
The class CreateInvoiceEventHandlerTest, method shouldPublishDiErrorWithInvoiceLineErrorWhenOneOfInvoiceLinesCreationFailed:
@Test
public void shouldPublishDiErrorWithInvoiceLineErrorWhenOneOfInvoiceLinesCreationFailed() throws IOException, InterruptedException {
  // given
  Record record = new Record()
    .withParsedRecord(new ParsedRecord().withContent(PARSED_CONTENT_INVOICE_LINE_3_HAS_NO_SUBTOTAL))
    .withId(UUID.randomUUID().toString());
  ProfileSnapshotWrapper profileSnapshotWrapper = buildProfileSnapshotWrapper(jobProfile, actionProfile, mappingProfile);
  addMockEntry(JOB_PROFILE_SNAPSHOTS_MOCK, profileSnapshotWrapper);

  HashMap<String, String> payloadContext = new HashMap<>();
  payloadContext.put(EDIFACT_INVOICE.value(), Json.encode(record));
  payloadContext.put(JOB_PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());

  DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
    .withEventType(DI_EDIFACT_RECORD_CREATED.value())
    .withTenant(DI_POST_INVOICE_LINES_SUCCESS_TENANT)
    .withOkapiUrl(OKAPI_URL)
    .withToken(TOKEN)
    .withContext(payloadContext);

  String topic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_VALUE, getDefaultNameSpace(),
    DI_POST_INVOICE_LINES_SUCCESS_TENANT, dataImportEventPayload.getEventType());
  Event event = new Event().withEventPayload(Json.encode(dataImportEventPayload));
  KeyValue<String, String> kafkaRecord = new KeyValue<>("test-key", Json.encode(event));
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults();

  // when
  kafkaCluster.send(request);

  // then
  String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_VALUE, getDefaultNameSpace(),
    DI_POST_INVOICE_LINES_SUCCESS_TENANT, DI_ERROR.value());
  List<String> observedValues = kafkaCluster.observeValues(ObserveKeyValues.on(topicToObserve, 1)
    .with(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID)
    .observeFor(30, TimeUnit.SECONDS)
    .build());

  Event obtainedEvent = Json.decodeValue(observedValues.get(0), Event.class);
  DataImportEventPayload eventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_INVOICE_CREATED.value(), eventPayload.getEventsChain().get(eventPayload.getEventsChain().size() - 1));

  assertNotNull(eventPayload.getContext().get(INVOICE.value()));
  Invoice createdInvoice = Json.decodeValue(eventPayload.getContext().get(INVOICE.value()), Invoice.class);
  assertNotNull(eventPayload.getContext().get(INVOICE_LINES_KEY));
  InvoiceLineCollection invoiceLines = Json.decodeValue(eventPayload.getContext().get(INVOICE_LINES_KEY), InvoiceLineCollection.class);
  assertEquals(3, invoiceLines.getTotalRecords());
  assertEquals(3, invoiceLines.getInvoiceLines().size());
  invoiceLines.getInvoiceLines().forEach(invLine -> assertEquals(createdInvoice.getId(), invLine.getInvoiceId()));

  assertNotNull(eventPayload.getContext().get(INVOICE_LINES_ERRORS_KEY));
  Map<Integer, String> invoiceLinesErrors = DatabindCodec.mapper()
    .readValue(eventPayload.getContext().get(INVOICE_LINES_ERRORS_KEY), new TypeReference<>() {});
  assertEquals(1, invoiceLinesErrors.size());
  assertNull(invoiceLinesErrors.get(1));
  assertNull(invoiceLinesErrors.get(2));
  assertNotNull(invoiceLinesErrors.get(3));
}
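Stripped of the FOLIO-specific setup, the kafka-junit interaction above reduces to a simple round trip: build a KeyValue, attach a header, send it through the embedded cluster, then block until it can be observed on a topic. The following is a minimal sketch assuming an already running EmbeddedKafkaCluster; the topic name, key, value, and header are placeholders, not taken from the test above.

import static java.nio.charset.StandardCharsets.UTF_8;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
import net.mguenther.kafka.junit.KeyValue;
import net.mguenther.kafka.junit.ObserveKeyValues;
import net.mguenther.kafka.junit.SendKeyValues;

class KeyValueRoundTripSketch {

  // Publishes one keyed record with a header and waits until it shows up on the topic.
  static List<String> sendAndObserve(EmbeddedKafkaCluster cluster) throws InterruptedException {
    KeyValue<String, String> record = new KeyValue<>("some-key", "some-value");
    // The third argument is the charset used to encode the header value into bytes.
    record.addHeader("record-id", "42", UTF_8);

    cluster.send(SendKeyValues.to("some-topic", Collections.singletonList(record)).useDefaults());

    // Blocks until one record arrives on the topic or the 30-second timeout elapses.
    return cluster.observeValues(
      ObserveKeyValues.on("some-topic", 1).observeFor(30, TimeUnit.SECONDS).build());
  }
}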
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class DataImportConsumersVerticleTest, method shouldDeleteMarcAuthorityRecord:
@Test
public void shouldDeleteMarcAuthorityRecord() throws InterruptedException {
  // given
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile()
      .withId(UUID.randomUUID().toString())
      .withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(List.of(new ProfileSnapshotWrapper()
      .withId(UUID.randomUUID().toString())
      .withContentType(ACTION_PROFILE)
      .withOrder(0)
      .withContent(JsonObject.mapFrom(new ActionProfile()
        .withId(UUID.randomUUID().toString())
        .withAction(DELETE)
        .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY)).getMap())));
  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));

  HashMap<String, String> payloadContext = new HashMap<>();
  payloadContext.put("MATCHED_MARC_AUTHORITY", Json.encode(record));
  payloadContext.put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());

  var eventPayload = new DataImportEventPayload()
    .withContext(payloadContext)
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));

  String topic = getTopicName(DI_MARC_FOR_DELETE_RECEIVED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  var request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = getTopicName(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.name());
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());

  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  var resultPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.value(), resultPayload.getEventType());
  assertEquals(record.getExternalIdsHolder().getAuthorityId(), resultPayload.getContext().get("AUTHORITY_RECORD_ID"));
  assertEquals(ACTION_PROFILE, resultPayload.getCurrentNode().getContentType());
}
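Note the difference from the first example: there, observeValues(...) returned only the record values as strings, whereas observe(...) used here returns the full KeyValue records, so the event payload has to be unwrapped with getValue() before decoding.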
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class DataImportConsumersVerticleTest, method buildKafkaRecord:
private KeyValue<String, String> buildKafkaRecord(DataImportEventPayload eventPayload) {
  Event event = new Event().withEventPayload(Json.encode(eventPayload));
  KeyValue<String, String> record = new KeyValue<>("1", Json.encode(event));
  record.addHeader(RestUtil.OKAPI_URL_HEADER, mockServer.baseUrl(), StandardCharsets.UTF_8);
  record.addHeader(RestUtil.OKAPI_TENANT_HEADER, TENANT_ID, StandardCharsets.UTF_8);
  record.addHeader(RestUtil.OKAPI_TOKEN_HEADER, TOKEN, StandardCharsets.UTF_8);
  record.addHeader(JOB_EXECUTION_ID_HEADER, snapshotId, StandardCharsets.UTF_8);
  return record;
}
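This helper keeps the Okapi wiring in one place: every outgoing record carries the URL, tenant, token, and job execution id as Kafka record headers, attached through KeyValue.addHeader(name, value, charset), where the charset determines how each header value is encoded into bytes.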
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-storage by folio-org.
The class QuickMarcKafkaHandlerTest, method createRequest:
private SendKeyValues<String, String> createRequest(HashMap<String, String> payload) {
  String topic = formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, QM_RECORD_UPDATED.name());
  Event event = new Event().withId(UUID.randomUUID().toString()).withEventPayload(Json.encode(payload));
  KeyValue<String, String> eventRecord = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
  eventRecord.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, Charset.defaultCharset());
  eventRecord.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, Charset.defaultCharset());
  eventRecord.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, Charset.defaultCharset());
  return SendKeyValues.to(topic, Collections.singletonList(eventRecord)).useDefaults();
}
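Unlike buildKafkaRecord above, this variant returns the ready-to-send SendKeyValues request itself; useDefaults() finalizes the builder with kafka-junit's default send parameters, so a caller only has to pass the result to the cluster's send method.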
Use of net.mguenther.kafka.junit.KeyValue in project quick by bakdata.
The class MutationFetcherTest, method shouldIngestDataWithDifferentKeyTypes:
@ParameterizedTest(name = "shouldIngestDataWithDifferentKeyTypes ({0})")
@MethodSource("provideValueArgumentsForKeys")
<K, T> void shouldIngestDataWithDifferentKeyTypes(final TestParameterBuilder<K, String, T> testParameter) throws Exception {
  final String topic = testParameter.getTopic();
  final QuickData<T> keyInfo = testParameter.getInfoType();
  final QuickTopicData<T, String> info =
    new QuickTopicData<>(topic, TopicWriteType.MUTABLE, keyInfo, TestTypeUtils.newStringData());
  final KafkaConfig kafkaConfig = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
  kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());

  final TopicTypeService typeService = topicTypeService(keyInfo.getType(), QuickTopicType.STRING, null);
  final KafkaIngestService kafkaIngestService = new KafkaIngestService(typeService, kafkaConfig);
  final DataFetcher<String> mutationFetcher =
    new MutationFetcher<>(topic, "id", "name", new Lazy<>(() -> info), kafkaIngestService);

  final KeyValue<K, String> keyValue = testParameter.getKeyValue();
  final DataFetchingEnvironment env = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
    .arguments(Map.of("id", keyValue.getKey(), "name", keyValue.getValue()))
    .build();

  final String actual = mutationFetcher.get(env);
  assertThat(actual).isEqualTo(keyValue.getValue());

  final Optional<KeyValue<String, String>> consumedRecords = kafkaCluster.read(ReadKeyValues.from(topic, String.class)
      .with(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyInfo.getSerde().deserializer().getClass())
      .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class))
    .stream()
    .findFirst();
  assertThat(consumedRecords).isPresent().get()
    .hasFieldOrPropertyWithValue("key", keyValue.getKey())
    .hasFieldOrPropertyWithValue("value", keyValue.getValue());
}
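Unlike the observe(...) calls in the earlier examples, which block until the expected number of records arrives, kafkaCluster.read(...) returns whatever is currently in the topic in a single pass. The per-test key deserializer is injected through the standard ConsumerConfig keys, which is what lets the same assertions work for every key type supplied by the parameterized source.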