Use of net.mguenther.kafka.junit.KeyValue in project quick by bakdata: class MutationFetcherTest, method shouldIngestDataWithDifferentValueTypes.
@ParameterizedTest(name = "shouldIngestDataWithDifferentValueTypes ({0})")
@MethodSource("provideValueArguments")
<V, T> void shouldIngestDataWithDifferentValueTypes(final TestParameterBuilder<String, V, T> testParameter) throws Exception {
final String topic = testParameter.getTopic();
final QuickData<T> valueInfo = testParameter.getInfoType();
final QuickTopicData<String, T> info = new QuickTopicData<>(topic, TopicWriteType.MUTABLE, TestTypeUtils.newStringData(), valueInfo);
final KafkaConfig kafkaConfig = new KafkaConfig(kafkaCluster.getBrokerList(), schemaRegistry.getUrl());
kafkaCluster.createTopic(TopicConfig.withName(topic).useDefaults());
final KafkaIngestService kafkaIngestService =
    new KafkaIngestService(topicTypeService(QuickTopicType.STRING, valueInfo.getType(), null), kafkaConfig);
final DataFetcher<T> mutationFetcher =
    new MutationFetcher<>(topic, "id", "name", new Lazy<>(() -> info), kafkaIngestService);
final KeyValue<String, V> keyValue = testParameter.getKeyValue();
final DataFetchingEnvironment env = DataFetchingEnvironmentImpl.newDataFetchingEnvironment()
    .arguments(Map.of("id", keyValue.getKey(), "name", keyValue.getValue()))
    .build();
final T actual = mutationFetcher.get(env);
assertThat(actual).isEqualTo(keyValue.getValue());
final Optional<KeyValue<String, T>> consumedRecords = kafkaCluster.read(
        ReadKeyValues.from(topic, testParameter.getClassType())
            .with("schema.registry.url", schemaRegistry.getUrl())
            .with(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueInfo.getSerde().deserializer().getClass()))
    .stream()
    .findFirst();
assertThat(consumedRecords).isPresent()
    .get()
    .hasFieldOrPropertyWithValue("key", keyValue.getKey())
    .hasFieldOrPropertyWithValue("value", keyValue.getValue());
}
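For context, the read-back above relies on kafka-junit's embedded cluster. The following is a minimal, self-contained sketch of the same send-and-read round trip, assuming kafka-junit 3.x (where the cluster is started and stopped manually) and the default String serdes; the topic name and values are illustrative, not taken from the project:
import static net.mguenther.kafka.junit.EmbeddedKafkaCluster.provisionWith;
import static net.mguenther.kafka.junit.EmbeddedKafkaClusterConfig.defaultClusterConfig;

import java.util.Collections;
import java.util.List;
import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
import net.mguenther.kafka.junit.KeyValue;
import net.mguenther.kafka.junit.ReadKeyValues;
import net.mguenther.kafka.junit.SendKeyValues;

class KeyValueRoundTripSketch {

    void sendAndReadBack() throws InterruptedException {
        // Provision and start a single-broker embedded cluster (kafka-junit 3.x style).
        EmbeddedKafkaCluster kafka = provisionWith(defaultClusterConfig());
        kafka.start();
        try {
            // Send one String/String record with default producer settings.
            KeyValue<String, String> record = new KeyValue<>("id-1", "value-1");
            kafka.send(SendKeyValues.to("demo-topic", Collections.singletonList(record)).useDefaults());

            // Read it back; ReadKeyValues defaults to String deserializers.
            List<KeyValue<String, String>> consumed = kafka.read(ReadKeyValues.from("demo-topic"));
            assert "value-1".equals(consumed.get(0).getValue());
        } finally {
            kafka.stop();
        }
    }
}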
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-manager by folio-org: class RawMarcChunkConsumersVerticleTest, method shouldNotObserveValuesWhenJobExecutionIdNotCreated.
@Test
public void shouldNotObserveValuesWhenJobExecutionIdNotCreated() throws InterruptedException, IOException {
RawRecordsDto chunk = getChunk(RecordsMetadata.ContentType.MARC_RAW, RAW_RECORD_WITH_999_ff_field);
String jobExecutionId = UUID.randomUUID().toString();
Event event = new Event().withId(UUID.randomUUID().toString()).withEventPayload(Json.encode(chunk));
KeyValue<String, String> kafkaRecord = new KeyValue<>("1", Json.encode(event));
kafkaRecord.addHeader(OKAPI_TENANT_HEADER, TENANT_ID, UTF_8);
kafkaRecord.addHeader(OKAPI_URL_HEADER, snapshotMockServer.baseUrl(), UTF_8);
kafkaRecord.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, UTF_8);
String topic = formatToKafkaTopicName(DI_RAW_RECORDS_CHUNK_READ.value());
SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults();
// when
kafkaCluster.send(request);
// then
checkEventWithTypeWasNotSend(jobExecutionId, DI_RAW_RECORDS_CHUNK_PARSED);
checkDiErrorEventsSent(jobExecutionId, "Couldn't find JobExecution with id");
}
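checkEventWithTypeWasNotSend and checkDiErrorEventsSent are helpers local to this test class. A generic way to assert that nothing reached a topic with kafka-junit is to let a short observation time out, since observeValues throws an AssertionError when the expected record count is not reached within the window. A minimal sketch, with an illustrative five-second window:
import java.util.concurrent.TimeUnit;
import net.mguenther.kafka.junit.EmbeddedKafkaCluster;
import net.mguenther.kafka.junit.ObserveKeyValues;

class NoEventSketch {

    // Asserts that no record shows up on the given topic within the observation window.
    static void assertNothingObserved(EmbeddedKafkaCluster kafka, String topic) throws InterruptedException {
        try {
            kafka.observeValues(ObserveKeyValues.on(topic, 1)
                .observeFor(5, TimeUnit.SECONDS)
                .build());
            // Deliberately not an AssertionError, so it is not swallowed by the catch below.
            throw new IllegalStateException("Expected no records on " + topic + ", but observed at least one");
        } catch (AssertionError expected) {
            // The observation timed out without seeing a record: the desired outcome.
        }
    }
}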
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-manager by folio-org: class RawMarcChunkConsumersVerticleTest, method prepareWithSpecifiedEventPayload.
private SendKeyValues<String, String> prepareWithSpecifiedEventPayload(JobProfileInfo.DataType dataType, String eventPayload) {
String jobExecutionId = emulateJobExecutionIdRequest(dataType);
Event event = new Event().withId(UUID.randomUUID().toString()).withEventPayload(eventPayload);
KeyValue<String, String> kafkaRecord = new KeyValue<>("key", Json.encode(event));
kafkaRecord.addHeader(OKAPI_TENANT_HEADER, TENANT_ID, UTF_8);
kafkaRecord.addHeader(OKAPI_URL_HEADER, snapshotMockServer.baseUrl(), UTF_8);
kafkaRecord.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, UTF_8);
String topic = formatToKafkaTopicName(DI_RAW_RECORDS_CHUNK_READ.value());
return SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults();
}
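A call site for this helper would follow the same send pattern as shouldNotObserveValuesWhenJobExecutionIdNotCreated above; a hypothetical sketch (the MARC_RAW chunk payload is illustrative):
// Hypothetical usage of the helper above; mirrors the kafkaCluster.send(...)
// pattern used elsewhere in this test class.
RawRecordsDto chunk = getChunk(RecordsMetadata.ContentType.MARC_RAW, RAW_RECORD_WITH_999_ff_field);
SendKeyValues<String, String> request =
    prepareWithSpecifiedEventPayload(JobProfileInfo.DataType.MARC, Json.encode(chunk));
kafkaCluster.send(request);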
Use of net.mguenther.kafka.junit.KeyValue in project mod-source-record-manager by folio-org: class StoredRecordChunkConsumersVerticleTest, method getRequest.
private SendKeyValues<String, String> getRequest(String jobExecutionId, RecordsBatchResponse recordsBatch) {
Event event = new Event().withId(UUID.randomUUID().toString()).withEventPayload(Json.encode(recordsBatch));
KeyValue<String, String> kafkaRecord = new KeyValue<>("42", (Json.encode(event)));
kafkaRecord.addHeader(OKAPI_TENANT_HEADER, TENANT_ID, UTF_8);
kafkaRecord.addHeader(OKAPI_TOKEN_HEADER, TOKEN, UTF_8);
kafkaRecord.addHeader(JOB_EXECUTION_ID_HEADER, jobExecutionId, UTF_8);
String topic = formatToKafkaTopicName(DI_PARSED_RECORDS_CHUNK_SAVED.value());
return SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults();
}
Use of net.mguenther.kafka.junit.KeyValue in project mod-invoice by folio-org: class CreateInvoiceEventHandlerTest, method shouldMatchPoLinesByPoLineNumberAndCreateInvoiceLinesWithPoLinesFundDistributions.
@Test
public void shouldMatchPoLinesByPoLineNumberAndCreateInvoiceLinesWithPoLinesFundDistributions() throws IOException, InterruptedException {
// given
PoLine poLine1 = Json.decodeValue(
    getMockData(String.format(MOCK_DATA_PATH_PATTERN, PO_LINES_MOCK_DATA_PATH, PO_LINE_ID_1)), PoLine.class);
PoLine poLine3 = Json.decodeValue(
    getMockData(String.format(MOCK_DATA_PATH_PATTERN, PO_LINES_MOCK_DATA_PATH, PO_LINE_ID_3)), PoLine.class);
PoLineCollection poLineCollection = new PoLineCollection().withPoLines(List.of(poLine1, poLine3));
when(mockOrderLinesRestClient.get(any(), any(RequestContext.class), eq(PoLineCollection.class)))
    .thenReturn(CompletableFuture.completedFuture(poLineCollection));
ProfileSnapshotWrapper profileSnapshotWrapper = buildProfileSnapshotWrapper(jobProfile, actionProfile, mappingProfileWithPoLineFundDistribution);
addMockEntry(JOB_PROFILE_SNAPSHOTS_MOCK, profileSnapshotWrapper);
Record record = new Record()
    .withParsedRecord(new ParsedRecord().withContent(EDIFACT_PARSED_CONTENT))
    .withId(UUID.randomUUID().toString());
HashMap<String, String> payloadContext = new HashMap<>();
payloadContext.put(EDIFACT_INVOICE.value(), Json.encode(record));
payloadContext.put(JOB_PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
    .withEventType(DI_EDIFACT_RECORD_CREATED.value())
    .withTenant(DI_POST_INVOICE_LINES_SUCCESS_TENANT)
    .withOkapiUrl(OKAPI_URL)
    .withToken(TOKEN)
    .withContext(payloadContext);
String topic = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_VALUE, getDefaultNameSpace(),
    DI_POST_INVOICE_LINES_SUCCESS_TENANT, dataImportEventPayload.getEventType());
Event event = new Event().withEventPayload(Json.encode(dataImportEventPayload));
KeyValue<String, String> kafkaRecord = new KeyValue<>("test-key", Json.encode(event));
kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
SendKeyValues<String, String> request = SendKeyValues.to(topic, Collections.singletonList(kafkaRecord)).useDefaults();
// when
kafkaCluster.send(request);
// then
String topicToObserve = KafkaTopicNameHelper.formatTopicName(KAFKA_ENV_VALUE, getDefaultNameSpace(),
    DI_POST_INVOICE_LINES_SUCCESS_TENANT, DI_COMPLETED.value());
List<String> observedValues = kafkaCluster.observeValues(ObserveKeyValues.on(topicToObserve, 1)
    .with(ConsumerConfig.GROUP_ID_CONFIG, GROUP_ID)
    .observeFor(30, TimeUnit.SECONDS)
    .build());
Event obtainedEvent = Json.decodeValue(observedValues.get(0), Event.class);
DataImportEventPayload eventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
assertEquals(DI_INVOICE_CREATED.value(), eventPayload.getEventsChain().get(eventPayload.getEventsChain().size() - 1));
assertNotNull(eventPayload.getContext().get(INVOICE.value()));
Invoice createdInvoice = Json.decodeValue(eventPayload.getContext().get(INVOICE.value()), Invoice.class);
assertNotNull(eventPayload.getContext().get(INVOICE_LINES_KEY));
InvoiceLineCollection createdInvoiceLines = Json.decodeValue(eventPayload.getContext().get(INVOICE_LINES_KEY), InvoiceLineCollection.class);
assertEquals(3, createdInvoiceLines.getTotalRecords());
assertEquals(3, createdInvoiceLines.getInvoiceLines().size());
createdInvoiceLines.getInvoiceLines().forEach(invLine -> assertEquals(createdInvoice.getId(), invLine.getInvoiceId()));
assertEquals(poLine1.getId(), createdInvoiceLines.getInvoiceLines().get(0).getPoLineId());
assertEquals(poLine3.getId(), createdInvoiceLines.getInvoiceLines().get(2).getPoLineId());
assertNull(createdInvoiceLines.getInvoiceLines().get(1).getPoLineId());
// compare fundDistributions as JsonObject since fund distributions are represented by different classes in invoice line and po line
assertEquals(new JsonArray(Json.encode(poLine1.getFundDistribution())),
    new JsonArray(Json.encode(createdInvoiceLines.getInvoiceLines().get(0).getFundDistributions())));
assertEquals(new JsonArray(Json.encode(poLine3.getFundDistribution())),
    new JsonArray(Json.encode(createdInvoiceLines.getInvoiceLines().get(2).getFundDistributions())));
assertTrue(createdInvoiceLines.getInvoiceLines().get(1).getFundDistributions().isEmpty());
}
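The observe-then-decode tail of this test is a recurring pattern in these data-import suites; as an illustrative sketch only, it could be factored into a helper like the following (the helper name is not from the project; Event, DataImportEventPayload, and Json are the same classes used in the test above):
// Illustrative helper: wait for one event on the topic and decode its payload.
// Mirrors the observe/decode steps of the test above.
static DataImportEventPayload observePayload(EmbeddedKafkaCluster kafka, String topic, String groupId)
        throws InterruptedException {
    List<String> values = kafka.observeValues(ObserveKeyValues.on(topic, 1)
        .with(ConsumerConfig.GROUP_ID_CONFIG, groupId)
        .observeFor(30, TimeUnit.SECONDS)
        .build());
    Event event = Json.decodeValue(values.get(0), Event.class);
    return Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
}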