Use of org.folio.JobProfile in project mod-inventory by folio-org.
Class: DeleteAuthorityEventHandlerTest, method: isEligibleShouldReturnFalseIfCurrentNodeIsNotActionProfile.
@Test
public void isEligibleShouldReturnFalseIfCurrentNodeIsNotActionProfile() {
  JobProfile jobProfile = new JobProfile()
    .withId(UUID.randomUUID().toString())
    .withName("Create MARC Authority")
    .withDataType(JobProfile.DataType.MARC);
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withProfileId(jobProfile.getId())
    .withContentType(JOB_PROFILE)
    .withContent(jobProfile);
  DataImportEventPayload dataImportEventPayload = new DataImportEventPayload()
    .withEventType(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.value())
    .withContext(context)
    .withCurrentNode(profileSnapshotWrapper);
  assertFalse(eventHandler.isEligible(dataImportEventPayload));
}
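The eventHandler under test is not reproduced on this page. Below is a minimal sketch of the eligibility check this test exercises, assuming the handler gates on the current node's content type and the action profile's settings; this is a hypothetical reconstruction, not mod-inventory's actual code.

// Hypothetical sketch only: the real DeleteAuthorityEventHandler may apply
// additional conditions beyond what this test exercises.
public boolean isEligible(DataImportEventPayload payload) {
  ProfileSnapshotWrapper currentNode = payload.getCurrentNode();
  if (currentNode == null || currentNode.getContentType() != ACTION_PROFILE) {
    // The test above passes a JOB_PROFILE node, so it must fail this check
    return false;
  }
  ActionProfile actionProfile = JsonObject.mapFrom(currentNode.getContent()).mapTo(ActionProfile.class);
  return actionProfile.getAction() == ActionProfile.Action.DELETE
    && actionProfile.getFolioRecord() == ActionProfile.FolioRecord.MARC_AUTHORITY;
}

Because the test sets a JOB_PROFILE wrapper as the current node, any implementation along these lines returns false, which is exactly what assertFalse verifies.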
Use of org.folio.JobProfile in project mod-source-record-storage by folio-org.
Class: DataImportConsumersVerticleTest, method: shouldDeleteMarcAuthorityRecord.
@Test
public void shouldDeleteMarcAuthorityRecord() throws InterruptedException {
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile()
      .withId(UUID.randomUUID().toString())
      .withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(List.of(new ProfileSnapshotWrapper()
      .withId(UUID.randomUUID().toString())
      .withContentType(ACTION_PROFILE)
      .withOrder(0)
      .withContent(JsonObject.mapFrom(new ActionProfile()
        .withId(UUID.randomUUID().toString())
        .withAction(DELETE)
        .withFolioRecord(ActionProfile.FolioRecord.MARC_AUTHORITY)).getMap())));
  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
  HashMap<String, String> payloadContext = new HashMap<>();
  payloadContext.put("MATCHED_MARC_AUTHORITY", Json.encode(record));
  payloadContext.put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
  var eventPayload = new DataImportEventPayload()
    .withContext(payloadContext)
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0));
  String topic = getTopicName(DI_MARC_FOR_DELETE_RECEIVED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  var request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = getTopicName(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.name());
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());
  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  var resultPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_SRS_MARC_AUTHORITY_RECORD_DELETED.value(), resultPayload.getEventType());
  assertEquals(record.getExternalIdsHolder().getAuthorityId(), resultPayload.getContext().get("AUTHORITY_RECORD_ID"));
  assertEquals(ACTION_PROFILE, resultPayload.getCurrentNode().getContentType());
}
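buildKafkaRecord is a shared helper in this test class that is not shown on this page. A plausible sketch follows, assuming the payload is wrapped in an org.folio Event and JSON-encoded with Vert.x Json; the random record key is an assumption.

// Assumed shape of the buildKafkaRecord(...) helper; not the project's
// verbatim code. KeyValue comes from kafka-junit, Event from org.folio.
private KeyValue<String, String> buildKafkaRecord(DataImportEventPayload payload) {
  Event event = new Event()
    .withId(UUID.randomUUID().toString())
    .withEventPayload(Json.encode(payload));
  // Both tests decode the consumed value back into an Event, so the Kafka
  // value must be the encoded Event rather than the bare payload
  return new KeyValue<>(UUID.randomUUID().toString(), Json.encode(event));
}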
Use of org.folio.JobProfile in project mod-source-record-storage by folio-org.
Class: DataImportConsumersVerticleTest, method: shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode.
@Test
public void shouldModifyRecordWhenPayloadContainsModifyMarcBibActionInCurrentNode() throws InterruptedException {
  ProfileSnapshotWrapper profileSnapshotWrapper = new ProfileSnapshotWrapper()
    .withId(UUID.randomUUID().toString())
    .withContentType(JOB_PROFILE)
    .withContent(JsonObject.mapFrom(new JobProfile()
      .withId(UUID.randomUUID().toString())
      .withDataType(JobProfile.DataType.MARC)).getMap())
    .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
      .withContentType(ACTION_PROFILE)
      .withContent(JsonObject.mapFrom(new ActionProfile()
        .withId(UUID.randomUUID().toString())
        .withAction(MODIFY)
        .withFolioRecord(ActionProfile.FolioRecord.MARC_BIBLIOGRAPHIC)).getMap())
      .withChildSnapshotWrappers(singletonList(new ProfileSnapshotWrapper()
        .withContentType(MAPPING_PROFILE)
        .withContent(JsonObject.mapFrom(new MappingProfile()
          .withId(UUID.randomUUID().toString())
          .withIncomingRecordType(MARC_BIBLIOGRAPHIC)
          .withExistingRecordType(MARC_BIBLIOGRAPHIC)
          .withMappingDetails(new MappingDetail()
            .withMarcMappingOption(MappingDetail.MarcMappingOption.MODIFY)
            .withMarcMappingDetails(List.of(marcMappingDetail)))).getMap())))));
  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(PROFILE_SNAPSHOT_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
  String expectedParsedContent = "{\"leader\":\"00107nam 22000491a 4500\",\"fields\":[{\"001\":\"ybp7406411\"},{\"856\":{\"subfields\":[{\"u\":\"http://libproxy.smith.edu?url=example.com\"}],\"ind1\":\" \",\"ind2\":\" \"}}]}";
  DataImportEventPayload eventPayload = new DataImportEventPayload()
    .withEventType(DI_SRS_MARC_BIB_RECORD_CREATED.value())
    .withJobExecutionId(snapshotId)
    .withCurrentNode(profileSnapshotWrapper.getChildSnapshotWrappers().get(0))
    .withOkapiUrl(mockServer.baseUrl())
    .withTenant(TENANT_ID)
    .withToken(TOKEN)
    .withContext(new HashMap<>() {{
      put(MARC_BIBLIOGRAPHIC.value(), Json.encode(record));
      put(PROFILE_SNAPSHOT_ID_KEY, profileSnapshotWrapper.getId());
    }});
  String topic = getTopicName(DI_SRS_MARC_BIB_RECORD_CREATED.value());
  KeyValue<String, String> kafkaRecord = buildKafkaRecord(eventPayload);
  kafkaRecord.addHeader(RECORD_ID_HEADER, record.getId(), UTF_8);
  kafkaRecord.addHeader(CHUNK_ID_HEADER, UUID.randomUUID().toString(), UTF_8);
  SendKeyValues<String, String> request = SendKeyValues.to(topic, singletonList(kafkaRecord)).useDefaults();

  // when
  cluster.send(request);

  // then
  String observeTopic = getTopicName(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value());
  List<KeyValue<String, String>> observedRecords = cluster.observe(ObserveKeyValues.on(observeTopic, 1)
    .observeFor(30, TimeUnit.SECONDS)
    .build());
  Event obtainedEvent = Json.decodeValue(observedRecords.get(0).getValue(), Event.class);
  DataImportEventPayload dataImportEventPayload = Json.decodeValue(obtainedEvent.getEventPayload(), DataImportEventPayload.class);
  assertEquals(DI_SRS_MARC_BIB_RECORD_MODIFIED_READY_FOR_POST_PROCESSING.value(), dataImportEventPayload.getEventType());
  Record actualRecord = Json.decodeValue(dataImportEventPayload.getContext().get(MARC_BIBLIOGRAPHIC.value()), Record.class);
  assertEquals(expectedParsedContent, actualRecord.getParsedRecord().getContent().toString());
  assertEquals(Record.State.ACTUAL, actualRecord.getState());
  assertEquals(dataImportEventPayload.getJobExecutionId(), actualRecord.getSnapshotId());
  assertNotNull(observedRecords.get(0).getHeaders().lastHeader(RECORD_ID_HEADER));
}
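The marcMappingDetail fixture referenced in the mapping profile above is defined elsewhere in the test class. Judging from the expectedParsedContent assertion, it turns 856$u "example.com" into "http://libproxy.smith.edu?url=example.com", so a plausible reconstruction is an EDIT rule that inserts a prefix before the existing subfield string; the field values and enum choices here are assumptions, not the test's verbatim fixture.

// Assumed definition of the marcMappingDetail fixture; reconstructed from the
// expectedParsedContent assertion, not copied from the test class.
MarcMappingDetail marcMappingDetail = new MarcMappingDetail()
  .withOrder(0)
  .withAction(MarcMappingDetail.Action.EDIT)
  .withField(new MarcField()
    .withField("856")
    .withIndicator1("*")
    .withIndicator2("*")
    .withSubfields(singletonList(new MarcSubfield()
      .withSubfield("u")
      .withSubaction(MarcSubfield.Subaction.INSERT)
      .withPosition(MarcSubfield.Position.BEFORE_STRING)
      .withData(new Data().withText("http://libproxy.smith.edu?url=")))));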