Example usage of org.folio.dao.JobExecutionSourceChunkDao in the folio-org project mod-source-record-manager,
taken from the class RawMarcChunkConsumersVerticleTest, method shouldNotSendAnyEventsForDuplicates.
@Test
public void shouldNotSendAnyEventsForDuplicates() throws InterruptedException {
  // given: a raw MARC chunk whose source-chunk id is already persisted as IN_PROGRESS,
  // so the consumer should recognize the incoming chunk as a duplicate.
  RawRecordsDto chunk = getChunk(RecordsMetadata.ContentType.MARC_RAW, RAW_RECORD_WITH_999_ff_field);
  // Simple name suffices here — the type is already referenced unqualified on this line,
  // so the import is in scope; the fully-qualified class literal was redundant.
  JobExecutionSourceChunkDao jobExecutionSourceChunkDao = getBeanFromSpringContext(vertx, JobExecutionSourceChunkDao.class);
  // NOTE(review): save(...) in Vert.x-style DAOs usually returns a Future that is not
  // awaited here — the test assumes the row is persisted before the Kafka consumer
  // processes the chunk; confirm this is not a race on slow CI hosts.
  jobExecutionSourceChunkDao.save(new JobExecutionSourceChunk().withId(chunk.getId()).withState(JobExecutionSourceChunk.State.IN_PROGRESS), TENANT_ID);
  SendKeyValues<String, String> request = prepareWithSpecifiedEventPayload(JobProfileInfo.DataType.MARC, Json.encode(chunk));
  String jobExecutionId = getJobExecutionId(request);

  // when: the duplicate chunk is published to the embedded Kafka cluster
  kafkaCluster.send(request);

  // then: neither a parsed-chunk event nor an error event is emitted for this job execution
  checkEventWithTypeWasNotSend(jobExecutionId, DI_RAW_RECORDS_CHUNK_PARSED);
  checkEventWithTypeWasNotSend(jobExecutionId, DI_ERROR);
}
Aggregations