Search in sources :

Example 21 with Event

use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

From the class ParsedRecordChunkConsumersVerticleTest, the method check_DI_ERROR_eventsSent:

private void check_DI_ERROR_eventsSent(String jobExecutionId, List<Record> records, String... errorMessages) throws InterruptedException {
    // Topic where DI_ERROR events are published for this environment/tenant.
    String errorTopic = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, DI_ERROR.value());
    // Try a non-blocking read first; if nothing has arrived yet, wait up to 30 seconds
    // for at least one event per input record.
    List<String> rawEvents = cluster.readValues(ReadKeyValues.from(errorTopic).build());
    if (CollectionUtils.isEmpty(rawEvents)) {
        rawEvents = cluster.observeValues(ObserveKeyValues.on(errorTopic, records.size()).observeFor(30, TimeUnit.SECONDS).build());
    }
    // Keep only the payloads belonging to the job execution under test.
    List<DataImportEventPayload> matchingPayloads = new ArrayList<>();
    for (String rawEvent : rawEvents) {
        Event decodedEvent = Json.decodeValue(rawEvent, Event.class);
        DataImportEventPayload payload = Json.decodeValue(decodedEvent.getEventPayload(), DataImportEventPayload.class);
        if (jobExecutionId.equals(payload.getJobExecutionId())) {
            matchingPayloads.add(payload);
        }
    }
    assertEquals(EXPECTED_ERROR_EVENTS_NUMBER, matchingPayloads.size());
    // Every matching payload must carry the error type, tenant, a record id,
    // all expected error fragments, and the originating event chain.
    for (DataImportEventPayload payload : matchingPayloads) {
        String recordId = payload.getContext().get(ParsedRecordChunksErrorHandler.RECORD_ID_HEADER);
        String error = payload.getContext().get(ParsedRecordChunksErrorHandler.ERROR_KEY);
        assertEquals(DI_ERROR.value(), payload.getEventType());
        assertEquals(TENANT_ID, payload.getTenant());
        assertTrue(StringUtils.isNotBlank(recordId));
        for (String expectedFragment : errorMessages) {
            assertTrue(error.contains(expectedFragment));
        }
        assertFalse(payload.getEventsChain().isEmpty());
        assertEquals(DI_LOG_SRS_MARC_BIB_RECORD_CREATED.value(), payload.getEventsChain().get(0));
    }
}
Also used : ArrayList(java.util.ArrayList) Event(org.folio.rest.jaxrs.model.Event) DataImportEventPayload(org.folio.rest.jaxrs.model.DataImportEventPayload)

Example 22 with Event

use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

From the class QuickMarcKafkaHandlerTest, the method createRequest:

private SendKeyValues<String, String> createRequest(HashMap<String, String> payload) {
    // Wrap the payload in an Event envelope keyed by a fresh UUID.
    Event event = new Event()
        .withId(UUID.randomUUID().toString())
        .withEventPayload(Json.encode(payload));
    String topic = formatTopicName(kafkaConfig.getEnvId(), getDefaultNameSpace(), TENANT_ID, QM_RECORD_UPDATED.name());
    KeyValue<String, String> message = new KeyValue<>(KAFKA_KEY_NAME, Json.encode(event));
    // Okapi headers the consumer needs to resolve the tenant context.
    Charset charset = Charset.defaultCharset();
    message.addHeader(OkapiConnectionParams.OKAPI_URL_HEADER, OKAPI_URL, charset);
    message.addHeader(OkapiConnectionParams.OKAPI_TENANT_HEADER, TENANT_ID, charset);
    message.addHeader(OkapiConnectionParams.OKAPI_TOKEN_HEADER, TOKEN, charset);
    return SendKeyValues.to(topic, Collections.singletonList(message)).useDefaults();
}
Also used : KeyValue(net.mguenther.kafka.junit.KeyValue) Event(org.folio.rest.jaxrs.model.Event)

Example 23 with Event

use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

From the class DataImportKafkaHandler, the method handle:

@Override
public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
    // Correlation ids travel in Kafka headers alongside the record value.
    String recordId = extractValueFromHeaders(targetRecord.headers(), RECORD_ID_HEADER);
    String chunkId = extractValueFromHeaders(targetRecord.headers(), CHUNK_ID_HEADER);
    try {
        Event event = ObjectMapperTool.getMapper().readValue(targetRecord.value(), Event.class);
        DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
        LOGGER.debug("Data import event payload has been received with event type: '{}' by jobExecutionId: '{}' and recordId: '{}' and chunkId: '{}'", eventPayload.getEventType(), eventPayload.getJobExecutionId(), recordId, chunkId);
        // Propagate the correlation ids into the payload context for downstream handlers.
        eventPayload.getContext().put(RECORD_ID_HEADER, recordId);
        eventPayload.getContext().put(CHUNK_ID_HEADER, chunkId);
        OkapiConnectionParams params = RestUtil.retrieveOkapiConnectionParams(eventPayload, vertx);
        String jobProfileSnapshotId = eventPayload.getContext().get(PROFILE_SNAPSHOT_ID_KEY);
        Promise<String> promise = Promise.promise();
        // Resolve the job profile snapshot from cache, then hand the payload to the
        // event manager; a missing snapshot fails the stage up front.
        profileSnapshotCache.get(jobProfileSnapshotId, params)
            .toCompletionStage()
            .thenCompose(snapshotOptional -> snapshotOptional
                .map(profileSnapshot -> EventManager.handleEvent(eventPayload, profileSnapshot))
                .orElse(CompletableFuture.failedFuture(new EventProcessingException(
                    format("Job profile snapshot with id '%s' does not exist", jobProfileSnapshotId)))))
            .whenComplete((processedPayload, throwable) -> {
                if (throwable != null) {
                    promise.fail(throwable);
                    return;
                }
                // A DI_ERROR outcome means processing failed even though the stage completed.
                if (DI_ERROR.value().equals(processedPayload.getEventType())) {
                    promise.fail(format("Failed to process data import event payload from topic '%s' by jobExecutionId: '%s' with recordId: '%s' and chunkId: '%s' ", targetRecord.topic(), eventPayload.getJobExecutionId(), recordId, chunkId));
                } else {
                    promise.complete(targetRecord.key());
                }
            });
        return promise.future();
    } catch (Exception e) {
        LOGGER.error("Failed to process data import kafka record from topic '{}' with recordId: '{}' and chunkId: '{}' ", targetRecord.topic(), recordId, chunkId, e);
        return Future.failedFuture(e);
    }
}
Also used : Event(org.folio.rest.jaxrs.model.Event) Json(io.vertx.core.json.Json) RestUtil(org.folio.services.util.RestUtil) Promise(io.vertx.core.Promise) Vertx(io.vertx.core.Vertx) DataImportEventPayload(org.folio.DataImportEventPayload) Autowired(org.springframework.beans.factory.annotation.Autowired) CompletableFuture(java.util.concurrent.CompletableFuture) AsyncRecordHandler(org.folio.kafka.AsyncRecordHandler) Future(io.vertx.core.Future) OkapiConnectionParams(org.folio.dataimport.util.OkapiConnectionParams) String.format(java.lang.String.format) DI_ERROR(org.folio.DataImportEventTypes.DI_ERROR) Component(org.springframework.stereotype.Component) List(java.util.List) Logger(org.apache.logging.log4j.Logger) EventProcessingException(org.folio.processing.exceptions.EventProcessingException) KafkaConsumerRecord(io.vertx.kafka.client.consumer.KafkaConsumerRecord) ObjectMapperTool(org.folio.dbschema.ObjectMapperTool) JobProfileSnapshotCache(org.folio.services.caches.JobProfileSnapshotCache) Qualifier(org.springframework.beans.factory.annotation.Qualifier) KafkaHeader(io.vertx.kafka.client.producer.KafkaHeader) LogManager(org.apache.logging.log4j.LogManager) EventManager(org.folio.processing.events.EventManager) Event(org.folio.rest.jaxrs.model.Event) OkapiConnectionParams(org.folio.dataimport.util.OkapiConnectionParams) EventProcessingException(org.folio.processing.exceptions.EventProcessingException) DataImportEventPayload(org.folio.DataImportEventPayload) EventProcessingException(org.folio.processing.exceptions.EventProcessingException)

Example 24 with Event

use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

From the class ParsedRecordChunksKafkaHandler, the method sendBackRecordsBatchResponse:

/**
 * Publishes the saved-records batch back to Kafka as a DI_PARSED_RECORDS_CHUNK_SAVED event.
 *
 * @param recordsBatchResponse batch of records just persisted
 * @param kafkaHeaders         headers copied onto the outgoing record (carry correlation ids)
 * @param tenantId             tenant the event belongs to
 * @param chunkNumber          sequence number of this chunk, used for logging
 * @param eventType            event type of the inbound chunk, used for logging only
 * @param commonRecord         inbound record whose headers hold recordId/chunkId for logging
 * @return future completed with the produced record's key, or failed with the write error
 */
private Future<String> sendBackRecordsBatchResponse(RecordsBatchResponse recordsBatchResponse, List<KafkaHeader> kafkaHeaders, String tenantId, int chunkNumber, String eventType, KafkaConsumerRecord<String, String> commonRecord) {
    // Wrap the normalized batch into the outgoing event envelope.
    Event event = new Event()
        .withId(UUID.randomUUID().toString())
        .withEventType(DI_PARSED_RECORDS_CHUNK_SAVED.value())
        .withEventPayload(Json.encode(normalize(recordsBatchResponse)))
        .withEventMetadata(new EventMetadata()
            .withTenantId(tenantId)
            .withEventTTL(1)
            .withPublishedBy(constructModuleName()));
    // Round-robin key spreads chunks across topic partitions.
    String key = String.valueOf(indexer.incrementAndGet() % maxDistributionNum);
    String topicName = KafkaTopicNameHelper.formatTopicName(kafkaConfig.getEnvId(), KafkaTopicNameHelper.getDefaultNameSpace(), tenantId, DI_PARSED_RECORDS_CHUNK_SAVED.value());
    KafkaProducerRecord<String, String> targetRecord = KafkaProducerRecord.create(topicName, key, Json.encode(event));
    targetRecord.addHeaders(kafkaHeaders);
    Promise<String> writePromise = Promise.promise();
    String producerName = DI_PARSED_RECORDS_CHUNK_SAVED + "_Producer";
    KafkaProducer<String, String> producer = KafkaProducer.createShared(Vertx.currentContext().owner(), producerName, kafkaConfig.getProducerProps());
    producer.write(targetRecord, war -> {
        // Release the shared producer regardless of the write outcome.
        producer.end(ear -> producer.close());
        if (war.succeeded()) {
            String recordId = extractValueFromHeaders(commonRecord.headers(), RECORD_ID_HEADER);
            String chunkId = extractValueFromHeaders(commonRecord.headers(), CHUNK_ID_HEADER);
            LOGGER.debug("RecordCollection processing has been completed with response sent... event: '{}', chunkId: '{}', chunkNumber '{}'-'{}' with recordId: '{}'", eventType, chunkId, chunkNumber, targetRecord.key(), recordId);
            writePromise.complete(targetRecord.key());
        } else {
            Throwable cause = war.cause();
            // Pass the Throwable as a trailing argument with no placeholder so Log4j2
            // records the stack trace (a "{}" placeholder would consume it as a string).
            LOGGER.error("{} write error", producerName, cause);
            writePromise.fail(cause);
        }
    });
    return writePromise.future();
}
Also used : Event(org.folio.rest.jaxrs.model.Event) EventMetadata(org.folio.rest.jaxrs.model.EventMetadata)

Example 25 with Event

use of org.folio.rest.jaxrs.model.Event in project mod-source-record-storage by folio-org.

From the class ParsedRecordChunksKafkaHandler, the method handle:

@Override
@Override
public Future<String> handle(KafkaConsumerRecord<String, String> targetRecord) {
    // Decode the envelope, then the RecordCollection it carries.
    Event event = Json.decodeValue(targetRecord.value(), Event.class);
    RecordCollection recordCollection = Json.decodeValue(event.getEventPayload(), RecordCollection.class);
    List<KafkaHeader> kafkaHeaders = targetRecord.headers();
    OkapiConnectionParams okapiConnectionParams = new OkapiConnectionParams(KafkaHeaderUtils.kafkaHeadersToMap(kafkaHeaders), vertx);
    String tenantId = okapiConnectionParams.getTenantId();
    // Correlation ids for logging travel in the Kafka headers.
    String recordId = extractValueFromHeaders(targetRecord.headers(), RECORD_ID_HEADER);
    String chunkId = extractValueFromHeaders(targetRecord.headers(), CHUNK_ID_HEADER);
    String key = targetRecord.key();
    int chunkNumber = chunkCounter.incrementAndGet();
    // NOTE(review): the payload is decoded a second time as DataImportEventPayload solely to
    // read an event type for logging — confirm this decode tolerates RecordCollection JSON.
    DataImportEventPayload eventPayload = Json.decodeValue(event.getEventPayload(), DataImportEventPayload.class);
    try {
        LOGGER.debug("RecordCollection has been received with event: '{}', chunkId: '{}', starting processing... chunkNumber '{}'-'{}' with recordId: '{}'' ", eventPayload.getEventType(), chunkId, chunkNumber, key, recordId);
        return recordService.saveRecords(recordCollection, tenantId).compose(recordsBatchResponse -> sendBackRecordsBatchResponse(recordsBatchResponse, kafkaHeaders, tenantId, chunkNumber, eventPayload.getEventType(), targetRecord));
    } catch (Exception e) {
        // Pass the exception as the trailing argument so the stack trace is logged
        // (it was previously dropped entirely from the error log).
        LOGGER.error("RecordCollection processing has failed with errors with event: '{}', chunkId: '{}', chunkNumber '{}'-'{}' with recordId: '{}' ", eventPayload.getEventType(), chunkId, chunkNumber, key, recordId, e);
        return Future.failedFuture(e);
    }
}
Also used : RecordCollection(org.folio.rest.jaxrs.model.RecordCollection) Event(org.folio.rest.jaxrs.model.Event) OkapiConnectionParams(org.folio.dataimport.util.OkapiConnectionParams) KafkaHeader(io.vertx.kafka.client.producer.KafkaHeader) DataImportEventPayload(org.folio.DataImportEventPayload)

Aggregations

Event (org.folio.rest.jaxrs.model.Event)90 Test (org.junit.Test)41 HashMap (java.util.HashMap)32 ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString)28 DataImportEventPayload (org.folio.DataImportEventPayload)27 KeyValue (net.mguenther.kafka.junit.KeyValue)22 Async (io.vertx.ext.unit.Async)21 KafkaConsumerRecord (io.vertx.kafka.client.consumer.KafkaConsumerRecord)16 OkapiConnectionParams (org.folio.dataimport.util.OkapiConnectionParams)16 Record (org.folio.rest.jaxrs.model.Record)14 LogManager (org.apache.logging.log4j.LogManager)12 Logger (org.apache.logging.log4j.Logger)12 ProfileSnapshotWrapper (org.folio.rest.jaxrs.model.ProfileSnapshotWrapper)12 Test (org.junit.jupiter.api.Test)12 KafkaHeader (io.vertx.kafka.client.producer.KafkaHeader)11 DuplicateEventException (org.folio.kafka.exception.DuplicateEventException)10 AbstractRestTest (org.folio.rest.impl.AbstractRestTest)10 RecordCollection (org.folio.rest.jaxrs.model.RecordCollection)10 RecordsBatchResponse (org.folio.rest.jaxrs.model.RecordsBatchResponse)10 Future (io.vertx.core.Future)9