Usage example of com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder in the kylo project by Teradata: the addProvenance method of the ProvenanceRestController class.
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("add custom provenance events to a job")
@ApiResponses(@ApiResponse(code = 200, message = "add custom provenance events to a job", response = String.class))
public Response addProvenance(ProvenanceEventRecordDTOHolder eventRecordDTOHolder) {
    // Split the incoming payload: only non-streaming (batch) events are
    // forwarded to the provenance event receiver as job events.
    List<ProvenanceEventRecordDTO> incomingEvents = eventRecordDTOHolder.getEvents();
    List<ProvenanceEventRecordDTO> nonStreamingEvents = new ArrayList<>();
    for (ProvenanceEventRecordDTO incoming : incomingEvents) {
        if (!incoming.isStream()) {
            nonStreamingEvents.add(incoming);
        }
    }
    ProvenanceEventRecordDTOHolder batchEventEntity = new ProvenanceEventRecordDTOHolder();
    batchEventEntity.setEvents(nonStreamingEvents);
    // Statistics are aggregated over the full event list, streaming included.
    AggregatedFeedProcessorStatisticsHolder stats = GroupedStatsUtil.gatherStats(incomingEvents);
    log.info("Processing {} batch events", batchEventEntity);
    provenanceEventReceiver.receiveEvents(batchEventEntity);
    log.info("Processing {} stats ", stats);
    statsJmsReceiver.receiveTopic(stats);
    return Response.ok(new RestResponseStatus.ResponseStatusBuilder().message("Processed " + eventRecordDTOHolder).buildSuccess()).build();
}
Usage example of com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder in the kylo project by Teradata: the ensureStreamingJobExecutionRecord method of the NifiStatsJmsReceiver class.
private void ensureStreamingJobExecutionRecord(NifiFeedProcessorStats stats) {
    // Only relevant when this stats window actually started or finished jobs.
    if (stats.getJobsStarted() > 0 || stats.getJobsFinished() > 0) {
        OpsManagerFeed feed = provenanceEventFeedUtil.getFeed(stats.getFeedName());
        if (feed != null && feed.isStream()) {
            // Build a synthetic provenance event from the aggregated stats so
            // the streaming feed is guaranteed a matching JobExecution record.
            ProvenanceEventRecordDTO syntheticEvent = new ProvenanceEventRecordDTO();
            syntheticEvent.setEventId(stats.getMaxEventId());
            syntheticEvent.setEventTime(stats.getMinEventTime().getMillis());
            syntheticEvent.setEventDuration(stats.getDuration());
            syntheticEvent.setFlowFileUuid(stats.getLatestFlowFileId());
            syntheticEvent.setJobFlowFileId(stats.getLatestFlowFileId());
            syntheticEvent.setComponentId(stats.getProcessorId());
            syntheticEvent.setComponentName(stats.getProcessorName());
            syntheticEvent.setIsFailure(stats.getFailedCount() > 0L);
            syntheticEvent.setStream(feed.isStream());
            syntheticEvent.setIsStartOfJob(stats.getJobsStarted() > 0L);
            syntheticEvent.setIsFinalJobEvent(stats.getJobsFinished() > 0L);
            syntheticEvent.setFeedProcessGroupId(stats.getFeedProcessGroupId());
            syntheticEvent.setFeedName(stats.getFeedName());
            // Wrap the single event in a holder and hand it to the receiver.
            List<ProvenanceEventRecordDTO> eventList = new ArrayList<>();
            eventList.add(syntheticEvent);
            ProvenanceEventRecordDTOHolder eventHolder = new ProvenanceEventRecordDTOHolder();
            eventHolder.setEvents(eventList);
            log.debug("Ensuring Streaming Feed Event: {} has a respective JobExecution record ", syntheticEvent);
            provenanceEventReceiver.receiveEvents(eventHolder);
        }
    }
}
Usage example of com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder in the kylo project by Teradata: the sendEvents method of the KyloKafkaProvenanceEventService class.
@Override
public void sendEvents(List<ProvenanceEventRecordDTO> events) throws ProvenanceException {
    try {
        List<Future<RecordMetadata>> ackFutures = new ArrayList<>();
        // Only non-streaming (batch) events are published on the batch topic.
        List<ProvenanceEventRecordDTO> nonStreamingEvents = new ArrayList<>();
        for (ProvenanceEventRecordDTO candidate : events) {
            if (!candidate.isStream()) {
                nonStreamingEvents.add(candidate);
            }
        }
        ProvenanceEventRecordDTOHolder eventRecordDTOHolder = new ProvenanceEventRecordDTOHolder();
        eventRecordDTOHolder.setEvents(nonStreamingEvents);
        ProducerRecord<byte[], byte[]> eventsMessage =
            new ProducerRecord<>(KYLO_BATCH_EVENT_TOPIC, SerializationUtils.serialize(eventRecordDTOHolder));
        log.info("Sending {} events to Kafka ", eventRecordDTOHolder);
        ackFutures.add(kafkaProducer.send(eventsMessage));
        // Statistics cover the full event list, streaming events included.
        AggregatedFeedProcessorStatisticsHolder stats = GroupedStatsUtil.gatherStats(events);
        ProducerRecord<byte[], byte[]> statsMessage =
            new ProducerRecord<>(KYLO_EVENT_STATS_TOPIC, SerializationUtils.serialize(stats));
        ackFutures.add(kafkaProducer.send(statsMessage));
        // Block until the broker acknowledges both messages.
        processAcks(ackFutures);
    } catch (Exception e) {
        throw new ProvenanceException(e);
    }
}
Usage example of com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder in the kylo project by Teradata: the sendEvents method of the KyloJmsProvenanceEventService class.
private void sendEvents(String jmsUrl, List<ProvenanceEventRecordDTO> events) throws Exception {
    // Only non-streaming (batch) events go out on the batch event queue.
    List<ProvenanceEventRecordDTO> nonStreamingEvents = new ArrayList<>();
    for (ProvenanceEventRecordDTO candidate : events) {
        if (!candidate.isStream()) {
            nonStreamingEvents.add(candidate);
        }
    }
    ProvenanceEventRecordDTOHolder eventRecordDTOHolder = new ProvenanceEventRecordDTOHolder();
    eventRecordDTOHolder.setEvents(nonStreamingEvents);
    // Statistics are gathered over every event, streaming included.
    AggregatedFeedProcessorStatisticsHolder stats = GroupedStatsUtil.gatherStats(events);
    log.info("Sending {} events to JMS ", eventRecordDTOHolder);
    sendKyloBatchEventMessage(jmsUrl, eventRecordDTOHolder);
    sendKyloEventStatisticsMessage(jmsUrl, stats);
    log.info("Events successfully sent to JMS");
}
Usage example of com.thinkbiganalytics.nifi.provenance.model.ProvenanceEventRecordDTOHolder in the kylo project by Teradata: the sendEvents method of the KyloRestProvenanceEventService class.
@Override
public void sendEvents(List<ProvenanceEventRecordDTO> events) throws ProvenanceException {
    // Wrap the events in a holder and POST them to Kylo's provenance endpoint.
    ProvenanceEventRecordDTOHolder holder = new ProvenanceEventRecordDTOHolder();
    holder.setEvents(events);
    restClient.post(PROVENANCE_REST_PATH, holder);
}
Aggregations