Use of com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed in project kylo by Teradata.
Example from the class JpaBatchJobExecutionProvider, method createJobInstance.
@Override
public BatchJobInstance createJobInstance(ProvenanceEventRecordDTO event) {
    // Build a new job instance keyed off the provenance event; the job name
    // is the feed name carried on the event.
    JpaBatchJobInstance jobInstance = new JpaBatchJobInstance();
    jobInstance.setJobKey(jobKeyGenerator(event));
    jobInstance.setJobName(event.getFeedName());
    // Wire the instance to its operational feed record before persisting.
    jobInstance.setFeed(opsManagerFeedRepository.findByName(event.getFeedName()));
    return this.jobInstanceRepository.save(jobInstance);
}
Use of com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed in project kylo by Teradata.
Example from the class FeedOpsUpgradeAction, method upgradeTo.
/* (non-Javadoc)
 * @see com.thinkbiganalytics.metadata.upgrade.UpgradeState#upgradeFrom(com.thinkbiganalytics.metadata.api.app.KyloVersion)
 */
@Override
public void upgradeTo(KyloVersion startingVersion) {
    log.info("Upgrading from version: " + startingVersion);

    // Touch each category's allowedActions so a missing entry is lazily created.
    for (Category category : categoryProvider.findAll()) {
        category.getAllowedActions();
    }

    // All feeds currently defined in Feed Manager.
    List<Feed> domainFeeds = feedProvider.findAll();
    if (domainFeeds == null || domainFeeds.isEmpty()) {
        return;
    }

    // Index the feeds by id and collect the matching ops-manager ids.
    Map<String, Feed> feedManagerFeedMap = new HashMap<>();
    List<OpsManagerFeed.ID> opsManagerFeedIds = new ArrayList<>();
    for (Feed fmFeed : domainFeeds) {
        String feedId = fmFeed.getId().toString();
        opsManagerFeedIds.add(opsManagerFeedProvider.resolveId(feedId));
        feedManagerFeedMap.put(feedId, fmFeed);
        // Touching allowedActions lazily creates it if absent.
        fmFeed.getAllowedActions();
    }

    // Remove feeds that already have an Operations Manager record.
    List<? extends OpsManagerFeed> opsManagerFeeds = opsManagerFeedProvider.findByFeedIds(opsManagerFeedIds);
    if (opsManagerFeeds != null) {
        for (OpsManagerFeed existing : opsManagerFeeds) {
            feedManagerFeedMap.remove(existing.getId().toString());
        }
    }

    // Whatever remains in the map has no ops record yet; create one per feed.
    List<OpsManagerFeed> feedsToAdd = new ArrayList<>();
    for (Feed missing : feedManagerFeedMap.values()) {
        String fullName = FeedNameUtil.fullName(missing.getCategory().getSystemName(), missing.getName());
        OpsManagerFeed.ID opsId = opsManagerFeedProvider.resolveId(missing.getId().toString());
        feedsToAdd.add(new JpaOpsManagerFeed(opsId, fullName));
    }

    log.info("Synchronizing Feeds from Feed Manager. About to insert {} feed ids/names into Operations Manager", feedsToAdd.size());
    opsManagerFeedProvider.save(feedsToAdd);
}
Use of com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed in project kylo by Teradata.
Example from the class DefaultJobService, method failJobExecution.
@Override
public void failJobExecution(Long executionId) {
    // Run the whole state transition inside a metadata commit so the
    // execution, its steps, and the failure notification persist together.
    metadataAccess.commit(() -> {
        BatchJobExecution execution = this.jobExecutionProvider.findByJobExecutionId(executionId, false);
        if (execution == null || execution.isFailed()) {
            // Nothing to do: unknown execution, or it is already failed.
            return execution;
        }

        // Force-fail every step that has not yet finished, appending an audit
        // line to its exit message.
        Set<BatchStepExecution> steps = execution.getStepExecutions();
        if (steps != null) {
            for (BatchStepExecution step : steps) {
                if (step.isFinished()) {
                    continue;
                }
                step.setStatus(BatchStepExecution.StepStatus.FAILED);
                step.setExitCode(ExecutionConstants.ExitCode.FAILED);
                String stepMsg = (step.getExitMessage() != null ? step.getExitMessage() + "\n" : "")
                                 + "Step manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone();
                step.setExitMessage(stepMsg);
                execution.setExitMessage(stepMsg);
            }
        }

        // Backfill missing timestamps and mark the job itself as failed.
        if (execution.getStartTime() == null) {
            execution.setStartTime(DateTimeUtil.getNowUTCTime());
        }
        execution.setStatus(BatchJobExecution.JobStatus.FAILED);
        if (execution.getEndTime() == null) {
            execution.setEndTime(DateTimeUtil.getNowUTCTime());
        }
        String jobMsg = (execution.getExitMessage() != null ? execution.getExitMessage() + "\n" : "")
                        + "Job manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone();
        execution.setExitMessage(jobMsg);

        // Persist and notify listeners of the manual failure.
        OpsManagerFeed feed = execution.getJobInstance().getFeed();
        this.jobExecutionProvider.save(execution);
        this.jobExecutionProvider.notifyFailure(execution, feed, false, "Job manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone());
        return execution;
    });
}
Use of com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed in project kylo by Teradata.
Example from the class NifiStatsJmsReceiver, method ensureStreamingJobExecutionRecord.
/**
 * Ensure a streaming feed that reports job activity in this stats batch has a
 * corresponding JobExecution record, by synthesizing a provenance event from the
 * stats and pushing it through the normal provenance-event receiver path.
 *
 * @param stats the per-processor stats summary for a feed
 */
private void ensureStreamingJobExecutionRecord(NifiFeedProcessorStats stats) {
    if (stats.getJobsStarted() > 0 || stats.getJobsFinished() > 0) {
        OpsManagerFeed feed = provenanceEventFeedUtil.getFeed(stats.getFeedName());
        // BUGFIX: getFeed() may return null for an unknown feed name (the same
        // lookup is null-checked in saveFeedStats); guard before dereferencing.
        if (feed != null && feed.isStream()) {
            // Build a synthetic provenance event mirroring the stats record.
            ProvenanceEventRecordDTO event = new ProvenanceEventRecordDTO();
            event.setEventId(stats.getMaxEventId());
            event.setEventTime(stats.getMinEventTime().getMillis());
            event.setEventDuration(stats.getDuration());
            event.setFlowFileUuid(stats.getLatestFlowFileId());
            event.setJobFlowFileId(stats.getLatestFlowFileId());
            event.setComponentId(stats.getProcessorId());
            event.setComponentName(stats.getProcessorName());
            event.setIsFailure(stats.getFailedCount() > 0L);
            event.setStream(feed.isStream());
            event.setIsStartOfJob(stats.getJobsStarted() > 0L);
            event.setIsFinalJobEvent(stats.getJobsFinished() > 0L);
            event.setFeedProcessGroupId(stats.getFeedProcessGroupId());
            event.setFeedName(stats.getFeedName());
            // Deliver the event through the standard receiver so the
            // JobExecution record is created by the usual machinery.
            ProvenanceEventRecordDTOHolder holder = new ProvenanceEventRecordDTOHolder();
            List<ProvenanceEventRecordDTO> events = new ArrayList<>();
            events.add(event);
            holder.setEvents(events);
            log.info("Ensuring Streaming Feed Event: {} has a respective JobExecution record ", event);
            provenanceEventReceiver.receiveEvents(holder);
        }
    }
}
Use of com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed in project kylo by Teradata.
Example from the class NifiStatsJmsReceiver, method saveFeedStats.
/**
 * Save the running totals for the feed.
 * <p>
 * Builds one JpaNifiFeedStats accumulator per feed name from two inputs: the
 * summary stats (latest activity timestamp per feed) and the holder's
 * running-flow counts per processor. The merged records are written via
 * nifiFeedStatisticsProvider, and streaming feeds are additionally marked
 * started/stopped based on their running-flow count.
 *
 * @param holder       the JMS stats payload (a V3 holder also carries its own timestamp)
 * @param summaryStats per-processor summary stats, may be null
 * @return map of feed name to the accumulated stats that were saved
 */
private Map<String, JpaNifiFeedStats> saveFeedStats(AggregatedFeedProcessorStatisticsHolderV2 holder, List<NifiFeedProcessorStats> summaryStats) {
Map<String, JpaNifiFeedStats> feedStatsMap = new HashMap<>();
if (summaryStats != null) {
// For each feed, keep the max of the per-stat minEventTime values as its latest activity time.
Map<String, Long> feedLatestTimestamp = summaryStats.stream().collect(Collectors.toMap(NifiFeedProcessorStats::getFeedName, stats -> stats.getMinEventTime().getMillis(), Long::max));
feedLatestTimestamp.entrySet().stream().forEach(e -> {
String feedName = e.getKey();
Long timestamp = e.getValue();
// Create (or reuse) the accumulator for this feed.
JpaNifiFeedStats stats = feedStatsMap.computeIfAbsent(feedName, name -> new JpaNifiFeedStats(feedName));
OpsManagerFeed opsManagerFeed = provenanceEventFeedUtil.getFeed(feedName);
// Feed lookup can miss (unknown feed name) — only set the id when found.
if (opsManagerFeed != null) {
stats.setFeedId(new JpaNifiFeedStats.OpsManagerFeedId(opsManagerFeed.getId().toString()));
}
stats.setLastActivityTimestamp(timestamp);
});
}
if (holder.getProcessorIdRunningFlows() != null) {
// Fold the per-processor running-flow counts into the same per-feed accumulators.
holder.getProcessorIdRunningFlows().entrySet().stream().forEach(e -> {
String feedProcessorId = e.getKey();
Long runningCount = e.getValue();
// Resolve the processor to a feed name; skip entries we cannot map.
String feedName = provenanceEventFeedUtil.getFeedName(feedProcessorId);
if (StringUtils.isNotBlank(feedName)) {
JpaNifiFeedStats stats = feedStatsMap.computeIfAbsent(feedName, name -> new JpaNifiFeedStats(feedName));
OpsManagerFeed opsManagerFeed = provenanceEventFeedUtil.getFeed(feedName);
if (opsManagerFeed != null) {
stats.setFeedId(new JpaNifiFeedStats.OpsManagerFeedId(opsManagerFeed.getId().toString()));
stats.setStream(opsManagerFeed.isStream());
}
stats.addRunningFeedFlows(runningCount);
// A V3 holder carries its own timestamp; otherwise fall back to "now".
if (holder instanceof AggregatedFeedProcessorStatisticsHolderV3) {
stats.setTime(((AggregatedFeedProcessorStatisticsHolderV3) holder).getTimestamp());
if (stats.getLastActivityTimestamp() == null) {
stats.setLastActivityTimestamp(((AggregatedFeedProcessorStatisticsHolderV3) holder).getTimestamp());
}
} else {
stats.setTime(DateTime.now().getMillis());
}
if (stats.getLastActivityTimestamp() == null) {
log.warn("The JpaNifiFeedStats.lastActivityTimestamp for the feed {} is NULL. The JMS Class was: {}", feedName, holder.getClass().getSimpleName());
}
}
});
}
// group stats to save together by feed name
if (!feedStatsMap.isEmpty()) {
// only save those that have changed
List<NifiFeedStats> updatedStats = feedStatsMap.entrySet().stream().map(e -> e.getValue()).collect(Collectors.toList());
// For streaming feeds: cache the latest stats and flip the feed's running
// state based on whether any flows are currently active.
updatedStats.stream().filter(s -> s.isStream()).forEach(stats -> {
latestStatsCache.put(stats.getFeedName(), (JpaNifiFeedStats) stats);
if (stats.getRunningFeedFlows() == 0L) {
batchJobExecutionProvider.markStreamingFeedAsStopped(stats.getFeedName());
} else {
batchJobExecutionProvider.markStreamingFeedAsStarted(stats.getFeedName());
}
});
nifiFeedStatisticsProvider.saveLatestFeedStats(updatedStats);
}
return feedStatsMap;
}
Aggregations