Use of com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedStats in project kylo by Teradata.
The class OpsFeedManagerFeedProvider, method getLastActiveTimeStamp:
@Override
public DateTime getLastActiveTimeStamp(String feedName) {
    DateTime lastFeedTime = null;
    OpsManagerFeed feed = this.findByName(feedName);
    if (feed.isStream()) {
        NifiFeedStats feedStats = metadataAccess.read(() -> nifiFeedStatisticsProvider.findLatestStatsForFeed(feedName));
        if (feedStats != null && feedStats.getLastActivityTimestamp() != null) {
            lastFeedTime = new DateTime(feedStats.getLastActivityTimestamp());
        } else {
            log.warn("feedStats.getLastActivityTimestamp is null for streaming feed {} ", feedName);
        }
    } else {
        BatchJobExecution jobExecution = metadataAccess.read(() -> batchJobExecutionProvider.findLatestCompletedJobForFeed(feedName));
        if (jobExecution != null) {
            lastFeedTime = jobExecution.getEndTime();
        }
    }
    return lastFeedTime;
}
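A minimal caller sketch for getLastActiveTimeStamp (illustrative only; the feedProvider field, the feed name, and the three-hour inactivity threshold are assumptions, not Kylo code):

// find the most recent activity for a feed and warn if it looks idle
DateTime lastActive = feedProvider.getLastActiveTimeStamp("users.example_feed");
if (lastActive == null) {
    log.info("No activity has been recorded yet for feed {}", "users.example_feed");
} else if (lastActive.isBefore(DateTime.now().minusHours(3))) {
    // hypothetical alerting rule: warn when a feed has been idle for more than three hours
    log.warn("Feed {} has been inactive since {}", "users.example_feed", lastActive);
}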
Use of com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedStats in project kylo by Teradata.
The class NifiStatsJmsReceiver, method saveFeedStats:
/**
* Save the running totals for the feed
*/
private Map<String, JpaNifiFeedStats> saveFeedStats(AggregatedFeedProcessorStatisticsHolderV2 holder, List<NifiFeedProcessorStats> summaryStats) {
    Map<String, JpaNifiFeedStats> feedStatsMap = new HashMap<>();
    if (summaryStats != null) {
        // keep the newest event time seen for each feed; Long::max resolves collisions on the feed name key
        Map<String, Long> feedLatestTimestamp = summaryStats.stream().collect(Collectors.toMap(NifiFeedProcessorStats::getFeedName, stats -> stats.getMinEventTime().getMillis(), Long::max));
        feedLatestTimestamp.entrySet().stream().forEach(e -> {
            String feedName = e.getKey();
            Long timestamp = e.getValue();
            JpaNifiFeedStats stats = feedStatsMap.computeIfAbsent(feedName, name -> new JpaNifiFeedStats(feedName));
            OpsManagerFeed opsManagerFeed = provenanceEventFeedUtil.getFeed(feedName);
            if (opsManagerFeed != null) {
                stats.setFeedId(new JpaNifiFeedStats.OpsManagerFeedId(opsManagerFeed.getId().toString()));
            }
            stats.setLastActivityTimestamp(timestamp);
        });
    }
    if (holder.getProcessorIdRunningFlows() != null) {
        holder.getProcessorIdRunningFlows().entrySet().stream().forEach(e -> {
            String feedProcessorId = e.getKey();
            Long runningCount = e.getValue();
            // ensure the processor id resolves to a feed name before recording stats
            String feedName = provenanceEventFeedUtil.getFeedName(feedProcessorId);
            if (StringUtils.isNotBlank(feedName)) {
                JpaNifiFeedStats stats = feedStatsMap.computeIfAbsent(feedName, name -> new JpaNifiFeedStats(feedName));
                OpsManagerFeed opsManagerFeed = provenanceEventFeedUtil.getFeed(feedName);
                if (opsManagerFeed != null) {
                    stats.setFeedId(new JpaNifiFeedStats.OpsManagerFeedId(opsManagerFeed.getId().toString()));
                    stats.setStream(opsManagerFeed.isStream());
                }
                stats.addRunningFeedFlows(runningCount);
                if (holder instanceof AggregatedFeedProcessorStatisticsHolderV3) {
                    stats.setTime(((AggregatedFeedProcessorStatisticsHolderV3) holder).getTimestamp());
                    if (stats.getLastActivityTimestamp() == null) {
                        stats.setLastActivityTimestamp(((AggregatedFeedProcessorStatisticsHolderV3) holder).getTimestamp());
                    }
                } else {
                    stats.setTime(DateTime.now().getMillis());
                }
                if (stats.getLastActivityTimestamp() == null) {
                    log.warn("The JpaNifiFeedStats.lastActivityTimestamp for the feed {} is NULL. The JMS Class was: {}", feedName, holder.getClass().getSimpleName());
                }
            }
        });
    }
    // group stats to save together by feed name
    if (!feedStatsMap.isEmpty()) {
        // only save those that have changed
        List<NifiFeedStats> updatedStats = feedStatsMap.entrySet().stream().map(e -> e.getValue()).collect(Collectors.toList());
        // for streaming feeds, cache the latest stats and mark the feed stopped when no flows are running, started otherwise
        updatedStats.stream().filter(s -> s.isStream()).forEach(stats -> {
            latestStatsCache.put(stats.getFeedName(), (JpaNifiFeedStats) stats);
            if (stats.getRunningFeedFlows() == 0L) {
                batchJobExecutionProvider.markStreamingFeedAsStopped(stats.getFeedName());
            } else {
                batchJobExecutionProvider.markStreamingFeedAsStarted(stats.getFeedName());
            }
        });
        nifiFeedStatisticsProvider.saveLatestFeedStats(updatedStats);
    }
    return feedStatsMap;
}
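The Collectors.toMap call above relies on the Long::max merge function to keep only the newest timestamp when several summary rows belong to the same feed. A self-contained sketch of that merge behavior (the row data and values are made up for illustration):

import java.util.AbstractMap;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapMergeExample {
    public static void main(String[] args) {
        // each entry stands in for one NifiFeedProcessorStats row: (feed name, min event time in millis)
        List<Map.Entry<String, Long>> rows = Arrays.asList(
            new AbstractMap.SimpleEntry<String, Long>("feedA", 100L),
            new AbstractMap.SimpleEntry<String, Long>("feedA", 250L),
            new AbstractMap.SimpleEntry<String, Long>("feedB", 90L));
        // Long::max resolves key collisions, so each feed keeps its newest timestamp
        Map<String, Long> latest = rows.stream()
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, Long::max));
        System.out.println(latest); // {feedA=250, feedB=90}
    }
}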
Use of com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedStats in project kylo by Teradata.
The class NifiFeedProcessorStatisticsRestControllerV2, method findFeedStats:
@GET
@Path("/{feedName}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Gets the statistics for the specified feed.")
@ApiResponses(@ApiResponse(code = 200, message = "Returns the feed statistics.", response = com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats.class, responseContainer = "List"))
public Response findFeedStats(@PathParam("feedName") String feedName, @QueryParam("from") Long fromMillis, @QueryParam("to") Long toMillis) {
    this.accessController.checkPermission(AccessController.SERVICES, OperationsAccessControl.ACCESS_OPS);
    final DateTime endTime = getToDateTime(toMillis);
    final DateTime startTime = getFromDateTime(fromMillis);
    return metadataAccess.read(() -> {
        NiFiFeedProcessorStatsContainer statsContainer = new NiFiFeedProcessorStatsContainer(startTime, endTime);
        NifiFeedStats feedStats = nifiFeedStatisticsProvider.findLatestStatsForFeed(feedName);
        List<? extends NifiFeedProcessorStats> list = statsProvider.findForFeedStatisticsGroupedByTime(feedName, statsContainer.getStartTime(), statsContainer.getEndTime());
        List<com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats> model = NifiFeedProcessorStatsTransform.toModel(list);
        statsContainer.setStats(model);
        if (feedStats != null) {
            statsContainer.setRunningFlows(feedStats.getRunningFeedFlows());
        } else {
            // no latest feed stats record: estimate running flows as jobs started minus jobs finished, floored at zero
            Long started = model.stream().mapToLong(s -> s.getJobsStarted()).sum();
            Long finished = model.stream().mapToLong(s -> s.getJobsFinished()).sum();
            Long running = started - finished;
            if (running < 0) {
                running = 0L;
            }
            statsContainer.setRunningFlows(running);
        }
        return Response.ok(statsContainer).build();
    });
}
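A hedged JAX-RS client sketch for calling this endpoint. The base URL and resource root are assumptions; only the {feedName} path segment and the from/to query parameters come from the controller above:

Client client = ClientBuilder.newClient();
NiFiFeedProcessorStatsContainer statsContainer = client
    .target("http://localhost:8400/proxy/v1/provenance-stats")   // hypothetical base URL and resource root
    .path("my.example_feed")
    .queryParam("from", DateTime.now().minusHours(1).getMillis())
    .queryParam("to", DateTime.now().getMillis())
    .request(MediaType.APPLICATION_JSON)
    .get(NiFiFeedProcessorStatsContainer.class);
client.close();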
Use of com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedStats in project kylo by Teradata.
The class OpsFeedManagerFeedProvider, method save:
@Override
public OpsManagerFeed save(OpsManagerFeed.ID feedId, String systemName, boolean isStream, Long timeBetweenBatchJobs) {
    OpsManagerFeed feed = repository.findByIdWithoutAcl(feedId);
    if (feed == null) {
        if (ensureUniqueFeedName) {
            ensureAndRemoveDuplicateFeedsWithTheSameName(systemName, feedId);
        }
        feed = new JpaOpsManagerFeed();
        ((JpaOpsManagerFeed) feed).setName(systemName);
        ((JpaOpsManagerFeed) feed).setId((OpsManagerFeedId) feedId);
        ((JpaOpsManagerFeed) feed).setStream(isStream);
        ((JpaOpsManagerFeed) feed).setTimeBetweenBatchJobs(timeBetweenBatchJobs);
        NifiFeedStats stats = feedStatisticsProvider.findLatestStatsForFeedWithoutAccessControl(systemName);
        if (stats == null) {
            JpaNifiFeedStats newStats = new JpaNifiFeedStats(systemName, new JpaNifiFeedStats.OpsManagerFeedId(feedId.toString()));
            newStats.setRunningFeedFlows(0L);
            feedStatisticsProvider.saveLatestFeedStats(Lists.newArrayList(newStats));
        }
    } else {
        ((JpaOpsManagerFeed) feed).setStream(isStream);
        ((JpaOpsManagerFeed) feed).setTimeBetweenBatchJobs(timeBetweenBatchJobs);
    }
    feed = save(feed);
    return feed;
}
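A brief caller sketch for save(...). The feedProvider field, the feed ids, the feed names, and the assumption that timeBetweenBatchJobs is expressed in milliseconds are all placeholders; only the method signature comes from the code above:

// register (or update) a streaming feed; the batch-spacing value is presumably irrelevant for streams, so pass null
OpsManagerFeed streamingFeed = feedProvider.save(feedId, "sensors.telemetry_ingest", true, null);

// register a batch feed with a minimum gap between job executions (60000L, assuming milliseconds)
OpsManagerFeed batchFeed = feedProvider.save(otherFeedId, "warehouse.daily_load", false, 60000L);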
Use of com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedStats in project kylo by Teradata.
The class JpaBatchJobExecutionProvider, method getJobStatusCount:
/**
 * Get the count of jobs grouped by status.
 * Streaming feeds are counted once if they are running, regardless of the number of active flows.
 */
@Override
public List<JobStatusCount> getJobStatusCount(String filter) {
    QJpaBatchJobExecution jobExecution = QJpaBatchJobExecution.jpaBatchJobExecution;
    QJpaBatchJobInstance jobInstance = QJpaBatchJobInstance.jpaBatchJobInstance;
    QJpaOpsManagerFeed feed = QJpaOpsManagerFeed.jpaOpsManagerFeed;
    BooleanBuilder whereBuilder = new BooleanBuilder();
    if (StringUtils.isNotBlank(filter)) {
        whereBuilder.and(GenericQueryDslFilter.buildFilter(jobExecution, filter));
    }
    ConstructorExpression<JpaBatchJobExecutionStatusCounts> expr =
        Projections.constructor(JpaBatchJobExecutionStatusCounts.class,
                                JobStatusDslQueryExpressionBuilder.jobState().as("status"),
                                jobExecution.jobExecutionId.count().as("count"));
    JPAQuery<?> query = factory.select(expr)
        .from(jobExecution)
        .innerJoin(jobInstance).on(jobExecution.jobInstance.jobInstanceId.eq(jobInstance.jobInstanceId))
        .innerJoin(feed).on(jobInstance.feed.id.eq(feed.id))
        .where(whereBuilder.and(feed.isStream.eq(false))
                   .and(FeedAclIndexQueryAugmentor.generateExistsExpression(feed.id, controller.isEntityAccessControlled())))
        .groupBy(jobExecution.status);
    List<JobStatusCount> stats = (List<JobStatusCount>) query.fetch();
    // merge in streaming feed stats: each running streaming feed adds one to the RUNNING count
    List<? extends NifiFeedStats> streamingFeedStats = feedStatisticsProvider.findFeedStats(true);
    if (streamingFeedStats != null) {
        if (stats == null) {
            stats = new ArrayList<>();
        }
        Long runningCount = streamingFeedStats.stream().filter(s -> s.getRunningFeedFlows() > 0L).count();
        if (runningCount > 0) {
            JobStatusCount runningStatusCount = stats.stream().filter(s -> s.getStatus().equalsIgnoreCase(BatchJobExecution.RUNNING_DISPLAY_STATUS)).findFirst().orElse(null);
            if (runningStatusCount != null) {
                runningCount = runningStatusCount.getCount() + runningCount;
                runningStatusCount.setCount(runningCount);
            } else {
                JpaBatchJobExecutionStatusCounts runningStreamingFeedCounts = new JpaBatchJobExecutionStatusCounts();
                runningStreamingFeedCounts.setCount(runningCount);
                runningStreamingFeedCounts.setStatus(BatchJobExecution.RUNNING_DISPLAY_STATUS);
                stats.add(runningStreamingFeedCounts);
            }
        }
    }
    return stats;
}
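A hypothetical consumer of getJobStatusCount (the batchJobExecutionProvider field, the null filter, and the example numbers are assumptions). Per the Javadoc above, each running streaming feed contributes exactly one to the RUNNING count no matter how many flows it has active:

List<JobStatusCount> counts = batchJobExecutionProvider.getJobStatusCount(null);
// e.g. COMPLETED -> 12, FAILED -> 2, RUNNING -> 7 (3 running batch jobs + 4 running streaming feeds)
counts.forEach(c -> log.info("{} -> {}", c.getStatus(), c.getCount()));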