Example usage of com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution in the Kylo project (by Teradata): class JpaBatchJobExecutionProvider, method markStreamingFeedAsStarted.
/**
 * Marks the latest job execution for the given streaming feed as started, if it is not
 * already running: sets STARTED status with an EXECUTING exit code, stamps the start and
 * last-updated times, persists the execution, and caches it as the latest streaming job
 * for the feed.
 *
 * @param feed the name of the streaming feed to mark as started
 */
public void markStreamingFeedAsStarted(String feed) {
BatchJobExecution jobExecution = findLatestJobForFeed(feed);
// ensure it's running; only transition when a job exists and is not already STARTED
if (jobExecution != null && !jobExecution.getStatus().equals(BatchJobExecution.JobStatus.STARTED)) {
log.info("Starting Streaming feed job {} for Feed {} ", jobExecution.getJobExecutionId(), feed);
jobExecution.setStatus(BatchJobExecution.JobStatus.STARTED);
jobExecution.setExitCode(ExecutionConstants.ExitCode.EXECUTING);
// capture a single timestamp so lastUpdated and startTime are identical,
// instead of two slightly different "now" values from separate calls
DateTime now = DateTimeUtil.getNowUTCTime();
// setLastUpdated is only exposed on the JPA implementation, hence the cast
((JpaBatchJobExecution) jobExecution).setLastUpdated(now);
jobExecution.setStartTime(now);
save(jobExecution);
latestStreamingJobByFeedName.put(feed, jobExecution);
}
}
Example usage of com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution in the Kylo project (by Teradata): class JpaBatchJobExecutionProvider, method findAll.
/**
 * Find all BatchJobExecution objects matching the provided filter string, returned as a page.
 * If the filter contains a criterion on the feed name, the query is delegated to
 * {@code findAllForFeed} so that any check-data jobs for that feed are included as well.
 *
 * @param filter   a filter string parseable by {@code GenericQueryDslFilter}
 * @param pageable paging and sorting information
 * @return a paged result set of all the job executions matching the incoming filter
 */
@Override
public Page<? extends BatchJobExecution> findAll(String filter, Pageable pageable) {
QJpaBatchJobExecution jobExecution = QJpaBatchJobExecution.jpaBatchJobExecution;
// if the filter contains a filter on the feed then delegate to the findAllForFeed method to include any check data jobs
List<SearchCriteria> searchCriterias = GenericQueryDslFilter.parseFilterString(filter);
// translate each criterion's key to its QueryDSL path, then look for one that targets the feed-name key;
// NOTE(review): withKey appears to return a copy, so the original criterion is retrievable via getPreviousSearchCriteria — confirm
SearchCriteria feedFilter = searchCriterias.stream().map(searchCriteria -> searchCriteria.withKey(CommonFilterTranslations.resolvedFilter(jobExecution, searchCriteria.getKey()))).filter(sc -> sc.getKey().equalsIgnoreCase(CommonFilterTranslations.jobExecutionFeedNameFilterKey)).findFirst().orElse(null);
if (feedFilter != null && feedFilter.getPreviousSearchCriteria() != null && !feedFilter.isValueCollection()) {
// remove the feed filter from the list and filter by this feed
// (the pre-translation criterion is what is actually present in searchCriterias)
searchCriterias.remove(feedFilter.getPreviousSearchCriteria());
String feedValue = feedFilter.getValue().toString();
// remove any quotes around the feedValue
feedValue = feedValue.replaceAll("^\"|\"$", "");
return findAllForFeed(feedValue, searchCriterias, pageable);
} else {
// no feed criterion: run the generic query, translating any sort keys to QueryDSL paths first
pageable = CommonFilterTranslations.resolveSortFilters(jobExecution, pageable);
QJpaBatchJobInstance jobInstancePath = new QJpaBatchJobInstance("jobInstance");
QJpaOpsManagerFeed feedPath = new QJpaOpsManagerFeed("feed");
// fetch-join instance and feed so the page can be rendered without lazy-load queries
return findAllWithFetch(jobExecution, GenericQueryDslFilter.buildFilter(jobExecution, filter).and(augment(feedPath.id)), pageable, QueryDslFetchJoin.innerJoin(jobExecution.nifiEventJobExecution), QueryDslFetchJoin.innerJoin(jobExecution.jobInstance, jobInstancePath), QueryDslFetchJoin.innerJoin(jobInstancePath.feed, feedPath));
}
}
Example usage of com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution in the Kylo project (by Teradata): class FeedFailureMetricAssessorTest, method testExistingBatchFailure.
/**
 * Tests when no new jobs have been run since the last time, but the last job execution was failed:
 * the assessor should fall back to the latest finished job and report FAILURE.
 */
@Test
public void testExistingBatchFailure() throws ParseException {
String feedName = "feed";
boolean isStream = false;
this.metric.setFeedName(feedName);
when(feedProvider.findByName(feedName)).thenReturn(newOpsManagerFeed(feedName, isStream));
DateTime startingEvent = DateTime.now().minusMinutes(5);
// no jobs have finished since the last assessment...
List<? extends BatchJobExecution> batchJobs = new ArrayList<>();
// ...but the most recent finished job for the feed was a failure
BatchJobExecution jobExecution = newBatchJob(feedName, startingEvent, true);
Mockito.when(this.jobExecutionProvider.findLatestFinishedJobForFeedSince(Mockito.anyString(), Mockito.any(DateTime.class))).thenAnswer(x -> batchJobs);
Mockito.when(this.jobExecutionProvider.findLatestFinishedJobForFeed(Mockito.anyString())).thenAnswer(x -> jobExecution);
this.assessor.assess(metric, this.builder);
verify(this.builder).result(AssessmentResult.FAILURE);
}
Example usage of com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution in the Kylo project (by Teradata): class FeedOnTimeArrivalMetricAssessorTest, method createFeedJobExecution.
/**
 * Builds a mock {@link BatchJobExecution} whose end time is the supplied value.
 *
 * @param endTime the end time the mock should report from {@code getEndTime()}
 * @return a Mockito mock stubbed with the given end time
 */
private BatchJobExecution createFeedJobExecution(DateTime endTime) {
BatchJobExecution jobExecution = mock(BatchJobExecution.class);
when(jobExecution.getEndTime()).thenReturn(endTime);
return jobExecution;
}
Example usage of com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution in the Kylo project (by Teradata): class ProvenanceEventReceiver, method notifyJobFinished.
/**
 * Notify that the Job is complete either as a successful job or failed Job
 * if its a streaming event notifications will go out every xx seconds default 5
 * if its a batch it will always notify
 *
 * @param event a provenance event
 */
private void notifyJobFinished(BatchJobExecution jobExecution, ProvenanceEventRecordDTO event) {
if (!isNotifyJobFinished(event)) {
log.debug("skipping job finished notification for feed: {}, isStream:{}, isFailure:{} ", event.getFeedName(), event.isStream(), event.isFailure());
return;
}
// register the event as being triggered
String mapKey = triggeredEventsKey(event);
completedJobEvents.put(mapKey, mapKey);
lastFeedFinishedNotificationCache.put(event.getFeedName(), DateTime.now());
metadataAccess.commit(() -> {
BatchJobExecution executionToNotify = jobExecution;
// batch jobs are failures when the execution failed; streaming jobs when the event itself is a failure
boolean isFailure = (!event.isStream() && executionToNotify.isFailed()) || (event.isStream() && event.isFailure());
if (isFailure) {
// requery for failure events as we need to access the map of data for alert generation
executionToNotify = batchJobExecutionProvider.findByJobExecutionId(jobExecution.getJobExecutionId(), false);
failedJob(executionToNotify, event);
} else {
successfulJob(executionToNotify, event);
}
}, MetadataAccess.SERVICE);
}
Aggregations