Use of com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecution in project kylo by Teradata.
The class JpaBatchJobExecution, method failJob.
/**
 * Complete a job and mark it as failed, setting its status to {@link com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution.JobStatus#FAILED}
 */
public void failJob() {
    StringBuffer stringBuffer = null;
    String stepExecutionFailuresString = "Step Execution Failures:\n";
    setStatus(JpaBatchJobExecution.JobStatus.FAILED);
    setExitCode(ExecutionConstants.ExitCode.FAILED);
    if (endTime == null) {
        endTime = DateTimeUtil.getNowUTCTime();
    }
    Set<BatchStepExecution> steps = getStepExecutions();
    String existingExitMessage = getExitMessage();
    if (StringUtils.isNotBlank(existingExitMessage)) {
        existingExitMessage = StringUtils.substringBefore(existingExitMessage, stepExecutionFailuresString);
    }
    if (steps != null) {
        for (BatchStepExecution se : steps) {
            if (BatchStepExecution.StepStatus.FAILED.equals(se.getStatus())) {
                if (stringBuffer == null) {
                    stringBuffer = new StringBuffer(stepExecutionFailuresString);
                } else {
                    stringBuffer.append("\n");
                }
                stringBuffer.append("Failed Step " + se.getStepName());
            }
            if (se.getEndTime() == null) {
                ((JpaBatchStepExecution) se).setEndTime(DateTimeUtil.getNowUTCTime());
            }
        }
        if (stringBuffer != null) {
            // prepend any existing exit message, then append the step failure summary
            // (parentheses around the ternary keep the concatenation from applying only to the empty-string branch)
            String msg = (existingExitMessage != null ? existingExitMessage + "\n" : "") + stringBuffer.toString();
            setExitMessage(msg);
        }
    }
}
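A minimal usage sketch (not taken from the Kylo sources) of what failJob() is expected to do; the no-argument JpaBatchJobExecution constructor, the step name, and the commented result accessors are assumptions based on the snippets on this page:

// Hypothetical sketch; constructors and setters mirror what the snippets on this page use.
JpaBatchJobExecution job = new JpaBatchJobExecution();            // assumption: a no-arg constructor exists
JpaBatchStepExecution failedStep = new JpaBatchStepExecution();
failedStep.setStepName("Validate Records");                       // illustrative step name
failedStep.setStatus(BatchStepExecution.StepStatus.FAILED);
Set<BatchStepExecution> steps = new HashSet<>();
steps.add(failedStep);
job.setStepExecutions(steps);                                     // setter also used in createStepExecution below

job.failJob();
// After the call:
//   getStatus()      -> JobStatus.FAILED
//   getExitCode()    -> ExecutionConstants.ExitCode.FAILED
//   getExitMessage() -> contains "Step Execution Failures:" and "Failed Step Validate Records"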
Use of com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecution in project kylo by Teradata.
The class DefaultJobService, method failJobExecution.
@Override
public void failJobExecution(Long executionId) {
    metadataAccess.commit(() -> {
        BatchJobExecution execution = this.jobExecutionProvider.findByJobExecutionId(executionId, false);
        if (execution != null && !execution.isFailed()) {
            Set<BatchStepExecution> steps = execution.getStepExecutions();
            if (steps != null) {
                for (BatchStepExecution step : steps) {
                    if (!step.isFinished()) {
                        step.setStatus(BatchStepExecution.StepStatus.FAILED);
                        step.setExitCode(ExecutionConstants.ExitCode.FAILED);
                        String msg = step.getExitMessage() != null ? step.getExitMessage() + "\n" : "";
                        msg += "Step manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone();
                        step.setExitMessage(msg);
                        execution.setExitMessage(msg);
                    }
                }
            }
            if (execution.getStartTime() == null) {
                execution.setStartTime(DateTimeUtil.getNowUTCTime());
            }
            execution.setStatus(BatchJobExecution.JobStatus.FAILED);
            if (execution.getEndTime() == null) {
                execution.setEndTime(DateTimeUtil.getNowUTCTime());
            }
            String msg = execution.getExitMessage() != null ? execution.getExitMessage() + "\n" : "";
            msg += "Job manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone();
            execution.setExitMessage(msg);
            OpsManagerFeed feed = execution.getJobInstance().getFeed();
            this.jobExecutionProvider.save(execution);
            this.jobExecutionProvider.notifyFailure(execution, feed, false, "Job manually failed @ " + DateTimeUtil.getNowFormattedWithTimeZone());
        }
        return execution;
    });
}
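A hypothetical caller sketch (not from the Kylo sources) showing how the service method might be driven; the JobService interface name and the injection style are assumptions:

@Inject
private JobService jobService;   // assumption: DefaultJobService implements a JobService interface

public void manuallyFailJob(Long jobExecutionId) {
    // Marks any unfinished steps and the job itself as FAILED inside a single metadata transaction.
    jobService.failJobExecution(jobExecutionId);
}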
Use of com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecution in project kylo by Teradata.
The class JpaBatchJobExecution, method completeOrFailJob.
/**
 * Finish the job and update the status and end time as being completed, or failed, based upon the status of the {@link BatchStepExecution}s
 */
public void completeOrFailJob() {
    boolean failedJob = false;
    Set<BatchStepExecution> steps = getStepExecutions();
    if (steps != null) {
        for (BatchStepExecution se : steps) {
            if (BatchStepExecution.StepStatus.FAILED.equals(se.getStatus())) {
                failedJob = true;
                break;
            }
        }
    }
    if (failedJob) {
        failJob();
    } else {
        completeJob();
    }
}
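The loop in completeOrFailJob amounts to an any-match over the step statuses; an equivalent stream-based check (a sketch, not in the Kylo sources) would be:

boolean failedJob = steps != null
    && steps.stream().anyMatch(se -> BatchStepExecution.StepStatus.FAILED.equals(se.getStatus()));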
Use of com.thinkbiganalytics.metadata.api.jobrepo.step.BatchStepExecution in project kylo by Teradata.
The class JpaBatchStepExecutionProvider, method createStepExecution.
// Drop on SetSavepoint indicates release on parent flowfile id
public BatchStepExecution createStepExecution(BatchJobExecution jobExecution, ProvenanceEventRecordDTO event) {
    // only create the step if it doesn't exist yet for this event
    JpaBatchStepExecution stepExecution = batchStepExecutionRepository.findByProcessorAndJobFlowFile(event.getComponentId(), event.getJobFlowFileId());
    if (stepExecution == null) {
        if (!"KYLO".equalsIgnoreCase(event.getEventType())) {
            stepExecution = new JpaBatchStepExecution();
            stepExecution.setJobExecution(jobExecution);
            stepExecution.setStartTime(
                event.getStartTime() != null ? DateTimeUtil.convertToUTC(event.getStartTime())
                                             : DateTimeUtil.convertToUTC(event.getEventTime()).minus(event.getEventDuration()));
            stepExecution.setEndTime(DateTimeUtil.convertToUTC(event.getEventTime()));
            stepExecution.setStepName(event.getComponentName());
            if (StringUtils.isBlank(stepExecution.getStepName())) {
                stepExecution.setStepName("Unknown Step ");
            }
            log.info("New Step Execution {} on Job: {} using event {} ", stepExecution.getStepName(), jobExecution.getJobExecutionId(), event.getEventId());
            boolean failure = event.isFailure();
            if (failure) {
                // notify failure listeners
                failStep(jobExecution, stepExecution, event.getFlowFileUuid(), event.getComponentId());
                if (StringUtils.isBlank(stepExecution.getExitMessage())) {
                    stepExecution.setExitMessage(event.getDetails());
                }
            } else {
                stepExecution.completeStep();
            }
            // add in execution contexts
            assignStepExecutionContextMap(event, stepExecution);
            // Attach the NifiEvent object to this StepExecution
            JpaNifiEventStepExecution eventStepExecution = new JpaNifiEventStepExecution(jobExecution, stepExecution, event.getEventId(), event.getJobFlowFileId());
            eventStepExecution.setComponentId(event.getComponentId());
            eventStepExecution.setJobFlowFileId(event.getJobFlowFileId());
            stepExecution.setNifiEventStepExecution(eventStepExecution);
            Set<BatchStepExecution> steps = jobExecution.getStepExecutions();
            if (steps == null) {
                ((JpaBatchJobExecution) jobExecution).setStepExecutions(new HashSet<>());
            }
            // saving the StepExecution will cascade and save the nifiEventStep
            stepExecution = batchStepExecutionRepository.save(stepExecution);
            jobExecution.getStepExecutions().add(stepExecution);
        }
    } else {
        log.info("Updating step {} ", event.getComponentName());
        // update it
        assignStepExecutionContextMap(event, stepExecution);
        // update the timing info
        Long originatingNiFiEventId = stepExecution.getNifiEventStepExecution().getEventId();
        // only update the end time if the event id is greater than the originating one
        if (event.getEventId() > originatingNiFiEventId) {
            DateTime newEndTime = DateTimeUtil.convertToUTC(event.getEventTime());
            if (newEndTime.isAfter(stepExecution.getEndTime())) {
                stepExecution.setEndTime(newEndTime);
            }
        } else {
            DateTime newStartTime = DateTimeUtil.convertToUTC(event.getStartTime());
            if (newStartTime.isBefore(stepExecution.getStartTime())) {
                stepExecution.setStartTime(newStartTime);
            }
        }
        boolean failure = event.isFailure();
        if (failure) {
            // notify failure listeners
            log.info("Failing Step");
            failStep(jobExecution, stepExecution, event.getFlowFileUuid(), event.getComponentId());
            if (StringUtils.isBlank(stepExecution.getExitMessage())) {
                stepExecution.setExitMessage(event.getDetails());
            }
        }
        stepExecution = batchStepExecutionRepository.save(stepExecution);
    }
    return stepExecution;
}
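When the provenance event carries no explicit start time, the step's start is derived by subtracting the event duration from the event time. A small Joda-Time sketch of that fallback (the timestamps are illustrative, not from the sources):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

// An event that completed at 10:00:05 UTC, reports a 5 second duration, and has no start time.
DateTime eventTime = new DateTime(2018, 1, 1, 10, 0, 5, DateTimeZone.UTC);
long eventDurationMillis = 5000L;

// Mirrors the fallback branch in setStartTime above: start = eventTime - duration.
DateTime derivedStartTime = eventTime.minus(eventDurationMillis);
// derivedStartTime is 2018-01-01T10:00:00.000Z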