Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class ActionKillXCommand, method execute:
@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED WorkflowActionKillXCommand for action " + actionId);
    if (wfAction.isPending()) {
        ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(wfAction.getType());
        if (executor != null) {
            ActionExecutorContext context = null;
            try {
                boolean isRetry = false;
                boolean isUserRetry = false;
                context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
                incrActionCounter(wfAction.getType(), 1);
                Instrumentation.Cron cron = new Instrumentation.Cron();
                cron.start();
                executor.kill(context, wfAction);
                cron.stop();
                addActionCron(wfAction.getType(), cron);
                wfAction.resetPending();
                wfAction.setStatus(WorkflowActionBean.Status.KILLED);
                wfAction.setEndTime(new Date());
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
                wfJob.setLastModifiedTime(new Date());
                updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_MODTIME, wfJob));
                // Add SLA status event (KILLED) for WF_ACTION
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(),
                        Status.KILLED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                queue(new WorkflowNotificationXCommand(wfJob, wfAction));
            }
            catch (ActionExecutorException ex) {
                wfAction.resetPending();
                wfAction.setStatus(WorkflowActionBean.Status.FAILED);
                wfAction.setErrorInfo(ex.getErrorCode().toString(), "KILL COMMAND FAILED - exception while executing job kill");
                wfAction.setEndTime(new Date());
                wfJob.setStatus(WorkflowJobBean.Status.KILLED);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
                wfJob.setLastModifiedTime(new Date());
                updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_MODTIME, wfJob));
                // What will happen to WF and COORD_ACTION, NOTIFICATION?
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(),
                        Status.FAILED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                LOG.warn("Exception while executing kill(). Error Code [{0}], Message[{1}]", ex.getErrorCode(), ex.getMessage(), ex);
            }
            finally {
                try {
                    cleanupActionDir(context);
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
                    if (!(executor instanceof ControlNodeActionExecutor) && EventHandlerService.isEnabled()) {
                        generateEvent(wfAction, wfJob.getUser());
                    }
                }
                catch (JPAExecutorException e) {
                    throw new CommandException(e);
                }
            }
        }
    }
    LOG.debug("ENDED WorkflowActionKillXCommand for action " + actionId);
    return null;
}
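The pattern above recurs throughout the XCommands: database side effects are accumulated as UpdateEntry and JsonBean objects and flushed in one BatchQueryExecutor call rather than written row by row. A minimal sketch of that pattern, reusing only the query constants and calls that appear in the snippet; the class and method names of the sketch itself are illustrative, not part of Oozie.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.rest.JsonBean;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

public class BatchUpdateSketch {

    // wfJob and wfAction are assumed to be beans already loaded by the command's load phase.
    void persistKillResult(WorkflowJobBean wfJob, WorkflowActionBean wfAction) throws JPAExecutorException {
        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        List<JsonBean> insertList = new ArrayList<JsonBean>();

        // Each UpdateEntry pairs a named query constant with the bean it should flush.
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_MODTIME, wfJob));

        // One batch call persists all queued inserts and updates together.
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
    }
}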
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class ResumeXCommand, method execute:
@Override
protected Void execute() throws CommandException {
    try {
        if (workflow.getStatus() == WorkflowJob.Status.SUSPENDED) {
            InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
            workflow.getWorkflowInstance().resume();
            WorkflowInstance wfInstance = workflow.getWorkflowInstance();
            ((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.RUNNING);
            workflow.setWorkflowInstance(wfInstance);
            workflow.setStatus(WorkflowJob.Status.RUNNING);
            // for (WorkflowActionBean action : store.getActionsForWorkflow(id, false)) {
            for (WorkflowActionBean action : jpaService.execute(new WorkflowJobGetActionsJPAExecutor(id))) {
                // START_MANUAL or END_RETRY or END_MANUAL
                if (action.isRetryOrManual()) {
                    action.setPendingOnly();
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, action));
                }
                if (action.isPending()) {
                    if (action.getStatus() == WorkflowActionBean.Status.PREP
                            || action.getStatus() == WorkflowActionBean.Status.START_MANUAL) {
                        // a repeated transient error, we have to clean up the action dir
                        // The control actions have invalid action dir paths because they contain ":" (colons)
                        if (!action.getType().equals(StartActionExecutor.TYPE)
                                && !action.getType().equals(ForkActionExecutor.TYPE)
                                && !action.getType().equals(JoinActionExecutor.TYPE)
                                && !action.getType().equals(KillActionExecutor.TYPE)
                                && !action.getType().equals(EndActionExecutor.TYPE)) {
                            ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(workflow, action, false, false);
                            if (context.getAppFileSystem().exists(context.getActionDir())) {
                                context.getAppFileSystem().delete(context.getActionDir(), true);
                            }
                        }
                        queue(new ActionStartXCommand(action.getId(), action.getType()));
                    }
                    else if (action.getStatus() == WorkflowActionBean.Status.START_RETRY) {
                        Date nextRunTime = action.getPendingAge();
                        queue(new ActionStartXCommand(action.getId(), action.getType()),
                                nextRunTime.getTime() - System.currentTimeMillis());
                    }
                    else if (action.getStatus() == WorkflowActionBean.Status.DONE
                            || action.getStatus() == WorkflowActionBean.Status.END_MANUAL) {
                        queue(new ActionEndXCommand(action.getId(), action.getType()));
                    }
                    else if (action.getStatus() == WorkflowActionBean.Status.END_RETRY) {
                        Date nextRunTime = action.getPendingAge();
                        queue(new ActionEndXCommand(action.getId(), action.getType()),
                                nextRunTime.getTime() - System.currentTimeMillis());
                    }
                }
            }
            workflow.setLastModifiedTime(new Date());
            updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, workflow));
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
            if (EventHandlerService.isEnabled()) {
                generateEvent(workflow);
            }
            queue(new WorkflowNotificationXCommand(workflow));
        }
        return null;
    }
    catch (WorkflowException ex) {
        throw new CommandException(ex);
    }
    catch (JPAExecutorException e) {
        throw new CommandException(e);
    }
    catch (HadoopAccessorException e) {
        throw new CommandException(e);
    }
    catch (IOException e) {
        throw new CommandException(ErrorCode.E0902, e.getMessage(), e);
    }
    catch (URISyntaxException e) {
        throw new CommandException(ErrorCode.E0902, e.getMessage(), e);
    }
    finally {
        updateParentIfNecessary(workflow);
    }
}
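For context, this command is normally driven by the Oozie DAG engine when a client resumes a suspended job. A sketch of invoking it directly, assuming a valid workflow job id; the wrapper class and method names are illustrative.

import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.wf.ResumeXCommand;

public class ResumeSketch {
    public static void resume(String workflowJobId) throws CommandException {
        // call() runs the command life cycle whose execute() is shown above,
        // including the batch update and the notification queued at the end.
        new ResumeXCommand(workflowJobId).call();
    }
}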
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class SignalXCommand, method startForkedActions:
public void startForkedActions(List<WorkflowActionBean> workflowActionBeanListForForked) throws CommandException {
    List<CallableWrapper<ActionExecutorContext>> tasks = new ArrayList<CallableWrapper<ActionExecutorContext>>();
    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    List<JsonBean> insertList = new ArrayList<JsonBean>();
    boolean endWorkflow = false;
    boolean submitJobByQueuing = false;
    for (WorkflowActionBean workflowActionBean : workflowActionBeanListForForked) {
        LOG.debug("Starting forked actions parallely : " + workflowActionBean.getId());
        tasks.add(Services.get().get(CallableQueueService.class).new CallableWrapper<ActionExecutorContext>(
                new ForkedActionStartXCommand(wfJob, workflowActionBean.getId(), workflowActionBean.getType()), 0));
    }
    try {
        List<Future<ActionExecutorContext>> futures = Services.get().get(CallableQueueService.class).invokeAll(tasks);
        for (Future<ActionExecutorContext> result : futures) {
            if (result == null) {
                submitJobByQueuing = true;
                continue;
            }
            ActionExecutorContext context = result.get();
            Map<String, String> contextVariableMap = ((ForkedActionExecutorContext) context).getContextMap();
            LOG.debug("contextVariableMap size of action " + context.getAction().getId() + " is " + contextVariableMap.size());
            for (String key : contextVariableMap.keySet()) {
                context.setVarToWorkflow(key, contextVariableMap.get(key));
            }
            if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.FAILED)) {
                LOG.warn("Action has failed, failing job" + context.getAction().getId());
                new ActionStartXCommand(context.getAction().getId(), null).failJob(context);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START, (WorkflowActionBean) context.getAction()));
                if (context.isShouldEndWF()) {
                    endWorkflow = true;
                }
            }
            if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.SUSPENDED)) {
                LOG.warn("Action has failed, failing job" + context.getAction().getId());
                new ActionStartXCommand(context.getAction().getId(), null).handleNonTransient(context, null, WorkflowAction.Status.START_MANUAL);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START, (WorkflowActionBean) context.getAction()));
                if (context.isShouldEndWF()) {
                    endWorkflow = true;
                }
            }
        }
        if (endWorkflow) {
            endWF(insertList);
        }
    }
    catch (Exception e) {
        LOG.error("Error running forked jobs parallely", e);
        startForkedActionsByQueuing(workflowActionBeanListForForked);
        submitJobByQueuing = false;
    }
    if (submitJobByQueuing && !endWorkflow) {
        LOG.error("There is error in running forked jobs parallely");
        startForkedActionsByQueuing(workflowActionBeanListForForked);
    }
    wfJob.setLastModifiedTime(new Date());
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    try {
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
    }
    catch (JPAExecutorException e) {
        throw new CommandException(e);
    }
    LOG.debug("forked actions submitted parallely");
}
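The forked branches are run through CallableQueueService rather than queued one by one: each ForkedActionStartXCommand is wrapped in a CallableWrapper and the whole batch is submitted with invokeAll, which returns one Future per branch. A stripped-down sketch of that submission step, reusing only the calls that appear above; the surrounding class is illustrative and the broad throws clause mirrors the catch of Exception in the original.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;

import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext;
import org.apache.oozie.command.wf.ForkedActionStartXCommand;
import org.apache.oozie.service.CallableQueueService;
import org.apache.oozie.service.CallableQueueService.CallableWrapper;
import org.apache.oozie.service.Services;

class ForkSubmitSketch {

    List<Future<ActionExecutorContext>> submit(WorkflowJobBean wfJob, List<WorkflowActionBean> forkedActions)
            throws Exception {
        CallableQueueService queueService = Services.get().get(CallableQueueService.class);
        List<CallableWrapper<ActionExecutorContext>> tasks = new ArrayList<CallableWrapper<ActionExecutorContext>>();
        for (WorkflowActionBean action : forkedActions) {
            // CallableWrapper is an inner class of CallableQueueService, hence the "service.new" syntax.
            tasks.add(queueService.new CallableWrapper<ActionExecutorContext>(
                    new ForkedActionStartXCommand(wfJob, action.getId(), action.getType()), 0));
        }
        // Submits all branches and returns a Future per task; in SignalXCommand a null Future
        // (or any exception) makes the caller fall back to startForkedActionsByQueuing.
        return queueService.invokeAll(tasks);
    }
}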
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class BatchQueryExecutor, method executeBatchInsertUpdateDelete:
@SuppressWarnings("rawtypes")
public void executeBatchInsertUpdateDelete(Collection<JsonBean> insertList, Collection<UpdateEntry> updateList,
        Collection<JsonBean> deleteList) throws JPAExecutorException {
    List<QueryEntry> queryList = new ArrayList<QueryEntry>();
    JPAService jpaService = Services.get().get(JPAService.class);
    EntityManager em = jpaService.getEntityManager();
    if (updateList != null) {
        for (UpdateEntry entry : updateList) {
            Query query = null;
            JsonBean bean = entry.getBean();
            if (bean instanceof WorkflowJobBean) {
                query = WorkflowJobQueryExecutor.getInstance().getUpdateQuery((WorkflowJobQuery) entry.getQueryName(),
                        (WorkflowJobBean) entry.getBean(), em);
            } else if (bean instanceof WorkflowActionBean) {
                query = WorkflowActionQueryExecutor.getInstance().getUpdateQuery((WorkflowActionQuery) entry.getQueryName(),
                        (WorkflowActionBean) entry.getBean(), em);
            } else if (bean instanceof CoordinatorJobBean) {
                query = CoordJobQueryExecutor.getInstance().getUpdateQuery((CoordJobQuery) entry.getQueryName(),
                        (CoordinatorJobBean) entry.getBean(), em);
            } else if (bean instanceof CoordinatorActionBean) {
                query = CoordActionQueryExecutor.getInstance().getUpdateQuery((CoordActionQuery) entry.getQueryName(),
                        (CoordinatorActionBean) entry.getBean(), em);
            } else if (bean instanceof BundleJobBean) {
                query = BundleJobQueryExecutor.getInstance().getUpdateQuery((BundleJobQuery) entry.getQueryName(),
                        (BundleJobBean) entry.getBean(), em);
            } else if (bean instanceof BundleActionBean) {
                query = BundleActionQueryExecutor.getInstance().getUpdateQuery((BundleActionQuery) entry.getQueryName(),
                        (BundleActionBean) entry.getBean(), em);
            } else if (bean instanceof SLARegistrationBean) {
                query = SLARegistrationQueryExecutor.getInstance().getUpdateQuery((SLARegQuery) entry.getQueryName(),
                        (SLARegistrationBean) entry.getBean(), em);
            } else if (bean instanceof SLASummaryBean) {
                query = SLASummaryQueryExecutor.getInstance().getUpdateQuery((SLASummaryQuery) entry.getQueryName(),
                        (SLASummaryBean) entry.getBean(), em);
            } else {
                throw new JPAExecutorException(ErrorCode.E0603, "BatchQueryExecutor failed to construct a query");
            }
            queryList.add(new QueryEntry(entry.getQueryName(), query));
        }
    }
    jpaService.executeBatchInsertUpdateDelete(insertList, queryList, deleteList, em);
}
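The dispatch above inspects the runtime type of each bean, asks the matching per-entity query executor to build a parameterized JPA Query, and then hands inserts, the built query list, and deletes to JPAService in one batch. When only a single bean needs to be written, the per-entity executors can also be used directly, which is what the test that follows does with WorkflowJobQueryExecutor. A minimal sketch of that single-bean path, using only query constants seen elsewhere in this listing; the class name and job id are placeholders.

import java.util.Date;

import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

class SingleUpdateSketch {

    void markSucceeded(String jobId) throws JPAExecutorException {
        // Fetch the bean with a named select query, mutate it, and write it back
        // with the matching named update query; no explicit EntityManager handling needed.
        WorkflowJobBean wf = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId);
        wf.setStatus(WorkflowJob.Status.SUCCEEDED);
        wf.setLastModifiedTime(new Date());
        WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_MODTIME, wf);
    }
}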
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class TestSLACalculatorMemory, method testLoadOnRestart:
@Test
public void testLoadOnRestart() throws Exception {
    SLACalculatorMemory slaCalcMemory = new SLACalculatorMemory();
    slaCalcMemory.init(Services.get().get(ConfigurationService.class).getConf());
    SLARegistrationBean slaRegBean1 = _createSLARegistration("job-1-W", AppType.WORKFLOW_JOB);
    String jobId1 = slaRegBean1.getId();
    SLARegistrationBean slaRegBean2 = _createSLARegistration("job-2-W", AppType.WORKFLOW_JOB);
    String jobId2 = slaRegBean2.getId();
    SLARegistrationBean slaRegBean3 = _createSLARegistration("job-3-W", AppType.WORKFLOW_JOB);
    String jobId3 = slaRegBean3.getId();
    List<String> idList = new ArrayList<String>();
    idList.add(slaRegBean1.getId());
    idList.add(slaRegBean2.getId());
    idList.add(slaRegBean3.getId());
    createWorkflow(idList);
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
    slaRegBean1.setAppName("app-name");
    slaRegBean1.setExpectedDuration(123);
    slaRegBean1.setExpectedEnd(sdf.parse("2012-02-07"));
    slaRegBean1.setExpectedStart(sdf.parse("2011-02-07"));
    slaRegBean1.setNominalTime(sdf.parse("2012-01-06"));
    slaRegBean1.setUser("user");
    slaRegBean1.setParentId("parentId");
    slaRegBean1.setUpstreamApps("upstreamApps");
    slaRegBean1.setNotificationMsg("notificationMsg");
    slaRegBean1.setAlertContact("a@abc.com");
    slaRegBean1.setAlertEvents("MISS");
    slaRegBean1.setJobData("jobData");
    // 1 hour back
    Date startTime = new Date(System.currentTimeMillis() - 1 * 1 * 3600 * 1000);
    // 2 hours ahead
    Date endTime = new Date(System.currentTimeMillis() + 2 * 1 * 3600 * 1000);
    slaRegBean3.setExpectedStart(startTime);
    slaRegBean3.setExpectedEnd(endTime);
    slaCalcMemory.addRegistration(jobId1, slaRegBean1);
    slaCalcMemory.addRegistration(jobId2, slaRegBean2);
    slaCalcMemory.addRegistration(jobId3, slaRegBean3);
    slaCalcMemory.updateAllSlaStatus();
    SLACalcStatus calc1 = slaCalcMemory.get(jobId1);
    SLACalcStatus calc2 = slaCalcMemory.get(jobId2);
    SLACalcStatus calc3 = slaCalcMemory.get(jobId3);
    calc1.setEventStatus(SLAEvent.EventStatus.END_MISS);
    calc1.setSLAStatus(SLAEvent.SLAStatus.MISS);
    calc1.setJobStatus(WorkflowJob.Status.FAILED.toString());
    // set last modified time 5 days back
    Date lastModifiedTime = new Date(System.currentTimeMillis() - 5 * 24 * 60 * 60 * 1000);
    calc1.setLastModifiedTime(lastModifiedTime);
    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    WorkflowJobBean wf1 = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId1);
    wf1.setId(jobId1);
    wf1.setStatus(WorkflowJob.Status.SUCCEEDED);
    wf1.setStartTime(sdf.parse("2011-03-09"));
    wf1.setEndTime(sdf.parse("2011-03-10"));
    wf1.setLastModifiedTime(new Date());
    WorkflowJobBean wf2 = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId2);
    wf2.setId(jobId2);
    wf2.setStatus(WorkflowJob.Status.RUNNING);
    wf2.setStartTime(sdf.parse("2011-03-09"));
    wf2.setEndTime(null);
    wf2.setLastModifiedTime(new Date());
    WorkflowJobBean wf3 = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId3);
    wf3.setId(jobId3);
    wf3.setStatus(WorkflowJob.Status.RUNNING);
    wf3.setStartTime(startTime);
    wf3.setEndTime(null);
    wf3.setLastModifiedTime(new Date());
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, wf1));
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, wf2));
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, wf3));
    updateList.add(new UpdateEntry<SLASummaryQuery>(SLASummaryQuery.UPDATE_SLA_SUMMARY_ALL, new SLASummaryBean(calc2)));
    updateList.add(new UpdateEntry<SLASummaryQuery>(SLASummaryQuery.UPDATE_SLA_SUMMARY_ALL, new SLASummaryBean(calc3)));
    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
    slaCalcMemory = new SLACalculatorMemory();
    slaCalcMemory.init(Services.get().get(ConfigurationService.class).getConf());
    slaCalcMemory.updateAllSlaStatus();
    SLACalcStatus calc = new SLACalcStatus(
            SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, jobId1),
            SLARegistrationQueryExecutor.getInstance().get(SLARegQuery.GET_SLA_REG_ON_RESTART, jobId1));
    assertEquals("job-1-W", calc.getId());
    assertEquals(AppType.WORKFLOW_JOB, calc.getAppType());
    assertEquals("app-name", calc.getAppName());
    assertEquals(123, calc.getExpectedDuration());
    assertEquals(sdf.parse("2012-02-07"), calc.getExpectedEnd());
    assertEquals(sdf.parse("2011-02-07"), calc.getExpectedStart());
    assertEquals(sdf.parse("2012-01-06"), calc.getNominalTime());
    assertEquals("user", calc.getUser());
    assertEquals("parentId", calc.getParentId());
    assertEquals("upstreamApps", calc.getUpstreamApps());
    assertEquals("notificationMsg", calc.getNotificationMsg());
    assertEquals("a@abc.com", calc.getAlertContact());
    assertEquals("MISS", calc.getAlertEvents());
    assertEquals("jobData", calc.getJobData());
    assertEquals(sdf.parse("2011-03-09"), calc.getActualStart());
    assertEquals(sdf.parse("2011-03-10"), calc.getActualEnd());
    assertEquals(SLAEvent.EventStatus.END_MISS, calc1.getEventStatus());
    assertEquals(SLAEvent.SLAStatus.MISS, calc1.getSLAStatus());
    assertEquals(WorkflowJob.Status.FAILED.toString(), calc1.getJobStatus());
    assertEquals(lastModifiedTime, calc1.getLastModifiedTime());
    calc2 = new SLACalcStatus(
            SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, jobId2),
            SLARegistrationQueryExecutor.getInstance().get(SLARegQuery.GET_SLA_REG_ON_RESTART, jobId2));
    assertEquals(8, calc.getEventProcessed());
    assertEquals(7, calc2.getEventProcessed());
    // jobId2 should be in history set as eventprocessed is 7 (111)
    // job3 will be in slamap
    // 1 out of 3 jobs in map
    assertEquals(1, slaCalcMemory.size());
    WorkflowJobBean wf = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId3);
    wf.setId(jobId3);
    wf.setStatus(WorkflowJob.Status.SUCCEEDED);
    wf.setEndTime(endTime);
    wf.setStartTime(startTime);
    WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_START_END, wf);
    slaCalcMemory.addJobStatus(jobId3, WorkflowJob.Status.SUCCEEDED.toString(), EventStatus.SUCCESS, startTime, endTime);
    SLASummaryBean slaSummary = SLASummaryQueryExecutor.getInstance().get(SLASummaryQuery.GET_SLA_SUMMARY, jobId3);
    assertEquals(8, slaSummary.getEventProcessed());
    assertEquals(startTime, slaSummary.getActualStart());
    assertEquals(endTime, slaSummary.getActualEnd());
    assertEquals(WorkflowJob.Status.SUCCEEDED.toString(), slaSummary.getJobStatus());
}
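The eventProcessed assertions read as a small bit field: the in-line comment above treats 7 (binary 111) as "start, duration and end events all processed", which is why jobId2 is moved to the history set while jobId3, still running, stays in the SLA map, and the value 8 observed for the finished jobs appears to be the marker stored once processing is entirely complete. A tiny illustration of the bit arithmetic; the constant names are hypothetical and exist only to make the positions readable.

// Hypothetical constants for the bit positions implied by the test's comments.
final class EventProcessedBits {
    static final int START_PROCESSED    = 1;      // binary 001
    static final int DURATION_PROCESSED = 1 << 1; // binary 010
    static final int END_PROCESSED      = 1 << 2; // binary 100

    static boolean allEventsProcessed(int eventProcessed) {
        // 7 == 111: start, duration and end have all been handled,
        // the condition the test's comment ties to the history set.
        int allBits = START_PROCESSED | DURATION_PROCESSED | END_PROCESSED;
        return (eventProcessed & allBits) == allBits;
    }
}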