Usage example of org.apache.oozie.service.ActionService in the Apache Oozie project: the execute method of the SignalXCommand class.
/**
 * Core of the workflow state machine. When invoked without an action
 * (wfAction == null) it starts a PREP job; otherwise it applies the action's
 * completion signal to the WorkflowInstance. It then either finalizes the job
 * (kill/fail remaining actions, record SLA events, resolve kill-node message)
 * or materializes the next action(s) to run, persists all changes in a single
 * JPA batch, and finally queues or synchronously starts follow-up actions.
 *
 * @return always null (Void command)
 * @throws CommandException E0801 if a job is signalled without an action while
 *         not in PREP; also wraps WorkflowException / JPAExecutorException
 */
@Override
protected Void execute() throws CommandException {
LOG.debug("STARTED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
WorkflowInstance workflowInstance = wfJob.getWorkflowInstance();
workflowInstance.setTransientVar(WorkflowStoreService.WORKFLOW_BEAN, wfJob);
WorkflowJob.Status prevStatus = wfJob.getStatus();
boolean completed = false, skipAction = false;
// Action to be started synchronously in this thread after the DB batch, instead of queued.
WorkflowActionBean syncAction = null;
// Actions collected for parallel submission when FORK_PARALLEL_JOBSUBMISSION is enabled.
List<WorkflowActionBean> workflowActionBeanListForForked = new ArrayList<WorkflowActionBean>();
if (wfAction == null) {
// No action: this signal is the job submission itself, only legal from PREP.
if (wfJob.getStatus() == WorkflowJob.Status.PREP) {
try {
completed = workflowInstance.start();
} catch (WorkflowException e) {
throw new CommandException(e);
}
wfJob.setStatus(WorkflowJob.Status.RUNNING);
wfJob.setStartTime(new Date());
wfJob.setWorkflowInstance(workflowInstance);
generateEvent = true;
// 1. Add SLA status event for WF-JOB with status STARTED
SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, Status.STARTED, SlaAppType.WORKFLOW_JOB);
if (slaEvent != null) {
insertList.add(slaEvent);
}
// 2. Add SLA registration events for all WF_ACTIONS
createSLARegistrationForAllActions(workflowInstance.getApp().getDefinition(), wfJob.getUser(), wfJob.getGroup(), wfJob.getConf());
queue(new WorkflowNotificationXCommand(wfJob));
} else {
throw new CommandException(ErrorCode.E0801, wfJob.getId());
}
} else {
// Signal from a completed action: feed its result into the workflow instance.
WorkflowInstance.Status initialStatus = workflowInstance.getStatus();
// Rerun support: a per-node "to skip" transient var marks actions to bypass.
String skipVar = workflowInstance.getVar(wfAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
if (skipVar != null) {
skipAction = skipVar.equals("true");
}
try {
completed = workflowInstance.signal(wfAction.getExecutionPath(), wfAction.getSignalValue());
} catch (WorkflowException e) {
// On signal failure, adopt the instance's (terminal) status and treat the job as completed.
LOG.error("Workflow action failed : " + e.getMessage(), e);
wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
completed = true;
}
wfJob.setWorkflowInstance(workflowInstance);
wfAction.resetPending();
if (!skipAction) {
wfAction.setTransition(workflowInstance.getTransition(wfAction.getName()));
queue(new WorkflowNotificationXCommand(wfJob, wfAction));
}
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS, wfAction));
WorkflowInstance.Status endStatus = workflowInstance.getStatus();
if (endStatus != initialStatus) {
generateEvent = true;
}
}
if (completed) {
// Workflow reached a terminal state: kill/fail the remaining actions and finalize the job.
try {
for (String actionToKillId : WorkflowStoreService.getActionsToKill(workflowInstance)) {
WorkflowActionBean actionToKill;
actionToKill = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, actionToKillId);
// Pending stays set so ActionKillXCommand can claim and complete the kill.
actionToKill.setPending();
actionToKill.setStatus(WorkflowActionBean.Status.KILLED);
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToKill));
queue(new ActionKillXCommand(actionToKill.getId(), actionToKill.getType()));
}
for (String actionToFailId : WorkflowStoreService.getActionsToFail(workflowInstance)) {
WorkflowActionBean actionToFail = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_FAIL, actionToFailId);
actionToFail.resetPending();
actionToFail.setStatus(WorkflowActionBean.Status.FAILED);
// NOTE(review): this guard only records the failed action's error when wfJobErrorCode
// is ALREADY non-null, so a first error would never be captured — looks inverted
// (== null expected); confirm against upstream before changing.
if (wfJobErrorCode != null) {
wfJobErrorCode = actionToFail.getErrorCode();
wfJobErrorMsg = actionToFail.getErrorMessage();
}
queue(new WorkflowNotificationXCommand(wfJob, actionToFail));
// NOTE(review): SLA event is built from wfAction (the signalled action), not actionToFail —
// verify this is intentional and not a copy/paste slip.
SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(), Status.FAILED, SlaAppType.WORKFLOW_ACTION);
if (slaEvent != null) {
insertList.add(slaEvent);
}
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToFail));
}
} catch (JPAExecutorException je) {
throw new CommandException(je);
}
wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
wfJob.setEndTime(new Date());
wfJob.setWorkflowInstance(workflowInstance);
// Map the final job status onto an SLA status (SUSPENDED intentionally unmapped, see TODO).
Status slaStatus = Status.SUCCEEDED;
switch(wfJob.getStatus()) {
case SUCCEEDED:
slaStatus = Status.SUCCEEDED;
break;
case KILLED:
slaStatus = Status.KILLED;
break;
case FAILED:
slaStatus = Status.FAILED;
break;
default:
// TODO SUSPENDED
break;
}
SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, slaStatus, SlaAppType.WORKFLOW_JOB);
if (slaEvent != null) {
insertList.add(slaEvent);
}
queue(new WorkflowNotificationXCommand(wfJob));
if (wfJob.getStatus() == WorkflowJob.Status.SUCCEEDED) {
InstrumentUtils.incrJobCounter(INSTR_SUCCEEDED_JOBS_COUNTER_NAME, 1, getInstrumentation());
}
// output message for Kill node
if (wfAction != null) {
// wfAction could be a no-op job
NodeDef nodeDef = workflowInstance.getNodeDef(wfAction.getExecutionPath());
if (nodeDef != null && nodeDef instanceof KillNodeDef) {
boolean isRetry = false;
boolean isUserRetry = false;
ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
InstrumentUtils.incrJobCounter(INSTR_KILLED_JOBS_COUNTER_NAME, 1, getInstrumentation());
try {
// Resolve EL expressions in the kill node's message against the action context.
String tmpNodeConf = nodeDef.getConf();
String message = context.getELEvaluator().evaluate(tmpNodeConf, String.class);
LOG.debug("Try to resolve KillNode message for jobid [{0}], actionId [{1}], before resolve [{2}], " + "after resolve [{3}]", jobId, actionId, tmpNodeConf, message);
// Keep an existing error code if present; otherwise attribute the message to E0729.
if (wfAction.getErrorCode() != null) {
wfAction.setErrorInfo(wfAction.getErrorCode(), message);
} else {
wfAction.setErrorInfo(ErrorCode.E0729.toString(), message);
}
} catch (Exception ex) {
// A bad EL expression must not abort job completion; record it on the action instead.
LOG.warn("Exception in SignalXCommand when processing Kill node message: {0}", ex.getMessage(), ex);
wfAction.setErrorInfo(ErrorCode.E0756.toString(), ErrorCode.E0756.format(ex.getMessage()));
wfAction.setStatus(WorkflowAction.Status.ERROR);
}
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS_ERROR, wfAction));
}
}
} else {
// Workflow still running: materialize the next action(s) chosen by the instance.
for (WorkflowActionBean newAction : WorkflowStoreService.getActionsToStart(workflowInstance)) {
boolean isOldWFAction = false;
// old action. To avoid twice entry for same action, Checking in Db if the workflow action already exist.
if (SubWorkflowActionExecutor.ACTION_TYPE.equals(newAction.getType())) {
try {
WorkflowActionBean oldAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_CHECK, newAction.getId());
newAction.setExternalId(oldAction.getExternalId());
newAction.setCreatedTime(oldAction.getCreatedTime());
isOldWFAction = true;
} catch (JPAExecutorException e) {
// E0605 = not found, which is the normal (new action) case; anything else is fatal.
if (e.getErrorCode() != ErrorCode.E0605) {
throw new CommandException(e);
}
}
}
String skipVar = workflowInstance.getVar(newAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
boolean skipNewAction = false, suspendNewAction = false;
if (skipVar != null) {
skipNewAction = skipVar.equals("true");
}
if (skipNewAction) {
// Skipped action (rerun): just mark pending and signal past it without executing.
WorkflowActionBean oldAction = new WorkflowActionBean();
oldAction.setId(newAction.getId());
oldAction.setPending();
oldAction.setExecutionPath(newAction.getExecutionPath());
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING, oldAction));
queue(new SignalXCommand(jobId, oldAction.getId()));
} else {
if (!skipAction) {
try {
// Make sure that transition node for a forked action
// is inserted only once
WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, newAction.getId());
continue;
} catch (JPAExecutorException jee) {
}
}
suspendNewAction = checkForSuspendNode(newAction);
newAction.setPending();
newAction.setSlaXml(actionSlaXml);
if (!isOldWFAction) {
newAction.setCreatedTime(new Date());
insertList.add(newAction);
} else {
updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START, newAction));
}
LOG.debug("SignalXCommand: Name: " + newAction.getName() + ", Id: " + newAction.getId() + ", Authcode:" + newAction.getCred());
if (wfAction != null) {
// null during wf job submit
ActionService as = Services.get().get(ActionService.class);
ActionExecutor current = as.getExecutor(wfAction.getType());
LOG.trace("Current Action Type:" + current.getClass());
if (!suspendNewAction) {
if (current instanceof StartActionExecutor) {
// Excluding :start: here from executing first action synchronously since it
// blocks the consumer thread till the action is submitted to Hadoop,
// in turn reducing the number of new submissions the threads can accept.
// Would also be susceptible to longer delays in case Hadoop cluster is busy.
queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
} else if (current instanceof ForkActionExecutor) {
if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
workflowActionBeanListForForked.add(newAction);
} else {
queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
}
} else {
// Normal case: run the next action synchronously in this thread to avoid
// undue delay from between end of previous and start of next action.
syncAction = newAction;
}
} else {
// will be ignored.
if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
workflowActionBeanListForForked.add(newAction);
}
}
} else {
// first action after wf submit should always be sync
syncAction = newAction;
}
}
}
}
try {
wfJob.setLastModifiedTime(new Date());
updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_START_END, wfJob));
// call JPAExecutor to do the bulk writes
BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
if (prevStatus != wfJob.getStatus()) {
LOG.debug("Updated the workflow status to " + wfJob.getId() + " status =" + wfJob.getStatusStr());
}
if (generateEvent && EventHandlerService.isEnabled()) {
generateEvent(wfJob, wfJobErrorCode, wfJobErrorMsg);
}
} catch (JPAExecutorException je) {
throw new CommandException(je);
}
// Post-persist follow-up: finish the job, or start the chosen action(s) in this thread.
if (wfJob.getStatus() != WorkflowJob.Status.RUNNING && wfJob.getStatus() != WorkflowJob.Status.SUSPENDED) {
// only for asynchronous actions, parent coord action's external id will
// persisted and following update will succeed.
updateParentIfNecessary(wfJob);
// To delete the WF temp dir
new WfEndXCommand(wfJob).call();
} else if (syncAction != null) {
new ActionStartXCommand(wfJob, syncAction.getId(), syncAction.getType()).call();
} else if (!workflowActionBeanListForForked.isEmpty() && !checkForSuspendNode(workflowActionBeanListForForked)) {
startForkedActions(workflowActionBeanListForForked);
}
LOG.debug("ENDED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
return null;
}
Usage example of org.apache.oozie.service.ActionService in the Apache Oozie project: the checkActionNode method of the LiteWorkflowValidator class.
/**
 * Validates that an action node's configuration is well-formed XML and that its
 * root element name is a registered action type.
 *
 * The root element of the action configuration (e.g. {@code <map-reduce>},
 * {@code <shell>}) is the action type, which must be known to the ActionService.
 *
 * @param node the workflow node definition whose configuration is checked
 * @throws WorkflowException E0723 if the action type is not registered,
 *         or E0700 if the node configuration is not parseable XML
 */
private void checkActionNode(NodeDef node) throws WorkflowException {
    try {
        Element action = XmlUtils.parseXml(node.getConf());
        ActionService actionService = Services.get().get(ActionService.class);
        boolean supportedAction = actionService.hasActionType(action.getName());
        if (!supportedAction) {
            throw new WorkflowException(ErrorCode.E0723, node.getName(), action.getName());
        }
    } catch (JDOMException ex) {
        // Preserve the parse failure as the exception cause (XException treats a
        // trailing Throwable parameter as the cause) instead of flattening it to
        // a message string only.
        throw new WorkflowException(ErrorCode.E0700, "JDOMException: " + ex.getMessage(), ex);
    }
}
Usage example of org.apache.oozie.service.ActionService in the Apache Oozie project: the loadState method of the KillXCommand class.
/**
 * Loads the state required to kill the workflow: the workflow job bean, the
 * full list of its actions, and the ActionService used to resolve executors.
 *
 * @throws CommandException E0610 if the JPAService is unavailable, or wrapping
 *         any XException raised while loading state
 */
@Override
protected void loadState() throws CommandException {
    try {
        jpaService = Services.get().get(JPAService.class);
        // Fail fast when persistence is not available — nothing can be loaded without it.
        if (jpaService == null) {
            throw new CommandException(ErrorCode.E0610);
        }
        this.wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW_KILL, wfId);
        this.actionList = jpaService.execute(new WorkflowActionsGetForJobJPAExecutor(wfId));
        LogUtils.setLogInfo(wfJob);
        actionService = Services.get().get(ActionService.class);
    } catch (XException ex) {
        throw new CommandException(ex);
    }
}
Aggregations