
Example 6 with WorkflowInstance

use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.

the class XDataTestCase method createWorkflow.

/**
 * Create workflow job bean
 *
 * @param app workflow app
 * @param conf workflow configuration
 * @param jobStatus workflow job status
 * @param instanceStatus workflow instance status
 * @return workflow job bean
 * @throws Exception thrown if unable to create workflow job bean
 */
protected WorkflowJobBean createWorkflow(WorkflowApp app, Configuration conf, WorkflowJob.Status jobStatus, WorkflowInstance.Status instanceStatus) throws Exception {
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    Configuration protoActionConf = wps.createProtoActionConf(conf, true);
    WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
    WorkflowInstance wfInstance = workflowLib.createInstance(app, conf);
    ((LiteWorkflowInstance) wfInstance).setStatus(instanceStatus);
    WorkflowJobBean workflow = new WorkflowJobBean();
    workflow.setId(wfInstance.getId());
    workflow.setExternalId("extid");
    workflow.setAppName(app.getName());
    workflow.setAppPath(conf.get(OozieClient.APP_PATH));
    workflow.setConf(XmlUtils.prettyPrint(conf).toString());
    workflow.setProtoActionConf(XmlUtils.prettyPrint(protoActionConf).toString());
    workflow.setCreatedTime(new Date());
    workflow.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
    workflow.setStatus(jobStatus);
    workflow.setRun(0);
    workflow.setUser(conf.get(OozieClient.USER_NAME));
    workflow.setGroup(conf.get(OozieClient.GROUP_NAME));
    workflow.setWorkflowInstance(wfInstance);
    workflow.setSlaXml("<sla></sla>");
    return workflow;
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) WorkflowLib(org.apache.oozie.workflow.WorkflowLib) WorkflowAppService(org.apache.oozie.service.WorkflowAppService) WorkflowStoreService(org.apache.oozie.service.WorkflowStoreService) LiteWorkflowStoreService(org.apache.oozie.service.LiteWorkflowStoreService) LiteWorkflowInstance(org.apache.oozie.workflow.lite.LiteWorkflowInstance) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) WorkflowJobBean(org.apache.oozie.WorkflowJobBean) Date(java.util.Date)
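
A minimal sketch of how a test built on XDataTestCase might exercise this helper. The method name, the PREP statuses, and the assumption that app and conf are prepared by other fixture code are illustrative, not taken from the snippet above.

protected void assertFreshWorkflowBean(WorkflowApp app, Configuration conf) throws Exception {
    // hypothetical check: the bean created by createWorkflow() should mirror its workflow instance
    WorkflowJobBean wf = createWorkflow(app, conf, WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    assertEquals(wf.getId(), wf.getWorkflowInstance().getId());
    assertEquals(WorkflowJob.Status.PREP, wf.getStatus());
    assertEquals(WorkflowInstance.Status.PREP, wf.getWorkflowInstance().getStatus());
    assertEquals(app.getName(), wf.getAppName());
}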

Example 7 with WorkflowInstance

use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.

the class TestDBWorkflowStore method _testUpdateWF.

private void _testUpdateWF() throws StoreException {
    wfBean1.setStatus(WorkflowJob.Status.SUCCEEDED);
    WorkflowInstance wfInstance = wfBean1.getWorkflowInstance();
    wfInstance.setVar("test", "hello");
    wfBean1.setWorkflowInstance(wfInstance);
    wfBean1.setExternalId("testExtId");
    store.getWorkflow(wfBean1.getId(), false);
    store.updateWorkflow(wfBean1);
    WorkflowJobBean wfBean = store.getWorkflow(wfBean1.getId(), false);
    assertEquals("hello", wfBean.getWorkflowInstance().getVar("test"));
    assertEquals(wfBean.getStatus(), WorkflowJob.Status.SUCCEEDED);
    store.commitTrx();
}
Also used : WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) WorkflowJobBean(org.apache.oozie.WorkflowJobBean)
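
The detail to keep in mind here is the difference between the two kinds of instance variables, which is why the test re-attaches the instance before updating the store. A short sketch with illustrative keys; the statement that setTransientVar values are held in memory only is an assumption based on the API naming and on how Example 10 uses it.

private void annotateInstance(WorkflowJobBean wfBean) {
    WorkflowInstance instance = wfBean.getWorkflowInstance();
    instance.setVar("test", "hello");                  // serialized with the instance; survives the store round trip
    instance.setTransientVar("workflow.bean", wfBean); // assumed transient: not persisted with the instance
    wfBean.setWorkflowInstance(instance);              // re-attach before store.updateWorkflow(), as in _testUpdateWF()
}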

Example 8 with WorkflowInstance

use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.

the class ReRunXCommand method setupReRun.

private void setupReRun() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    LogUtils.setLogInfo(wfBean);
    WorkflowInstance oldWfInstance = this.wfBean.getWorkflowInstance();
    WorkflowInstance newWfInstance;
    String appPath = null;
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        WorkflowApp app = wps.parseDef(conf, null);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        appPath = conf.get(OozieClient.APP_PATH);
        URI uri = new URI(appPath);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(wfBean.getUser(), uri, fsConf);
        Path configDefault = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, SubmitXCommand.CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            XConfiguration.injectDefaults(defaultConf, conf);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolving all variables in the job properties. This ensures the Hadoop Configuration semantics are
        // preserved. The Configuration.get function within XConfiguration.resolve() works recursively to get the
        // final value corresponding to a key in the map. Resetting the conf to contain all the resolved values is
        // necessary to ensure propagation of Oozie properties to Hadoop calls downstream.
        conf = ((XConfiguration) conf).resolve();
        try {
            newWfInstance = workflowLib.createInstance(app, conf, jobId);
        } catch (WorkflowException e) {
            throw new CommandException(e);
        }
        String appName = ELUtils.resolveAppName(app.getName(), conf);
        if (SLAService.isEnabled()) {
            Element wfElem = XmlUtils.parseXml(app.getDefinition());
            ELEvaluator evalSla = SubmitXCommand.createELEvaluatorForGroup(conf, "wf-sla-submit");
            Element eSla = XmlUtils.getSLAElement(wfElem);
            String jobSlaXml = null;
            if (eSla != null) {
                jobSlaXml = SubmitXCommand.resolveSla(eSla, evalSla);
            }
            writeSLARegistration(wfElem, jobSlaXml, newWfInstance.getId(), conf.get(SubWorkflowActionExecutor.PARENT_ID), conf.get(OozieClient.USER_NAME), appName, evalSla);
        }
        wfBean.setAppName(appName);
        wfBean.setProtoActionConf(protoActionConf.toXmlString());
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (IOException ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (URISyntaxException ex) {
        throw new CommandException(ErrorCode.E0711, appPath, ex.getMessage(), ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E1007, ex.getMessage(), ex);
    }
    for (int i = 0; i < actions.size(); i++) {
        // Delete the old action unless it is marked to be skipped, or it is a sub-workflow action being rerun
        // with RERUN_FAIL_NODES set; those actions are kept and their data is copied so they can be reused for the rerun.
        if (!nodesToSkip.contains(actions.get(i).getName()) && !(conf.getBoolean(OozieClient.RERUN_FAIL_NODES, false) && SubWorkflowActionExecutor.ACTION_TYPE.equals(actions.get(i).getType()))) {
            deleteList.add(actions.get(i));
            LOG.info("Deleting Action[{0}] for re-run", actions.get(i).getId());
        } else {
            copyActionData(newWfInstance, oldWfInstance);
        }
    }
    wfBean.setAppPath(conf.get(OozieClient.APP_PATH));
    wfBean.setConf(XmlUtils.prettyPrint(conf).toString());
    wfBean.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
    wfBean.setUser(conf.get(OozieClient.USER_NAME));
    String group = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.JOB_ACL, OozieClient.GROUP_NAME, null);
    wfBean.setGroup(group);
    wfBean.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
    wfBean.setEndTime(null);
    wfBean.setRun(wfBean.getRun() + 1);
    wfBean.setStatus(WorkflowJob.Status.PREP);
    wfBean.setWorkflowInstance(newWfInstance);
    try {
        wfBean.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_RERUN, wfBean));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, deleteList);
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    } finally {
        updateParentIfNecessary(wfBean);
    }
}
Also used : WorkflowApp(org.apache.oozie.workflow.WorkflowApp) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) Element(org.jdom.Element) URISyntaxException(java.net.URISyntaxException) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) URI(java.net.URI) JPAExecutorException(org.apache.oozie.executor.jpa.JPAExecutorException) FileSystem(org.apache.hadoop.fs.FileSystem) ELEvaluator(org.apache.oozie.util.ELEvaluator) Path(org.apache.hadoop.fs.Path) WorkflowLib(org.apache.oozie.workflow.WorkflowLib) WorkflowAppService(org.apache.oozie.service.WorkflowAppService) WorkflowStoreService(org.apache.oozie.service.WorkflowStoreService) WorkflowException(org.apache.oozie.workflow.WorkflowException) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) CommandException(org.apache.oozie.command.CommandException) IOException(java.io.IOException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) JDOMException(org.jdom.JDOMException) PreconditionException(org.apache.oozie.command.PreconditionException) Date(java.util.Date) WorkflowJobQuery(org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery)
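
The part of setupReRun() that is easiest to overlook is how config-default.xml is located: the application path may point either at the workflow definition file or at its directory, and the defaults file is looked up alongside it before being injected into the job configuration. A sketch isolating that step; the wrapper method and its parameters are illustrative, the calls themselves are the ones used above.

private Configuration loadConfigDefault(String user, String appPath) throws Exception {
    // hypothetical helper mirroring the config-default.xml resolution in setupReRun()
    URI uri = new URI(appPath);
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration fsConf = has.createConfiguration(uri.getAuthority());
    FileSystem fs = has.createFileSystem(user, uri, fsConf);
    Path path = new Path(uri.getPath());
    // if the app path is the workflow XML file itself, look for the defaults next to it
    Path configDefault = fs.isFile(path) ? new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT)
                                         : new Path(path, SubmitXCommand.CONFIG_DEFAULT);
    return fs.exists(configDefault) ? new XConfiguration(fs.open(configDefault)) : null;
}

The caller would then run PropertiesUtils.checkDisallowedProperties on the result and inject it into the job configuration with XConfiguration.injectDefaults, exactly as setupReRun() does.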

Example 9 with WorkflowInstance

use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.

the class KillXCommand method execute.

@Override
protected Void execute() throws CommandException {
    LOG.info("STARTED WorkflowKillXCommand for jobId=" + wfId);
    wfJob.setEndTime(new Date());
    if (wfJob.getStatus() != WorkflowJob.Status.FAILED) {
        InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
        wfJob.setStatus(WorkflowJob.Status.KILLED);
        SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), wfJob.getId(), Status.KILLED, SlaAppType.WORKFLOW_JOB);
        if (slaEvent != null) {
            insertList.add(slaEvent);
        }
        try {
            wfJob.getWorkflowInstance().kill();
        } catch (WorkflowException e) {
            throw new CommandException(ErrorCode.E0725, e.getMessage(), e);
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        ((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.KILLED);
        wfJob.setWorkflowInstance(wfInstance);
    }
    try {
        for (WorkflowActionBean action : actionList) {
            if (action.getStatus() == WorkflowActionBean.Status.RUNNING || action.getStatus() == WorkflowActionBean.Status.DONE) {
                if (!(actionService.getExecutor(action.getType()) instanceof ControlNodeActionExecutor)) {
                    action.setPending();
                }
                action.setStatus(WorkflowActionBean.Status.KILLED);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, action));
                queue(new ActionKillXCommand(action.getId(), action.getType()));
            } else if (action.getStatus() == WorkflowActionBean.Status.PREP || action.getStatus() == WorkflowActionBean.Status.START_RETRY || action.getStatus() == WorkflowActionBean.Status.START_MANUAL || action.getStatus() == WorkflowActionBean.Status.END_RETRY || action.getStatus() == WorkflowActionBean.Status.END_MANUAL || action.getStatus() == WorkflowActionBean.Status.USER_RETRY) {
                action.setStatus(WorkflowActionBean.Status.KILLED);
                action.resetPending();
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(action.getSlaXml(), action.getId(), Status.KILLED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, action));
                if (EventHandlerService.isEnabled() && !(actionService.getExecutor(action.getType()) instanceof ControlNodeActionExecutor)) {
                    generateEvent(action, wfJob.getUser());
                }
            }
        }
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_END, wfJob));
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        if (EventHandlerService.isEnabled()) {
            generateEvent(wfJob);
        }
        queue(new WorkflowNotificationXCommand(wfJob));
    } catch (JPAExecutorException e) {
        throw new CommandException(e);
    } finally {
        if (wfJob.getStatus() == WorkflowJob.Status.KILLED) {
            // To delete the WF temp dir
            new WfEndXCommand(wfJob).call();
        }
        updateParentIfNecessary(wfJob);
    }
    LOG.info("ENDED WorkflowKillXCommand for jobId=" + wfId);
    return null;
}
Also used : WorkflowActionQuery(org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery) UpdateEntry(org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry) WorkflowException(org.apache.oozie.workflow.WorkflowException) LiteWorkflowInstance(org.apache.oozie.workflow.lite.LiteWorkflowInstance) CommandException(org.apache.oozie.command.CommandException) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) Date(java.util.Date) SLAEventBean(org.apache.oozie.SLAEventBean) WorkflowActionBean(org.apache.oozie.WorkflowActionBean) JPAExecutorException(org.apache.oozie.executor.jpa.JPAExecutorException) WorkflowJobQuery(org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery) ControlNodeActionExecutor(org.apache.oozie.action.control.ControlNodeActionExecutor)
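
The kill path follows the same mutate-and-reattach pattern seen in the other examples: kill the instance, downcast to the lite implementation to force the terminal status, and set the instance back on the bean before it is written out. A stripped-down sketch of just that sequence; the wrapper method is illustrative.

private void markKilled(WorkflowJobBean wfJob) throws CommandException {
    try {
        wfJob.getWorkflowInstance().kill();
    } catch (WorkflowException e) {
        throw new CommandException(ErrorCode.E0725, e.getMessage(), e);
    }
    WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
    // downcast to the lite implementation to set the status, as the command above does
    ((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.KILLED);
    wfJob.setWorkflowInstance(wfInstance);   // re-attach so the new status is persisted with the bean
    wfJob.setStatus(WorkflowJob.Status.KILLED);
    wfJob.setEndTime(new Date());
}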

Example 10 with WorkflowInstance

use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.

the class SignalXCommand method execute.

@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
    WorkflowInstance workflowInstance = wfJob.getWorkflowInstance();
    workflowInstance.setTransientVar(WorkflowStoreService.WORKFLOW_BEAN, wfJob);
    WorkflowJob.Status prevStatus = wfJob.getStatus();
    boolean completed = false, skipAction = false;
    WorkflowActionBean syncAction = null;
    List<WorkflowActionBean> workflowActionBeanListForForked = new ArrayList<WorkflowActionBean>();
    if (wfAction == null) {
        if (wfJob.getStatus() == WorkflowJob.Status.PREP) {
            try {
                completed = workflowInstance.start();
            } catch (WorkflowException e) {
                throw new CommandException(e);
            }
            wfJob.setStatus(WorkflowJob.Status.RUNNING);
            wfJob.setStartTime(new Date());
            wfJob.setWorkflowInstance(workflowInstance);
            generateEvent = true;
            // 1. Add SLA status event for WF-JOB with status STARTED
            SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, Status.STARTED, SlaAppType.WORKFLOW_JOB);
            if (slaEvent != null) {
                insertList.add(slaEvent);
            }
            // 2. Add SLA registration events for all WF_ACTIONS
            createSLARegistrationForAllActions(workflowInstance.getApp().getDefinition(), wfJob.getUser(), wfJob.getGroup(), wfJob.getConf());
            queue(new WorkflowNotificationXCommand(wfJob));
        } else {
            throw new CommandException(ErrorCode.E0801, wfJob.getId());
        }
    } else {
        WorkflowInstance.Status initialStatus = workflowInstance.getStatus();
        String skipVar = workflowInstance.getVar(wfAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
        if (skipVar != null) {
            skipAction = skipVar.equals("true");
        }
        try {
            completed = workflowInstance.signal(wfAction.getExecutionPath(), wfAction.getSignalValue());
        } catch (WorkflowException e) {
            LOG.error("Workflow action failed : " + e.getMessage(), e);
            wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
            completed = true;
        }
        wfJob.setWorkflowInstance(workflowInstance);
        wfAction.resetPending();
        if (!skipAction) {
            wfAction.setTransition(workflowInstance.getTransition(wfAction.getName()));
            queue(new WorkflowNotificationXCommand(wfJob, wfAction));
        }
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS, wfAction));
        WorkflowInstance.Status endStatus = workflowInstance.getStatus();
        if (endStatus != initialStatus) {
            generateEvent = true;
        }
    }
    if (completed) {
        try {
            for (String actionToKillId : WorkflowStoreService.getActionsToKill(workflowInstance)) {
                WorkflowActionBean actionToKill;
                actionToKill = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, actionToKillId);
                actionToKill.setPending();
                actionToKill.setStatus(WorkflowActionBean.Status.KILLED);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToKill));
                queue(new ActionKillXCommand(actionToKill.getId(), actionToKill.getType()));
            }
            for (String actionToFailId : WorkflowStoreService.getActionsToFail(workflowInstance)) {
                WorkflowActionBean actionToFail = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_FAIL, actionToFailId);
                actionToFail.resetPending();
                actionToFail.setStatus(WorkflowActionBean.Status.FAILED);
                if (wfJobErrorCode != null) {
                    wfJobErrorCode = actionToFail.getErrorCode();
                    wfJobErrorMsg = actionToFail.getErrorMessage();
                }
                queue(new WorkflowNotificationXCommand(wfJob, actionToFail));
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(), Status.FAILED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToFail));
            }
        } catch (JPAExecutorException je) {
            throw new CommandException(je);
        }
        wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
        wfJob.setEndTime(new Date());
        wfJob.setWorkflowInstance(workflowInstance);
        Status slaStatus = Status.SUCCEEDED;
        switch(wfJob.getStatus()) {
            case SUCCEEDED:
                slaStatus = Status.SUCCEEDED;
                break;
            case KILLED:
                slaStatus = Status.KILLED;
                break;
            case FAILED:
                slaStatus = Status.FAILED;
                break;
            default:
                // TODO SUSPENDED
                break;
        }
        SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, slaStatus, SlaAppType.WORKFLOW_JOB);
        if (slaEvent != null) {
            insertList.add(slaEvent);
        }
        queue(new WorkflowNotificationXCommand(wfJob));
        if (wfJob.getStatus() == WorkflowJob.Status.SUCCEEDED) {
            InstrumentUtils.incrJobCounter(INSTR_SUCCEEDED_JOBS_COUNTER_NAME, 1, getInstrumentation());
        }
        // output message for Kill node
        if (wfAction != null) {
            // wfAction could be a no-op job
            NodeDef nodeDef = workflowInstance.getNodeDef(wfAction.getExecutionPath());
            if (nodeDef != null && nodeDef instanceof KillNodeDef) {
                boolean isRetry = false;
                boolean isUserRetry = false;
                ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
                InstrumentUtils.incrJobCounter(INSTR_KILLED_JOBS_COUNTER_NAME, 1, getInstrumentation());
                try {
                    String tmpNodeConf = nodeDef.getConf();
                    String message = context.getELEvaluator().evaluate(tmpNodeConf, String.class);
                    LOG.debug("Try to resolve KillNode message for jobid [{0}], actionId [{1}], before resolve [{2}], " + "after resolve [{3}]", jobId, actionId, tmpNodeConf, message);
                    if (wfAction.getErrorCode() != null) {
                        wfAction.setErrorInfo(wfAction.getErrorCode(), message);
                    } else {
                        wfAction.setErrorInfo(ErrorCode.E0729.toString(), message);
                    }
                } catch (Exception ex) {
                    LOG.warn("Exception in SignalXCommand when processing Kill node message: {0}", ex.getMessage(), ex);
                    wfAction.setErrorInfo(ErrorCode.E0756.toString(), ErrorCode.E0756.format(ex.getMessage()));
                    wfAction.setStatus(WorkflowAction.Status.ERROR);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS_ERROR, wfAction));
            }
        }
    } else {
        for (WorkflowActionBean newAction : WorkflowStoreService.getActionsToStart(workflowInstance)) {
            boolean isOldWFAction = false;
            // For a sub-workflow action this may be a re-used (old) action; check the DB so the same action is
            // not inserted twice, and carry over its external id and creation time.
            if (SubWorkflowActionExecutor.ACTION_TYPE.equals(newAction.getType())) {
                try {
                    WorkflowActionBean oldAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_CHECK, newAction.getId());
                    newAction.setExternalId(oldAction.getExternalId());
                    newAction.setCreatedTime(oldAction.getCreatedTime());
                    isOldWFAction = true;
                } catch (JPAExecutorException e) {
                    if (e.getErrorCode() != ErrorCode.E0605) {
                        throw new CommandException(e);
                    }
                }
            }
            String skipVar = workflowInstance.getVar(newAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
            boolean skipNewAction = false, suspendNewAction = false;
            if (skipVar != null) {
                skipNewAction = skipVar.equals("true");
            }
            if (skipNewAction) {
                WorkflowActionBean oldAction = new WorkflowActionBean();
                oldAction.setId(newAction.getId());
                oldAction.setPending();
                oldAction.setExecutionPath(newAction.getExecutionPath());
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING, oldAction));
                queue(new SignalXCommand(jobId, oldAction.getId()));
            } else {
                if (!skipAction) {
                    try {
                        // Make sure that transition node for a forked action
                        // is inserted only once
                        WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, newAction.getId());
                        continue;
                    } catch (JPAExecutorException jee) {
                    }
                }
                suspendNewAction = checkForSuspendNode(newAction);
                newAction.setPending();
                String actionSlaXml = getActionSLAXml(newAction.getName(), workflowInstance.getApp().getDefinition(), wfJob.getConf());
                newAction.setSlaXml(actionSlaXml);
                if (!isOldWFAction) {
                    newAction.setCreatedTime(new Date());
                    insertList.add(newAction);
                } else {
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START, newAction));
                }
                LOG.debug("SignalXCommand: Name: " + newAction.getName() + ", Id: " + newAction.getId() + ", Authcode:" + newAction.getCred());
                if (wfAction != null) {
                    // null during wf job submit
                    ActionService as = Services.get().get(ActionService.class);
                    ActionExecutor current = as.getExecutor(wfAction.getType());
                    LOG.trace("Current Action Type:" + current.getClass());
                    if (!suspendNewAction) {
                        if (current instanceof StartActionExecutor) {
                            // Excluding :start: here from executing first action synchronously since it
                            // blocks the consumer thread till the action is submitted to Hadoop,
                            // in turn reducing the number of new submissions the threads can accept.
                            // Would also be susceptible to longer delays in case Hadoop cluster is busy.
                            queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                        } else if (current instanceof ForkActionExecutor) {
                            if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                                workflowActionBeanListForForked.add(newAction);
                            } else {
                                queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                            }
                        } else {
                            syncAction = newAction;
                        }
                    } else {
                        // node is marked to suspend: the action is not started here, and any forked start is
                        // skipped later by the checkForSuspendNode check before startForkedActions.
                        if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                            workflowActionBeanListForForked.add(newAction);
                        }
                    }
                } else {
                    // first action after wf submit should always be sync
                    syncAction = newAction;
                }
            }
        }
    }
    try {
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_START_END, wfJob));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        if (prevStatus != wfJob.getStatus()) {
            LOG.debug("Updated the workflow status to " + wfJob.getId() + "  status =" + wfJob.getStatusStr());
        }
        if (generateEvent && EventHandlerService.isEnabled()) {
            generateEvent(wfJob, wfJobErrorCode, wfJobErrorMsg);
        }
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    }
    // start the next action synchronously where possible, to avoid undue delay between the end of the
    // previous action and the start of the next action
    if (wfJob.getStatus() != WorkflowJob.Status.RUNNING && wfJob.getStatus() != WorkflowJob.Status.SUSPENDED) {
        // only for asynchronous actions, the parent coord action's external id will be
        // persisted and the following update will succeed.
        updateParentIfNecessary(wfJob);
        // To delete the WF temp dir
        new WfEndXCommand(wfJob).call();
    } else if (syncAction != null) {
        new ActionStartXCommand(wfJob, syncAction.getId(), syncAction.getType()).call();
    } else if (!workflowActionBeanListForForked.isEmpty() && !checkForSuspendNode(workflowActionBeanListForForked)) {
        startForkedActions(workflowActionBeanListForForked);
    }
    LOG.debug("ENDED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
    return null;
}
Also used : NodeDef(org.apache.oozie.workflow.lite.NodeDef) KillNodeDef(org.apache.oozie.workflow.lite.KillNodeDef) ArrayList(java.util.ArrayList) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) WorkflowActionBean(org.apache.oozie.WorkflowActionBean) JPAExecutorException(org.apache.oozie.executor.jpa.JPAExecutorException) WorkflowJob(org.apache.oozie.client.WorkflowJob) ForkedActionExecutorContext(org.apache.oozie.command.wf.ActionXCommand.ForkedActionExecutorContext) ActionExecutorContext(org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext) ActionService(org.apache.oozie.service.ActionService) Status(org.apache.oozie.client.SLAEvent.Status) SubWorkflowActionExecutor(org.apache.oozie.action.oozie.SubWorkflowActionExecutor) ForkActionExecutor(org.apache.oozie.action.control.ForkActionExecutor) StartActionExecutor(org.apache.oozie.action.control.StartActionExecutor) ActionExecutor(org.apache.oozie.action.ActionExecutor) WorkflowActionQuery(org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery) WorkflowException(org.apache.oozie.workflow.WorkflowException) CommandException(org.apache.oozie.command.CommandException) Date(java.util.Date) SLAEventBean(org.apache.oozie.SLAEventBean) PreconditionException(org.apache.oozie.command.PreconditionException) IOException(java.io.IOException) XException(org.apache.oozie.XException) WorkflowJobQuery(org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery)
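
Much of the branching above hinges on one naming convention: a node is flagged for skipping by a per-node instance variable keyed as the action name, WorkflowInstance.NODE_VAR_SEPARATOR, and ReRunXCommand.TO_SKIP, with the string value "true". A small helper capturing just that lookup; the helper itself is illustrative.

private static boolean isNodeMarkedToSkip(WorkflowInstance instance, String actionName) {
    // mirrors the skipVar checks in SignalXCommand.execute()
    String skipVar = instance.getVar(actionName + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
    return "true".equals(skipVar);
}

The TO_SKIP constant lives on ReRunXCommand, which suggests the flag is written during rerun setup; in this snippet it is only read, once for the signalled action and once for each action about to start.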

Aggregations

WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance): 34
WorkflowJobBean (org.apache.oozie.WorkflowJobBean): 26
LiteWorkflowInstance (org.apache.oozie.workflow.lite.LiteWorkflowInstance): 17
WorkflowActionBean (org.apache.oozie.WorkflowActionBean): 16
Date (java.util.Date): 15
JPAService (org.apache.oozie.service.JPAService): 12
XConfiguration (org.apache.oozie.util.XConfiguration): 12
JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException): 11
CommandException (org.apache.oozie.command.CommandException): 10
WorkflowJobGetJPAExecutor (org.apache.oozie.executor.jpa.WorkflowJobGetJPAExecutor): 10
WorkflowActionGetJPAExecutor (org.apache.oozie.executor.jpa.WorkflowActionGetJPAExecutor): 9
WorkflowStoreService (org.apache.oozie.service.WorkflowStoreService): 9
WorkflowLib (org.apache.oozie.workflow.WorkflowLib): 9
Configuration (org.apache.hadoop.conf.Configuration): 8
LiteWorkflowStoreService (org.apache.oozie.service.LiteWorkflowStoreService): 7
WorkflowAppService (org.apache.oozie.service.WorkflowAppService): 7
WorkflowException (org.apache.oozie.workflow.WorkflowException): 7
ELEvaluator (org.apache.oozie.util.ELEvaluator): 6
LiteWorkflowApp (org.apache.oozie.workflow.lite.LiteWorkflowApp): 6
WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery): 5