
Example 1 with WorkflowJobQuery

Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.

Class ReRunXCommand, method setupReRun:

private void setupReRun() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    LogUtils.setLogInfo(wfBean);
    WorkflowInstance oldWfInstance = this.wfBean.getWorkflowInstance();
    WorkflowInstance newWfInstance;
    String appPath = null;
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        WorkflowApp app = wps.parseDef(conf, null);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        appPath = conf.get(OozieClient.APP_PATH);
        URI uri = new URI(appPath);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(wfBean.getUser(), uri, fsConf);
        Path configDefault = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, SubmitXCommand.CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            XConfiguration.injectDefaults(defaultConf, conf);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolve all variables in the job properties. This preserves Hadoop Configuration semantics: the
        // Configuration.get call inside XConfiguration.resolve() works recursively to find the final value
        // for each key. Resetting conf to the resolved values is necessary so that Oozie properties
        // propagate to downstream Hadoop calls.
        conf = ((XConfiguration) conf).resolve();
        try {
            newWfInstance = workflowLib.createInstance(app, conf, jobId);
        } catch (WorkflowException e) {
            throw new CommandException(e);
        }
        String appName = ELUtils.resolveAppName(app.getName(), conf);
        if (SLAService.isEnabled()) {
            Element wfElem = XmlUtils.parseXml(app.getDefinition());
            ELEvaluator evalSla = SubmitXCommand.createELEvaluatorForGroup(conf, "wf-sla-submit");
            Element eSla = XmlUtils.getSLAElement(wfElem);
            String jobSlaXml = null;
            if (eSla != null) {
                jobSlaXml = SubmitXCommand.resolveSla(eSla, evalSla);
            }
            writeSLARegistration(wfElem, jobSlaXml, newWfInstance.getId(), conf.get(SubWorkflowActionExecutor.PARENT_ID), conf.get(OozieClient.USER_NAME), appName, evalSla);
        }
        wfBean.setAppName(appName);
        wfBean.setProtoActionConf(protoActionConf.toXmlString());
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (IOException ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (URISyntaxException ex) {
        throw new CommandException(ErrorCode.E0711, appPath, ex.getMessage(), ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E1007, ex.getMessage(), ex);
    }
    for (int i = 0; i < actions.size(); i++) {
        // Delete the action so the rerun re-creates it, unless the node is being skipped or it is a
        // sub-workflow action during a fail-nodes rerun; for skipped actions the old action data is copied over.
        if (!nodesToSkip.contains(actions.get(i).getName()) && !(conf.getBoolean(OozieClient.RERUN_FAIL_NODES, false) && SubWorkflowActionExecutor.ACTION_TYPE.equals(actions.get(i).getType()))) {
            deleteList.add(actions.get(i));
            LOG.info("Deleting Action[{0}] for re-run", actions.get(i).getId());
        } else {
            copyActionData(newWfInstance, oldWfInstance);
        }
    }
    wfBean.setAppPath(conf.get(OozieClient.APP_PATH));
    wfBean.setConf(XmlUtils.prettyPrint(conf).toString());
    wfBean.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
    wfBean.setUser(conf.get(OozieClient.USER_NAME));
    String group = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.JOB_ACL, OozieClient.GROUP_NAME, null);
    wfBean.setGroup(group);
    wfBean.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
    wfBean.setEndTime(null);
    wfBean.setRun(wfBean.getRun() + 1);
    wfBean.setStatus(WorkflowJob.Status.PREP);
    wfBean.setWorkflowInstance(newWfInstance);
    try {
        wfBean.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_RERUN, wfBean));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, deleteList);
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    } finally {
        updateParentIfNecessary(wfBean);
    }
}
Also used: WorkflowApp (org.apache.oozie.workflow.WorkflowApp), Configuration (org.apache.hadoop.conf.Configuration), XConfiguration (org.apache.oozie.util.XConfiguration), Element (org.jdom.Element), URI (java.net.URI), URISyntaxException (java.net.URISyntaxException), WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance), JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException), FileSystem (org.apache.hadoop.fs.FileSystem), Path (org.apache.hadoop.fs.Path), ELEvaluator (org.apache.oozie.util.ELEvaluator), WorkflowLib (org.apache.oozie.workflow.WorkflowLib), WorkflowAppService (org.apache.oozie.service.WorkflowAppService), WorkflowStoreService (org.apache.oozie.service.WorkflowStoreService), WorkflowException (org.apache.oozie.workflow.WorkflowException), HadoopAccessorService (org.apache.oozie.service.HadoopAccessorService), HadoopAccessorException (org.apache.oozie.service.HadoopAccessorException), CommandException (org.apache.oozie.command.CommandException), PreconditionException (org.apache.oozie.command.PreconditionException), IOException (java.io.IOException), JDOMException (org.jdom.JDOMException), Date (java.util.Date), WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery)
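The WorkflowJobQuery-specific part of this rerun path is the batched write at the end: the job bean is staged as an UpdateEntry keyed by UPDATE_WORKFLOW_RERUN, the stale actions go into a delete list, and everything is flushed in a single call. Below is a minimal, self-contained sketch of that pattern, built only from the calls shown above; the class and method names (RerunPersistence, persistRerun) are illustrative and not part of the Oozie sources.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.rest.JsonBean;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

final class RerunPersistence {
    // Persist a rerun in one batch: update the job row and delete the actions that will be re-created.
    static void persistRerun(WorkflowJobBean wfBean, List<WorkflowActionBean> actionsToDelete)
            throws JPAExecutorException {
        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        List<JsonBean> deleteList = new ArrayList<JsonBean>(actionsToDelete);

        wfBean.setLastModifiedTime(new Date());
        // Same named query ReRunXCommand uses above.
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_RERUN, wfBean));

        // One bulk write: no inserts, the job update, and the stale-action deletes.
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, deleteList);
    }
}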

Example 2 with WorkflowJobQuery

Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.

Class KillXCommand, method execute:

@Override
protected Void execute() throws CommandException {
    LOG.info("STARTED WorkflowKillXCommand for jobId=" + wfId);
    wfJob.setEndTime(new Date());
    if (wfJob.getStatus() != WorkflowJob.Status.FAILED) {
        InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
        wfJob.setStatus(WorkflowJob.Status.KILLED);
        SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), wfJob.getId(), Status.KILLED, SlaAppType.WORKFLOW_JOB);
        if (slaEvent != null) {
            insertList.add(slaEvent);
        }
        try {
            wfJob.getWorkflowInstance().kill();
        } catch (WorkflowException e) {
            throw new CommandException(ErrorCode.E0725, e.getMessage(), e);
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        ((LiteWorkflowInstance) wfInstance).setStatus(WorkflowInstance.Status.KILLED);
        wfJob.setWorkflowInstance(wfInstance);
    }
    try {
        for (WorkflowActionBean action : actionList) {
            if (action.getStatus() == WorkflowActionBean.Status.RUNNING || action.getStatus() == WorkflowActionBean.Status.DONE) {
                if (!(actionService.getExecutor(action.getType()) instanceof ControlNodeActionExecutor)) {
                    action.setPending();
                }
                action.setStatus(WorkflowActionBean.Status.KILLED);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, action));
                queue(new ActionKillXCommand(action.getId(), action.getType()));
            } else if (action.getStatus() == WorkflowActionBean.Status.PREP || action.getStatus() == WorkflowActionBean.Status.START_RETRY || action.getStatus() == WorkflowActionBean.Status.START_MANUAL || action.getStatus() == WorkflowActionBean.Status.END_RETRY || action.getStatus() == WorkflowActionBean.Status.END_MANUAL || action.getStatus() == WorkflowActionBean.Status.USER_RETRY) {
                action.setStatus(WorkflowActionBean.Status.KILLED);
                action.resetPending();
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(action.getSlaXml(), action.getId(), Status.KILLED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, action));
                if (EventHandlerService.isEnabled() && !(actionService.getExecutor(action.getType()) instanceof ControlNodeActionExecutor)) {
                    generateEvent(action, wfJob.getUser());
                }
            }
        }
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_END, wfJob));
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        if (EventHandlerService.isEnabled()) {
            generateEvent(wfJob);
        }
        queue(new WorkflowNotificationXCommand(wfJob));
    } catch (JPAExecutorException e) {
        throw new CommandException(e);
    } finally {
        if (wfJob.getStatus() == WorkflowJob.Status.KILLED) {
            // To delete the WF temp dir
            new WfEndXCommand(wfJob).call();
        }
        updateParentIfNecessary(wfJob);
    }
    LOG.info("ENDED WorkflowKillXCommand for jobId=" + wfId);
    return null;
}
Also used: WorkflowActionQuery (org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery), UpdateEntry (org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry), WorkflowException (org.apache.oozie.workflow.WorkflowException), WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance), LiteWorkflowInstance (org.apache.oozie.workflow.lite.LiteWorkflowInstance), CommandException (org.apache.oozie.command.CommandException), Date (java.util.Date), SLAEventBean (org.apache.oozie.SLAEventBean), WorkflowActionBean (org.apache.oozie.WorkflowActionBean), JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException), WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery), ControlNodeActionExecutor (org.apache.oozie.action.control.ControlNodeActionExecutor)
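After the batch write, callers that need the refreshed job state can read the bean back through WorkflowJobQueryExecutor with the GET_WORKFLOW named query, the same call the rollback test in Example 5 uses. A small hedged sketch; the class name WorkflowStatusReader is illustrative and not from the Oozie sources.

import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

final class WorkflowStatusReader {
    // Fetch the job row and check whether the kill has been persisted.
    static boolean isKilled(String jobId) throws JPAExecutorException {
        WorkflowJobBean job = WorkflowJobQueryExecutor.getInstance()
                .get(WorkflowJobQuery.GET_WORKFLOW, jobId);
        return "KILLED".equals(job.getStatusStr());
    }
}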

Example 3 with WorkflowJobQuery

Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.

Class SignalXCommand, method execute:

@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
    WorkflowInstance workflowInstance = wfJob.getWorkflowInstance();
    workflowInstance.setTransientVar(WorkflowStoreService.WORKFLOW_BEAN, wfJob);
    WorkflowJob.Status prevStatus = wfJob.getStatus();
    boolean completed = false, skipAction = false;
    WorkflowActionBean syncAction = null;
    List<WorkflowActionBean> workflowActionBeanListForForked = new ArrayList<WorkflowActionBean>();
    if (wfAction == null) {
        if (wfJob.getStatus() == WorkflowJob.Status.PREP) {
            try {
                completed = workflowInstance.start();
            } catch (WorkflowException e) {
                throw new CommandException(e);
            }
            wfJob.setStatus(WorkflowJob.Status.RUNNING);
            wfJob.setStartTime(new Date());
            wfJob.setWorkflowInstance(workflowInstance);
            generateEvent = true;
            // 1. Add SLA status event for WF-JOB with status STARTED
            SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, Status.STARTED, SlaAppType.WORKFLOW_JOB);
            if (slaEvent != null) {
                insertList.add(slaEvent);
            }
            // 2. Add SLA registration events for all WF_ACTIONS
            createSLARegistrationForAllActions(workflowInstance.getApp().getDefinition(), wfJob.getUser(), wfJob.getGroup(), wfJob.getConf());
            queue(new WorkflowNotificationXCommand(wfJob));
        } else {
            throw new CommandException(ErrorCode.E0801, wfJob.getId());
        }
    } else {
        WorkflowInstance.Status initialStatus = workflowInstance.getStatus();
        String skipVar = workflowInstance.getVar(wfAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
        if (skipVar != null) {
            skipAction = skipVar.equals("true");
        }
        try {
            completed = workflowInstance.signal(wfAction.getExecutionPath(), wfAction.getSignalValue());
        } catch (WorkflowException e) {
            LOG.error("Workflow action failed : " + e.getMessage(), e);
            wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
            completed = true;
        }
        wfJob.setWorkflowInstance(workflowInstance);
        wfAction.resetPending();
        if (!skipAction) {
            wfAction.setTransition(workflowInstance.getTransition(wfAction.getName()));
            queue(new WorkflowNotificationXCommand(wfJob, wfAction));
        }
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS, wfAction));
        WorkflowInstance.Status endStatus = workflowInstance.getStatus();
        if (endStatus != initialStatus) {
            generateEvent = true;
        }
    }
    if (completed) {
        try {
            for (String actionToKillId : WorkflowStoreService.getActionsToKill(workflowInstance)) {
                WorkflowActionBean actionToKill;
                actionToKill = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, actionToKillId);
                actionToKill.setPending();
                actionToKill.setStatus(WorkflowActionBean.Status.KILLED);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToKill));
                queue(new ActionKillXCommand(actionToKill.getId(), actionToKill.getType()));
            }
            for (String actionToFailId : WorkflowStoreService.getActionsToFail(workflowInstance)) {
                WorkflowActionBean actionToFail = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_FAIL, actionToFailId);
                actionToFail.resetPending();
                actionToFail.setStatus(WorkflowActionBean.Status.FAILED);
                if (wfJobErrorCode != null) {
                    wfJobErrorCode = actionToFail.getErrorCode();
                    wfJobErrorMsg = actionToFail.getErrorMessage();
                }
                queue(new WorkflowNotificationXCommand(wfJob, actionToFail));
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(), Status.FAILED, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToFail));
            }
        } catch (JPAExecutorException je) {
            throw new CommandException(je);
        }
        wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
        wfJob.setEndTime(new Date());
        wfJob.setWorkflowInstance(workflowInstance);
        Status slaStatus = Status.SUCCEEDED;
        switch(wfJob.getStatus()) {
            case SUCCEEDED:
                slaStatus = Status.SUCCEEDED;
                break;
            case KILLED:
                slaStatus = Status.KILLED;
                break;
            case FAILED:
                slaStatus = Status.FAILED;
                break;
            default:
                // TODO SUSPENDED
                break;
        }
        SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, slaStatus, SlaAppType.WORKFLOW_JOB);
        if (slaEvent != null) {
            insertList.add(slaEvent);
        }
        queue(new WorkflowNotificationXCommand(wfJob));
        if (wfJob.getStatus() == WorkflowJob.Status.SUCCEEDED) {
            InstrumentUtils.incrJobCounter(INSTR_SUCCEEDED_JOBS_COUNTER_NAME, 1, getInstrumentation());
        }
        // output message for Kill node
        if (wfAction != null) {
            // wfAction could be a no-op job
            NodeDef nodeDef = workflowInstance.getNodeDef(wfAction.getExecutionPath());
            if (nodeDef != null && nodeDef instanceof KillNodeDef) {
                boolean isRetry = false;
                boolean isUserRetry = false;
                ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
                InstrumentUtils.incrJobCounter(INSTR_KILLED_JOBS_COUNTER_NAME, 1, getInstrumentation());
                try {
                    String tmpNodeConf = nodeDef.getConf();
                    String message = context.getELEvaluator().evaluate(tmpNodeConf, String.class);
                    LOG.debug("Try to resolve KillNode message for jobid [{0}], actionId [{1}], before resolve [{2}], " + "after resolve [{3}]", jobId, actionId, tmpNodeConf, message);
                    if (wfAction.getErrorCode() != null) {
                        wfAction.setErrorInfo(wfAction.getErrorCode(), message);
                    } else {
                        wfAction.setErrorInfo(ErrorCode.E0729.toString(), message);
                    }
                } catch (Exception ex) {
                    LOG.warn("Exception in SignalXCommand when processing Kill node message: {0}", ex.getMessage(), ex);
                    wfAction.setErrorInfo(ErrorCode.E0756.toString(), ErrorCode.E0756.format(ex.getMessage()));
                    wfAction.setStatus(WorkflowAction.Status.ERROR);
                }
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS_ERROR, wfAction));
            }
        }
    } else {
        for (WorkflowActionBean newAction : WorkflowStoreService.getActionsToStart(workflowInstance)) {
            boolean isOldWFAction = false;
            // A sub-workflow action may already exist from a previous run; check the DB so the same
            // action is not inserted twice.
            if (SubWorkflowActionExecutor.ACTION_TYPE.equals(newAction.getType())) {
                try {
                    WorkflowActionBean oldAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_CHECK, newAction.getId());
                    newAction.setExternalId(oldAction.getExternalId());
                    newAction.setCreatedTime(oldAction.getCreatedTime());
                    isOldWFAction = true;
                } catch (JPAExecutorException e) {
                    if (e.getErrorCode() != ErrorCode.E0605) {
                        throw new CommandException(e);
                    }
                }
            }
            String skipVar = workflowInstance.getVar(newAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
            boolean skipNewAction = false, suspendNewAction = false;
            if (skipVar != null) {
                skipNewAction = skipVar.equals("true");
            }
            if (skipNewAction) {
                WorkflowActionBean oldAction = new WorkflowActionBean();
                oldAction.setId(newAction.getId());
                oldAction.setPending();
                oldAction.setExecutionPath(newAction.getExecutionPath());
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING, oldAction));
                queue(new SignalXCommand(jobId, oldAction.getId()));
            } else {
                if (!skipAction) {
                    try {
                        // Make sure that transition node for a forked action
                        // is inserted only once
                        WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, newAction.getId());
                        continue;
                    } catch (JPAExecutorException jee) {
                        // Not found: the forked action has not been inserted yet, so fall through and create it.
                    }
                }
                suspendNewAction = checkForSuspendNode(newAction);
                newAction.setPending();
                String actionSlaXml = getActionSLAXml(newAction.getName(), workflowInstance.getApp().getDefinition(), wfJob.getConf());
                newAction.setSlaXml(actionSlaXml);
                if (!isOldWFAction) {
                    newAction.setCreatedTime(new Date());
                    insertList.add(newAction);
                } else {
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START, newAction));
                }
                LOG.debug("SignalXCommand: Name: " + newAction.getName() + ", Id: " + newAction.getId() + ", Authcode:" + newAction.getCred());
                if (wfAction != null) {
                    // null during wf job submit
                    ActionService as = Services.get().get(ActionService.class);
                    ActionExecutor current = as.getExecutor(wfAction.getType());
                    LOG.trace("Current Action Type:" + current.getClass());
                    if (!suspendNewAction) {
                        if (current instanceof StartActionExecutor) {
                            // Excluding :start: here from executing first action synchronously since it
                            // blocks the consumer thread till the action is submitted to Hadoop,
                            // in turn reducing the number of new submissions the threads can accept.
                            // Would also be susceptible to longer delays in case Hadoop cluster is busy.
                            queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                        } else if (current instanceof ForkActionExecutor) {
                            if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                                workflowActionBeanListForForked.add(newAction);
                            } else {
                                queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                            }
                        } else {
                            syncAction = newAction;
                        }
                    } else {
                        // The action is to be suspended, so it is not started here; with parallel fork
                        // submission enabled it is still collected with the forked actions.
                        if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                            workflowActionBeanListForForked.add(newAction);
                        }
                    }
                } else {
                    // first action after wf submit should always be sync
                    syncAction = newAction;
                }
            }
        }
    }
    try {
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_START_END, wfJob));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        if (prevStatus != wfJob.getStatus()) {
            LOG.debug("Updated the workflow status to " + wfJob.getId() + "  status =" + wfJob.getStatusStr());
        }
        if (generateEvent && EventHandlerService.isEnabled()) {
            generateEvent(wfJob, wfJobErrorCode, wfJobErrorMsg);
        }
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    }
    // Run the next action synchronously instead of queuing it, to prevent undue delay between the end of
    // the previous action and the start of the next one.
    if (wfJob.getStatus() != WorkflowJob.Status.RUNNING && wfJob.getStatus() != WorkflowJob.Status.SUSPENDED) {
        // Only for asynchronous actions will the parent coord action's external id have been
        // persisted, so the following update will succeed.
        updateParentIfNecessary(wfJob);
        // To delete the WF temp dir
        new WfEndXCommand(wfJob).call();
    } else if (syncAction != null) {
        new ActionStartXCommand(wfJob, syncAction.getId(), syncAction.getType()).call();
    } else if (!workflowActionBeanListForForked.isEmpty() && !checkForSuspendNode(workflowActionBeanListForForked)) {
        startForkedActions(workflowActionBeanListForForked);
    }
    LOG.debug("ENDED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
    return null;
}
Also used: NodeDef (org.apache.oozie.workflow.lite.NodeDef), KillNodeDef (org.apache.oozie.workflow.lite.KillNodeDef), ArrayList (java.util.ArrayList), WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance), WorkflowActionBean (org.apache.oozie.WorkflowActionBean), WorkflowJob (org.apache.oozie.client.WorkflowJob), ForkedActionExecutorContext (org.apache.oozie.command.wf.ActionXCommand.ForkedActionExecutorContext), ActionExecutorContext (org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext), ActionService (org.apache.oozie.service.ActionService), Status (org.apache.oozie.client.SLAEvent.Status), SubWorkflowActionExecutor (org.apache.oozie.action.oozie.SubWorkflowActionExecutor), ForkActionExecutor (org.apache.oozie.action.control.ForkActionExecutor), StartActionExecutor (org.apache.oozie.action.control.StartActionExecutor), ActionExecutor (org.apache.oozie.action.ActionExecutor), WorkflowActionQuery (org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery), WorkflowException (org.apache.oozie.workflow.WorkflowException), CommandException (org.apache.oozie.command.CommandException), PreconditionException (org.apache.oozie.command.PreconditionException), JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException), IOException (java.io.IOException), XException (org.apache.oozie.XException), Date (java.util.Date), SLAEventBean (org.apache.oozie.SLAEventBean), WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery)
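One detail worth isolating from this long method is the per-node skip marker: ReRunXCommand stores a transient workflow variable per node name, and SignalXCommand reads it back (twice above) to decide whether an action is skipped. A hedged helper sketch built only from the calls shown above; the class and method names are illustrative, and it is placed in the same package as SignalXCommand on the assumption that ReRunXCommand.TO_SKIP may only be package-visible.

package org.apache.oozie.command.wf;

import org.apache.oozie.workflow.WorkflowInstance;

final class SkipNodeCheck {
    // True when the rerun marked this node to be skipped.
    static boolean isNodeSkipped(WorkflowInstance instance, String nodeName) {
        String skipVar = instance.getVar(
                nodeName + WorkflowInstance.NODE_VAR_SEPARATOR + ReRunXCommand.TO_SKIP);
        // "true".equals(...) also covers the null (marker absent) case checked explicitly above.
        return "true".equals(skipVar);
    }
}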

Example 4 with WorkflowJobQuery

Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.

Class ActionStartXCommand, method updateJobLastModified:

protected void updateJobLastModified() {
    wfJob.setLastModifiedTime(new Date());
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
}
Also used: WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery), Date (java.util.Date)
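updateJobLastModified only stages the entry; the actual write happens when the command later flushes its lists through BatchQueryExecutor, as Examples 1 to 3 do. A stand-alone hedged sketch of that stage-then-flush split, using the same named query; the class LastModifiedUpdater is illustrative and not part of the Oozie sources.

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

final class LastModifiedUpdater {
    static void touchAndFlush(WorkflowJobBean wfJob) throws JPAExecutorException {
        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        wfJob.setLastModifiedTime(new Date());
        // Stage: same named query as updateJobLastModified() above.
        updateList.add(new UpdateEntry<WorkflowJobQuery>(
                WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
        // Flush: one bulk write; insert and delete lists are not needed here.
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
    }
}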

Example 5 with WorkflowJobQuery

Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.

Class TestBatchQueryExecutor, method testExecuteBatchUpdateInsertDeleteRollBack:

public void testExecuteBatchUpdateInsertDeleteRollBack() throws Exception {
    BatchQueryExecutor executor = BatchQueryExecutor.getInstance();
    WorkflowJobBean job = addRecordToWfJobTable(WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    WorkflowActionBean action1 = createWorkflowAction(job.getId(), "1", WorkflowAction.Status.PREP);
    WorkflowActionBean action2 = createWorkflowAction(job.getId(), "2", WorkflowAction.Status.PREP);
    job.setStatus(WorkflowJob.Status.RUNNING);
    Collection<JsonBean> insertList = new ArrayList<JsonBean>();
    insertList.add(action1);
    insertList.add(action2);
    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    // Stage the RUNNING status change for the job in the update list
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, job));
    // Enable fault injection so the transaction is rolled back
    setSystemProperty(FaultInjection.FAULT_INJECTION, "true");
    setSystemProperty(SkipCommitFaultInjection.ACTION_FAILOVER_FAULT_INJECTION, "true");
    FaultInjection.activate("org.apache.oozie.command.SkipCommitFaultInjection");
    try {
        executor.executeBatchInsertUpdateDelete(insertList, updateList, null);
        fail("Expected exception due to commit failure but didn't get any");
    } catch (Exception e) {
        // Expected: the commit fails because of the injected fault.
    }
    FaultInjection.deactivate("org.apache.oozie.command.SkipCommitFaultInjection");
    // Check whether transactions are rolled back or not
    WorkflowJobBean wfBean = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, job.getId());
    // status should not be RUNNING
    assertEquals("PREP", wfBean.getStatusStr());
    WorkflowActionBean waBean;
    try {
        waBean = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION, action1.getId());
        fail("Expected exception but didnt get any");
    } catch (JPAExecutorException jpaee) {
        assertEquals(ErrorCode.E0605, jpaee.getErrorCode());
    }
    try {
        waBean = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION, action2.getId());
        fail("Expected exception but didnt get any");
    } catch (JPAExecutorException jpaee) {
        assertEquals(ErrorCode.E0605, jpaee.getErrorCode());
    }
}
Also used: JsonBean (org.apache.oozie.client.rest.JsonBean), UpdateEntry (org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry), WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery), ArrayList (java.util.ArrayList), WorkflowJobBean (org.apache.oozie.WorkflowJobBean), WorkflowActionBean (org.apache.oozie.WorkflowActionBean)
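The rollback test above has a natural happy-path counterpart: without fault injection the batch commits, so the staged update is visible on read-back. A hedged sketch reusing the same test helpers; addRecordToWfJobTable comes from the surrounding test class, and testExecuteBatchUpdateCommits is an illustrative name, not an existing test.

public void testExecuteBatchUpdateCommits() throws Exception {
    // Create a PREP job, then stage a status change to RUNNING.
    WorkflowJobBean job = addRecordToWfJobTable(WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    job.setStatus(WorkflowJob.Status.RUNNING);

    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, job));

    // No fault injection: the bulk write should commit.
    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);

    // Read the row back and confirm the status change was persisted.
    WorkflowJobBean readBack = WorkflowJobQueryExecutor.getInstance()
            .get(WorkflowJobQuery.GET_WORKFLOW, job.getId());
    assertEquals("RUNNING", readBack.getStatusStr());
}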

Aggregations

WorkflowJobQuery (org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery): 15 usages
Date (java.util.Date): 12 usages
CommandException (org.apache.oozie.command.CommandException): 10 usages
JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException): 10 usages
UpdateEntry (org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry): 9 usages
WorkflowActionQuery (org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery): 8 usages
ArrayList (java.util.ArrayList): 7 usages
WorkflowActionBean (org.apache.oozie.WorkflowActionBean): 7 usages
WorkflowException (org.apache.oozie.workflow.WorkflowException): 6 usages
IOException (java.io.IOException): 5 usages
SLAEventBean (org.apache.oozie.SLAEventBean): 5 usages
WorkflowJobBean (org.apache.oozie.WorkflowJobBean): 5 usages
JsonBean (org.apache.oozie.client.rest.JsonBean): 5 usages
WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance): 5 usages
PreconditionException (org.apache.oozie.command.PreconditionException): 4 usages
Configuration (org.apache.hadoop.conf.Configuration): 3 usages
ActionExecutorException (org.apache.oozie.action.ActionExecutorException): 3 usages
ControlNodeActionExecutor (org.apache.oozie.action.control.ControlNodeActionExecutor): 3 usages
ActionExecutorContext (org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext): 3 usages
JPAService (org.apache.oozie.service.JPAService): 3 usages