Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class TestBatchQueryExecutor, the method testExecuteBatchUpdateInsertDelete:
public void testExecuteBatchUpdateInsertDelete() throws Exception {
    BatchQueryExecutor executor = BatchQueryExecutor.getInstance();
    // records to update
    CoordinatorJobBean coordJob = addRecordToCoordJobTable(CoordinatorJob.Status.PREP, true, true);
    WorkflowJobBean wfJob = addRecordToWfJobTable(WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    WorkflowActionBean wfAction = addRecordToWfActionTable(wfJob.getId(), "1", WorkflowAction.Status.PREP);
    // record to insert
    CoordinatorActionBean coordAction = new CoordinatorActionBean();
    coordAction.setId("testCoordAction1");
    JPAService jpaService = Services.get().get(JPAService.class);
    // update the statuses in memory
    coordJob.setStatus(CoordinatorJob.Status.RUNNING);
    wfJob.setStatus(WorkflowJob.Status.SUCCEEDED);
    // build the update list for the bulk write
    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    updateList.add(new UpdateEntry<CoordJobQuery>(CoordJobQuery.UPDATE_COORD_JOB_STATUS_MODTIME, coordJob));
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW, wfJob));
    // beans to insert
    Collection<JsonBean> insertList = new ArrayList<JsonBean>();
    insertList.add(coordAction);
    // beans to delete
    Collection<JsonBean> deleteList = new ArrayList<JsonBean>();
    deleteList.add(wfAction);
    executor.executeBatchInsertUpdateDelete(insertList, updateList, deleteList);
    // check the updates after running executeBatchInsertUpdateDelete
    coordJob = CoordJobQueryExecutor.getInstance().get(CoordJobQuery.GET_COORD_JOB, coordJob.getId());
    assertEquals("RUNNING", coordJob.getStatusStr());
    wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, wfJob.getId());
    assertEquals("SUCCEEDED", wfJob.getStatusStr());
    // check the insert
    coordAction = CoordActionQueryExecutor.getInstance().get(CoordActionQuery.GET_COORD_ACTION, coordAction.getId());
    assertEquals("testCoordAction1", coordAction.getId());
    // check the delete: fetching the removed action must fail with E0605 (record not found)
    try {
        wfAction = WorkflowActionQueryExecutor.getInstance().get(WorkflowActionQuery.GET_ACTION, wfAction.getId());
        fail("Expected JPAExecutorException because the workflow action was deleted");
    } catch (JPAExecutorException ex) {
        assertEquals("E0605", ex.getErrorCode().toString());
    }
}
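When only one row changes, the batch API is not strictly needed. A minimal sketch of the single-entity alternative, assuming the per-entity executors expose an executeUpdate(namedQuery, bean) method on WorkflowJobQueryExecutor; the markSucceeded helper below is illustration only, not part of Oozie:

import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

// Sketch only: persist a status change on a single workflow job without building a batch.
public final class SingleUpdateSketch {
    static void markSucceeded(String wfId) throws JPAExecutorException {
        // read the current row, mutate the bean in memory, then issue one named update
        // (assumes WorkflowJobQueryExecutor exposes executeUpdate(namedQuery, bean))
        WorkflowJobBean wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, wfId);
        wfJob.setStatus(WorkflowJob.Status.SUCCEEDED);
        wfJob.setLastModifiedTime(new java.util.Date());
        WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW, wfJob);
    }
}

The batch form exercised by the test above is the one the commands below use when several beans have to be written in a single call.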
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class SuspendXCommand, the method execute:
@Override
protected Void execute() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    try {
        suspendJob(this.jpaService, this.wfJobBean, this.wfid, null, updateList);
        this.wfJobBean.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, this.wfJobBean));
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
        queue(new WorkflowNotificationXCommand(this.wfJobBean));
        // Calling suspend recursively to handle parent workflow
        suspendParentWorkFlow();
    } catch (WorkflowException e) {
        throw new CommandException(e);
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    } finally {
        updateParentIfNecessary(wfJobBean);
    }
    return null;
}
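SuspendXCommand collects its writes in updateList and passes null for the insert and delete parameters because only updates are pending. A hypothetical convenience wrapper (flushUpdates is not an Oozie method) that makes this shape explicit, sketched against the BatchQueryExecutor API used above:

import java.util.List;
import org.apache.oozie.executor.jpa.BatchQueryExecutor;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.JPAExecutorException;

// Hypothetical helper: flush a list of UpdateEntry items in one batch, with no inserts or deletes.
final class BatchWrites {
    @SuppressWarnings("rawtypes")
    static void flushUpdates(List<UpdateEntry> updateList) throws JPAExecutorException {
        if (updateList == null || updateList.isEmpty()) {
            return; // nothing to persist
        }
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
    }
}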
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class CoordActionStartXCommand, the method execute:
@Override
protected Void execute() throws CommandException {
    boolean makeFail = true;
    String errCode = "";
    String errMsg = "";
    ParamChecker.notEmpty(user, "user");
    log.debug("actionid=" + actionId + ", status=" + coordAction.getStatus());
    if (coordAction.getStatus() == CoordinatorAction.Status.SUBMITTED) {
        // log.debug("getting.. job id: " + coordAction.getJobId());
        // create merged runConf to pass to WF Engine
        Configuration runConf = mergeConfig(coordAction);
        coordAction.setRunConf(XmlUtils.prettyPrint(runConf).toString());
        // log.debug("%%% merged runconf=" +
        // XmlUtils.prettyPrint(runConf).toString());
        DagEngine dagEngine = Services.get().get(DagEngineService.class).getDagEngine(user);
        try {
            Configuration conf = new XConfiguration(new StringReader(coordAction.getRunConf()));
            SLAEventBean slaEvent = SLADbOperations.createStatusEvent(coordAction.getSlaXml(), coordAction.getId(), Status.STARTED, SlaAppType.COORDINATOR_ACTION, log);
            if (slaEvent != null) {
                insertList.add(slaEvent);
            }
            if (OozieJobInfo.isJobInfoEnabled()) {
                conf.set(OozieJobInfo.COORD_ID, actionId);
                conf.set(OozieJobInfo.COORD_NAME, appName);
                conf.set(OozieJobInfo.COORD_NOMINAL_TIME, coordAction.getNominalTimestamp().toString());
            }
            // Normalize workflow appPath here;
            JobUtils.normalizeAppPath(conf.get(OozieClient.USER_NAME), conf.get(OozieClient.GROUP_NAME), conf);
            if (coordAction.getExternalId() != null) {
                conf.setBoolean(OozieClient.RERUN_FAIL_NODES, true);
                dagEngine.reRun(coordAction.getExternalId(), conf);
            } else {
                // Pushing the nominal time in conf to use for launcher tag search
                conf.set(OOZIE_COORD_ACTION_NOMINAL_TIME, String.valueOf(coordAction.getNominalTime().getTime()));
                String wfId = dagEngine.submitJobFromCoordinator(conf, actionId);
                coordAction.setExternalId(wfId);
            }
            coordAction.setStatus(CoordinatorAction.Status.RUNNING);
            coordAction.incrementAndGetPending();
            // store.updateCoordinatorAction(coordAction);
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                log.debug("Updating WF record for WFID :" + coordAction.getExternalId() + " with parent id: " + actionId);
                WorkflowJobBean wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW_STARTTIME, coordAction.getExternalId());
                wfJob.setParentId(actionId);
                wfJob.setLastModifiedTime(new Date());
                BatchQueryExecutor executor = BatchQueryExecutor.getInstance();
                updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_PARENT_MODIFIED, wfJob));
                updateList.add(new UpdateEntry<CoordActionQuery>(CoordActionQuery.UPDATE_COORD_ACTION_FOR_START, coordAction));
                try {
                    executor.executeBatchInsertUpdateDelete(insertList, updateList, null);
                    queue(new CoordActionNotificationXCommand(coordAction), 100);
                    if (EventHandlerService.isEnabled()) {
                        generateEvent(coordAction, user, appName, wfJob.getStartTime());
                    }
                } catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
            } else {
                log.error(ErrorCode.E0610);
            }
            makeFail = false;
        } catch (DagEngineException dee) {
            errMsg = dee.getMessage();
            errCode = dee.getErrorCode().toString();
            log.warn("can not create DagEngine for submitting jobs", dee);
        } catch (CommandException ce) {
            errMsg = ce.getMessage();
            errCode = ce.getErrorCode().toString();
            log.warn("command exception occurred ", ce);
        } catch (java.io.IOException ioe) {
            errMsg = ioe.getMessage();
            errCode = "E1005";
            log.warn("Configuration parse error. read from DB :" + coordAction.getRunConf(), ioe);
        } catch (Exception ex) {
            errMsg = ex.getMessage();
            errCode = "E1005";
            log.warn("can not create DagEngine for submitting jobs", ex);
        } finally {
            if (makeFail) {
                // No DB exception occurs
                log.error("Failing the action " + coordAction.getId() + ". Because " + errCode + " : " + errMsg);
                coordAction.setStatus(CoordinatorAction.Status.FAILED);
                if (errMsg.length() > 254) {
                    // Because table column size is 255
                    errMsg = errMsg.substring(0, 255);
                }
                coordAction.setErrorMessage(errMsg);
                coordAction.setErrorCode(errCode);
                updateList = new ArrayList<UpdateEntry>();
                updateList.add(new UpdateEntry<CoordActionQuery>(CoordActionQuery.UPDATE_COORD_ACTION_FOR_START, coordAction));
                insertList = new ArrayList<JsonBean>();
                SLAEventBean slaEvent = SLADbOperations.createStatusEvent(coordAction.getSlaXml(), coordAction.getId(), Status.FAILED, SlaAppType.COORDINATOR_ACTION, log);
                if (slaEvent != null) {
                    // Update SLA events
                    insertList.add(slaEvent);
                }
                try {
                    // call JPAExecutor to do the bulk writes
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
                    if (EventHandlerService.isEnabled()) {
                        generateEvent(coordAction, user, appName, null);
                    }
                } catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
                queue(new CoordActionReadyXCommand(coordAction.getJobId()));
            }
        }
    }
    return null;
}
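In the failure branch the error message is truncated before it is persisted because the backing table column holds at most 255 characters. A hypothetical helper (truncateForColumn is not an Oozie method) capturing the same rule, with an added null guard that the command above does not have; a sketch only:

// Hypothetical helper: cap an error message to the 255-character error-message column.
final class ErrorMessages {
    private static final int MAX_ERROR_MESSAGE_LENGTH = 255;

    static String truncateForColumn(String errMsg) {
        if (errMsg == null) {
            return ""; // the command above would NPE on a null message; normalize instead
        }
        return errMsg.length() <= MAX_ERROR_MESSAGE_LENGTH
                ? errMsg
                : errMsg.substring(0, MAX_ERROR_MESSAGE_LENGTH);
    }
}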
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class ActionCheckXCommand, the method execute:
@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED ActionCheckXCommand for wf actionId=" + actionId + " priority =" + getPriority());
    ActionExecutorContext context = null;
    boolean execSynchronous = false;
    try {
        boolean isRetry = false;
        if (wfAction.getRetries() > 0) {
            isRetry = true;
        }
        boolean isUserRetry = false;
        context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
        incrActionCounter(wfAction.getType(), 1);
        Instrumentation.Cron cron = new Instrumentation.Cron();
        cron.start();
        executor.check(context, wfAction);
        cron.stop();
        addActionCron(wfAction.getType(), cron);
        if (wfAction.isExecutionComplete()) {
            if (!context.isExecuted()) {
                LOG.warn(XLog.OPS, "Action Completed, ActionExecutor [{0}] must call setExecutionData()", executor.getType());
                wfAction.setErrorInfo(EXEC_DATA_MISSING, "Execution Complete, but Execution Data Missing from Action");
                failJob(context);
                generateEvent = true;
            } else {
                wfAction.setPending();
                execSynchronous = true;
            }
        }
        wfAction.setLastCheckTime(new Date());
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_CHECK, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } catch (ActionExecutorException ex) {
        LOG.warn("Exception while executing check(). Error Code [{0}], Message[{1}]", ex.getErrorCode(), ex.getMessage(), ex);
        wfAction.setErrorInfo(ex.getErrorCode(), ex.getMessage());
        switch (ex.getErrorType()) {
            case ERROR:
                // If allowed to retry, this will handle it; otherwise, we should fall through to FAILED
                if (handleUserRetry(context, wfAction)) {
                    break;
                }
            case FAILED:
                failJob(context, wfAction);
                generateEvent = true;
                break;
            // retry N times, then suspend workflow
            case TRANSIENT:
                if (!handleTransient(context, executor, WorkflowAction.Status.RUNNING)) {
                    handleNonTransient(context, executor, WorkflowAction.Status.START_MANUAL);
                    generateEvent = true;
                    wfAction.setPendingAge(new Date());
                    wfAction.setRetries(0);
                    wfAction.setStartTime(null);
                }
                break;
        }
        wfAction.setLastCheckTime(new Date());
        updateList = new ArrayList<UpdateEntry>();
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_CHECK, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } finally {
        try {
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, null);
            if (generateEvent && EventHandlerService.isEnabled()) {
                generateEvent(wfAction, wfJob.getUser());
            }
            if (execSynchronous) {
                new ActionEndXCommand(wfAction.getId(), wfAction.getType()).call();
            }
        } catch (JPAExecutorException e) {
            throw new CommandException(e);
        }
    }
    LOG.debug("ENDED ActionCheckXCommand for wf actionId=" + actionId + ", jobId=" + jobId);
    return null;
}
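Both the normal path and the ActionExecutorException path finish by queuing the same two writes: the action row via UPDATE_ACTION_CHECK and the job row via UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, which the finally block then flushes in one batch. A hypothetical helper (buildCheckUpdateList is not part of Oozie) showing that shared shape, sketched against the bean and query classes used above:

import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;

// Hypothetical helper: the update list written at the end of every check() pass.
final class CheckUpdates {
    @SuppressWarnings("rawtypes")
    static List<UpdateEntry> buildCheckUpdateList(WorkflowActionBean wfAction, WorkflowJobBean wfJob) {
        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        wfAction.setLastCheckTime(new Date());
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_CHECK, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
        return updateList;
    }
}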
Use of org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery in project oozie by apache.
In the class ActionEndXCommand, the method execute:
@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED ActionEndXCommand for action " + actionId);
    Configuration conf = wfJob.getWorkflowInstance().getConf();
    int maxRetries = 0;
    long retryInterval = 0;
    if (!(executor instanceof ControlNodeActionExecutor)) {
        maxRetries = conf.getInt(OozieClient.ACTION_MAX_RETRIES, executor.getMaxRetries());
        retryInterval = conf.getLong(OozieClient.ACTION_RETRY_INTERVAL, executor.getRetryInterval());
    }
    executor.setMaxRetries(maxRetries);
    executor.setRetryInterval(retryInterval);
    boolean isRetry = false;
    if (wfAction.getStatus() == WorkflowActionBean.Status.END_RETRY || wfAction.getStatus() == WorkflowActionBean.Status.END_MANUAL) {
        isRetry = true;
    }
    boolean isUserRetry = false;
    ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
    try {
        LOG.debug("End, name [{0}] type [{1}] status[{2}] external status [{3}] signal value [{4}]", wfAction.getName(), wfAction.getType(), wfAction.getStatus(), wfAction.getExternalStatus(), wfAction.getSignalValue());
        Instrumentation.Cron cron = new Instrumentation.Cron();
        cron.start();
        executor.end(context, wfAction);
        cron.stop();
        addActionCron(wfAction.getType(), cron);
        incrActionCounter(wfAction.getType(), 1);
        if (!context.isEnded()) {
            LOG.warn(XLog.OPS, "Action Ended, ActionExecutor [{0}] must call setEndData()", executor.getType());
            wfAction.setErrorInfo(END_DATA_MISSING, "Execution Ended, but End Data Missing from Action");
            failJob(context);
        } else {
            wfAction.setRetries(0);
            wfAction.setEndTime(new Date());
            boolean shouldHandleUserRetry = false;
            Status slaStatus = null;
            switch (wfAction.getStatus()) {
                case OK:
                    slaStatus = Status.SUCCEEDED;
                    break;
                case KILLED:
                    slaStatus = Status.KILLED;
                    break;
                case FAILED:
                    slaStatus = Status.FAILED;
                    shouldHandleUserRetry = true;
                    break;
                case ERROR:
                    LOG.info("ERROR is considered as FAILED for SLA");
                    slaStatus = Status.KILLED;
                    shouldHandleUserRetry = true;
                    break;
                default:
                    slaStatus = Status.FAILED;
                    shouldHandleUserRetry = true;
                    break;
            }
            if (!shouldHandleUserRetry || !handleUserRetry(context, wfAction)) {
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(), slaStatus, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
            }
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        DagELFunctions.setActionInfo(wfInstance, wfAction);
        wfJob.setWorkflowInstance(wfInstance);
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } catch (ActionExecutorException ex) {
        LOG.warn("Error ending action [{0}]. ErrorType [{1}], ErrorCode [{2}], Message [{3}]", wfAction.getName(), ex.getErrorType(), ex.getErrorCode(), ex.getMessage());
        wfAction.setErrorInfo(ex.getErrorCode(), ex.getMessage());
        wfAction.setEndTime(null);
        switch (ex.getErrorType()) {
            case TRANSIENT:
                if (!handleTransient(context, executor, WorkflowAction.Status.END_RETRY)) {
                    handleNonTransient(context, executor, WorkflowAction.Status.END_MANUAL);
                    wfAction.setPendingAge(new Date());
                    wfAction.setRetries(0);
                }
                wfAction.setEndTime(null);
                break;
            case NON_TRANSIENT:
                handleNonTransient(context, executor, WorkflowAction.Status.END_MANUAL);
                wfAction.setEndTime(null);
                break;
            case ERROR:
                handleError(context, executor, COULD_NOT_END, false, WorkflowAction.Status.ERROR);
                break;
            case FAILED:
                failJob(context);
                break;
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        DagELFunctions.setActionInfo(wfInstance, wfAction);
        wfJob.setWorkflowInstance(wfInstance);
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } finally {
        try {
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        } catch (JPAExecutorException e) {
            throw new CommandException(e);
        }
        if (!(executor instanceof ControlNodeActionExecutor) && EventHandlerService.isEnabled()) {
            generateEvent(wfAction, wfJob.getUser());
        }
        new SignalXCommand(jobId, actionId).call();
    }
    LOG.debug("ENDED ActionEndXCommand for action " + actionId);
    return null;
}
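The first switch above maps the workflow action status to the SLA status that gets recorded; note that ERROR is logged as being treated like FAILED for SLA purposes but is actually stored as KILLED. A hypothetical pure function (toSlaStatus is not an Oozie method) mirroring that mapping, sketched on the assumption that the Status enum used above is org.apache.oozie.client.SLAEvent.Status:

import org.apache.oozie.client.SLAEvent.Status;
import org.apache.oozie.client.WorkflowAction;

// Hypothetical helper: mirror of the SLA status mapping in ActionEndXCommand.execute().
final class SlaStatusMapping {
    static Status toSlaStatus(WorkflowAction.Status actionStatus) {
        switch (actionStatus) {
            case OK:
                return Status.SUCCEEDED;
            case KILLED:
                return Status.KILLED;
            case ERROR:
                // the command logs this case as FAILED for SLA but records KILLED
                return Status.KILLED;
            case FAILED:
            default:
                return Status.FAILED;
        }
    }
}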