Use of org.apache.oozie.client.rest.JsonBean in project oozie by apache:
the class CoordMaterializeTransitionXCommand, method performWrites.
/**
 * Persists the queued inserts/updates in a single batch and then schedules
 * the follow-up commands (notification, input check and, where push
 * dependencies exist, a push-dependency check) for every coordinator action
 * that was just inserted.
 *
 * @throws CommandException wrapping any {@link JPAExecutorException} raised
 *         by the batch write
 * @see org.apache.oozie.command.MaterializeTransitionXCommand#performWrites()
 */
@Override
public void performWrites() throws CommandException {
    try {
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        // register the partition related dependencies of actions
        for (JsonBean bean : insertList) {
            if (!(bean instanceof CoordinatorActionBean)) {
                continue;
            }
            CoordinatorActionBean action = (CoordinatorActionBean) bean;
            if (EventHandlerService.isEnabled()) {
                CoordinatorXCommand.generateEvent(action, coordJob.getUser(), coordJob.getAppName(), null);
            }
            // TODO: time 100s should be configurable
            queue(new CoordActionNotificationXCommand(action), 100);
            // Delay for input check = (nominal time - now)
            long inputCheckDelay = action.getNominalTime().getTime() - new Date().getTime();
            queue(new CoordActionInputCheckXCommand(action.getId(), action.getJobId()),
                    Math.max(inputCheckDelay, 0));
            if (!StringUtils.isEmpty(action.getPushMissingDependencies())) {
                // TODO: Delay in catchup mode?
                queue(new CoordPushDependencyCheckXCommand(action.getId(), true), 100);
            }
        }
    }
    catch (JPAExecutorException jex) {
        throw new CommandException(jex);
    }
}
Use of org.apache.oozie.client.rest.JsonBean in project oozie by apache:
the class CoordActionStartXCommand, method execute.
/**
 * Starts the workflow for a coordinator action in SUBMITTED state: merges
 * the run configuration, submits (or reruns) the workflow through the
 * {@code DagEngine}, moves the action to RUNNING and persists the changes in
 * one batch. If any step fails, the finally block transitions the action to
 * FAILED, records the error code/message and requeues the coordinator's
 * ready check.
 *
 * Fixes over the previous revision: {@code Throwable.getMessage()} may
 * return null, which caused an NPE at {@code errMsg.length()} in the
 * failure path; the truncation bound is now consistent with the
 * 255-character DB column.
 *
 * @return always null (Void command)
 * @throws CommandException if the batch database write fails
 */
@Override
protected Void execute() throws CommandException {
    // Assume failure until the whole start sequence completes; the finally
    // block persists the FAILED state while makeFail is still true.
    boolean makeFail = true;
    String errCode = "";
    String errMsg = "";
    ParamChecker.notEmpty(user, "user");
    log.debug("actionid=" + actionId + ", status=" + coordAction.getStatus());
    if (coordAction.getStatus() == CoordinatorAction.Status.SUBMITTED) {
        // create merged runConf to pass to WF Engine
        Configuration runConf = mergeConfig(coordAction);
        coordAction.setRunConf(XmlUtils.prettyPrint(runConf).toString());
        DagEngine dagEngine = Services.get().get(DagEngineService.class).getDagEngine(user);
        try {
            Configuration conf = new XConfiguration(new StringReader(coordAction.getRunConf()));
            SLAEventBean slaEvent = SLADbOperations.createStatusEvent(coordAction.getSlaXml(), coordAction.getId(),
                    Status.STARTED, SlaAppType.COORDINATOR_ACTION, log);
            if (slaEvent != null) {
                insertList.add(slaEvent);
            }
            if (OozieJobInfo.isJobInfoEnabled()) {
                conf.set(OozieJobInfo.COORD_ID, actionId);
                conf.set(OozieJobInfo.COORD_NAME, appName);
                conf.set(OozieJobInfo.COORD_NOMINAL_TIME, coordAction.getNominalTimestamp().toString());
            }
            // Normalize workflow appPath here;
            JobUtils.normalizeAppPath(conf.get(OozieClient.USER_NAME), conf.get(OozieClient.GROUP_NAME), conf);
            if (coordAction.getExternalId() != null) {
                // A workflow already exists for this action: rerun its failed nodes
                // instead of submitting a brand-new workflow.
                conf.setBoolean(OozieClient.RERUN_FAIL_NODES, true);
                dagEngine.reRun(coordAction.getExternalId(), conf);
            }
            else {
                // Pushing the nominal time in conf to use for launcher tag search
                conf.set(OOZIE_COORD_ACTION_NOMINAL_TIME, String.valueOf(coordAction.getNominalTime().getTime()));
                String wfId = dagEngine.submitJobFromCoordinator(conf, actionId);
                coordAction.setExternalId(wfId);
            }
            coordAction.setStatus(CoordinatorAction.Status.RUNNING);
            coordAction.incrementAndGetPending();
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                log.debug("Updating WF record for WFID :" + coordAction.getExternalId() + " with parent id: " + actionId);
                WorkflowJobBean wfJob = WorkflowJobQueryExecutor.getInstance().get(
                        WorkflowJobQuery.GET_WORKFLOW_STARTTIME, coordAction.getExternalId());
                wfJob.setParentId(actionId);
                wfJob.setLastModifiedTime(new Date());
                BatchQueryExecutor executor = BatchQueryExecutor.getInstance();
                updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_PARENT_MODIFIED, wfJob));
                updateList.add(new UpdateEntry<CoordActionQuery>(CoordActionQuery.UPDATE_COORD_ACTION_FOR_START, coordAction));
                try {
                    executor.executeBatchInsertUpdateDelete(insertList, updateList, null);
                    queue(new CoordActionNotificationXCommand(coordAction), 100);
                    if (EventHandlerService.isEnabled()) {
                        generateEvent(coordAction, user, appName, wfJob.getStartTime());
                    }
                }
                catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
            }
            else {
                log.error(ErrorCode.E0610);
            }
            makeFail = false;
        }
        catch (DagEngineException dee) {
            errMsg = dee.getMessage();
            errCode = dee.getErrorCode().toString();
            log.warn("can not create DagEngine for submitting jobs", dee);
        }
        catch (CommandException ce) {
            errMsg = ce.getMessage();
            errCode = ce.getErrorCode().toString();
            log.warn("command exception occurred ", ce);
        }
        catch (java.io.IOException ioe) {
            errMsg = ioe.getMessage();
            errCode = "E1005";
            log.warn("Configuration parse error. read from DB :" + coordAction.getRunConf(), ioe);
        }
        catch (Exception ex) {
            errMsg = ex.getMessage();
            errCode = "E1005";
            log.warn("can not create DagEngine for submitting jobs", ex);
        }
        finally {
            if (makeFail) {
                // No DB exception occurs
                log.error("Failing the action " + coordAction.getId() + ". Because " + errCode + " : " + errMsg);
                coordAction.setStatus(CoordinatorAction.Status.FAILED);
                // Throwable.getMessage() may legally return null - guard before
                // measuring/truncating, otherwise the failure path itself NPEs.
                if (errMsg == null) {
                    errMsg = "";
                }
                if (errMsg.length() > 255) {
                    // Because table column size is 255
                    errMsg = errMsg.substring(0, 255);
                }
                coordAction.setErrorMessage(errMsg);
                coordAction.setErrorCode(errCode);
                updateList = new ArrayList<UpdateEntry>();
                updateList.add(new UpdateEntry<CoordActionQuery>(CoordActionQuery.UPDATE_COORD_ACTION_FOR_START, coordAction));
                insertList = new ArrayList<JsonBean>();
                SLAEventBean slaEvent = SLADbOperations.createStatusEvent(coordAction.getSlaXml(), coordAction.getId(),
                        Status.FAILED, SlaAppType.COORDINATOR_ACTION, log);
                if (slaEvent != null) {
                    // Update SLA events
                    insertList.add(slaEvent);
                }
                try {
                    // call JPAExecutor to do the bulk writes
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
                    if (EventHandlerService.isEnabled()) {
                        generateEvent(coordAction, user, appName, null);
                    }
                }
                catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
                // Let the coordinator re-evaluate which actions are ready to run.
                queue(new CoordActionReadyXCommand(coordAction.getJobId()));
            }
        }
    }
    return null;
}
Use of org.apache.oozie.client.rest.JsonBean in project oozie by apache:
the class BundleStartXCommand, method updateBundleAction.
/**
 * Marks every bundle action queued for insert as pending and refreshes its
 * last-modified timestamp prior to the batch write.
 *
 * @throws JPAExecutorException declared for callers; this loop itself does
 *         no database work
 */
private void updateBundleAction() throws JPAExecutorException {
    for (JsonBean bean : insertList) {
        BundleActionBean bundleAction = (BundleActionBean) bean;
        bundleAction.incrementAndGetPending();
        bundleAction.setLastModifiedTime(new Date());
    }
}
Use of org.apache.oozie.client.rest.JsonBean in project oozie by apache:
the class TransitionXCommand, method generateEvents.
/**
 * Generates job-notification events for every coordinator action bean found
 * in the pending update list, stamping each with the owning job's id before
 * the event is emitted.
 *
 * @param coordJob the coordinator job whose id, user and app name are
 *        attached to each event
 * @param startTime the start time to report in the generated events
 * @throws CommandException declared for subclasses/callers
 */
public void generateEvents(CoordinatorJobBean coordJob, Date startTime) throws CommandException {
    for (UpdateEntry entry : updateList) {
        JsonBean bean = entry.getBean();
        if (!(bean instanceof CoordinatorActionBean)) {
            // TODO generate Coord Job event
            continue;
        }
        CoordinatorActionBean coordAction = (CoordinatorActionBean) bean;
        coordAction.setJobId(coordJob.getId());
        CoordinatorXCommand.generateEvent(coordAction, coordJob.getUser(), coordJob.getAppName(), startTime);
    }
}
Use of org.apache.oozie.client.rest.JsonBean in project oozie by apache:
the class SignalXCommand, method startForkedActions.
/**
 * Starts the given forked workflow actions in parallel via the callable
 * queue, copies each forked action's context variables back into the
 * workflow, handles FAILED/SUSPENDED outcomes, and persists all resulting
 * updates in one batch. Falls back to queue-based submission when parallel
 * execution is rejected or throws.
 *
 * Fixes over the previous revision: the SUSPENDED branch logged the
 * copy-pasted "Action has failed, failing job" message, and the FAILED log
 * line lacked a space before the action id; map iteration now uses
 * entrySet() instead of keySet()+get().
 *
 * @param workflowActionBeanListForForked the forked actions to start
 * @throws CommandException if the final batch database write fails
 */
public void startForkedActions(List<WorkflowActionBean> workflowActionBeanListForForked) throws CommandException {
    List<CallableWrapper<ActionExecutorContext>> tasks = new ArrayList<CallableWrapper<ActionExecutorContext>>();
    List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    List<JsonBean> insertList = new ArrayList<JsonBean>();
    boolean endWorkflow = false;
    boolean submitJobByQueuing = false;
    for (WorkflowActionBean workflowActionBean : workflowActionBeanListForForked) {
        LOG.debug("Starting forked actions parallely : " + workflowActionBean.getId());
        tasks.add(Services.get().get(CallableQueueService.class).new CallableWrapper<ActionExecutorContext>(
                new ForkedActionStartXCommand(wfJob, workflowActionBean.getId(), workflowActionBean.getType()), 0));
    }
    try {
        List<Future<ActionExecutorContext>> futures = Services.get().get(CallableQueueService.class).invokeAll(tasks);
        for (Future<ActionExecutorContext> result : futures) {
            if (result == null) {
                // Task was rejected by the queue; retry via queuing after the loop.
                submitJobByQueuing = true;
                continue;
            }
            ActionExecutorContext context = result.get();
            Map<String, String> contextVariableMap = ((ForkedActionExecutorContext) context).getContextMap();
            LOG.debug("contextVariableMap size of action " + context.getAction().getId() + " is "
                    + contextVariableMap.size());
            // Propagate the forked action's context variables back into the workflow.
            for (Map.Entry<String, String> contextVariable : contextVariableMap.entrySet()) {
                context.setVarToWorkflow(contextVariable.getKey(), contextVariable.getValue());
            }
            if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.FAILED)) {
                LOG.warn("Action has failed, failing job " + context.getAction().getId());
                new ActionStartXCommand(context.getAction().getId(), null).failJob(context);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START,
                        (WorkflowActionBean) context.getAction()));
                if (context.isShouldEndWF()) {
                    endWorkflow = true;
                }
            }
            if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.SUSPENDED)) {
                // This branch handles suspension, not failure - the previous log
                // message was copy-pasted from the FAILED branch.
                LOG.warn("Action is suspended, suspending job " + context.getAction().getId());
                new ActionStartXCommand(context.getAction().getId(), null).handleNonTransient(context, null,
                        WorkflowAction.Status.START_MANUAL);
                updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START,
                        (WorkflowActionBean) context.getAction()));
                if (context.isShouldEndWF()) {
                    endWorkflow = true;
                }
            }
        }
        if (endWorkflow) {
            endWF(insertList);
        }
    }
    catch (Exception e) {
        LOG.error("Error running forked jobs parallely", e);
        startForkedActionsByQueuing(workflowActionBeanListForForked);
        submitJobByQueuing = false;
    }
    if (submitJobByQueuing && !endWorkflow) {
        LOG.error("There is error in running forked jobs parallely");
        startForkedActionsByQueuing(workflowActionBeanListForForked);
    }
    wfJob.setLastModifiedTime(new Date());
    updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    try {
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
    }
    catch (JPAExecutorException e) {
        throw new CommandException(e);
    }
    LOG.debug("forked actions submitted parallely");
}
Aggregations