Use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.
The class SubmitXCommand, method execute().
@Override
protected String execute() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        String user = conf.get(OozieClient.USER_NAME);
        URI uri = new URI(conf.get(OozieClient.APP_PATH));
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path configDefault = null;
        Configuration defaultConf = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            try {
                defaultConf = new XConfiguration(fs.open(configDefault));
                PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
                XConfiguration.injectDefaults(defaultConf, conf);
            } catch (IOException ex) {
                throw new IOException("default configuration file, " + ex.getMessage(), ex);
            }
        }
        if (defaultConf != null) {
            defaultConf = resolveDefaultConfVariables(defaultConf);
        }
        WorkflowApp app = wps.parseDef(conf, defaultConf);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolving all variables in the job properties.
        // This ensures the Hadoop Configuration semantics is preserved.
        XConfiguration resolvedVarsConf = new XConfiguration();
        for (Map.Entry<String, String> entry : conf) {
            resolvedVarsConf.set(entry.getKey(), conf.get(entry.getKey()));
        }
        conf = resolvedVarsConf;
        WorkflowInstance wfInstance;
        try {
            wfInstance = workflowLib.createInstance(app, conf);
        } catch (WorkflowException e) {
            throw new StoreException(e);
        }
        Configuration conf = wfInstance.getConf();
        // System.out.println("WF INSTANCE CONF:");
        // System.out.println(XmlUtils.prettyPrint(conf).toString());
        WorkflowJobBean workflow = new WorkflowJobBean();
        workflow.setId(wfInstance.getId());
        workflow.setAppName(ELUtils.resolveAppName(app.getName(), conf));
        workflow.setAppPath(conf.get(OozieClient.APP_PATH));
        workflow.setConf(XmlUtils.prettyPrint(conf).toString());
        workflow.setProtoActionConf(protoActionConf.toXmlString());
        workflow.setCreatedTime(new Date());
        workflow.setLastModifiedTime(new Date());
        workflow.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
        workflow.setStatus(WorkflowJob.Status.PREP);
        workflow.setRun(0);
        workflow.setUser(conf.get(OozieClient.USER_NAME));
        workflow.setGroup(conf.get(OozieClient.GROUP_NAME));
        workflow.setWorkflowInstance(wfInstance);
        workflow.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
        // Set parent id if it doesn't already have one (for subworkflows)
        if (workflow.getParentId() == null) {
            workflow.setParentId(conf.get(SubWorkflowActionExecutor.PARENT_ID));
        }
        // Set to coord action Id if workflow submitted through coordinator
        if (workflow.getParentId() == null) {
            workflow.setParentId(parentId);
        }
        LogUtils.setLogInfo(workflow);
        LOG.debug("Workflow record created, Status [{0}]", workflow.getStatus());
        Element wfElem = XmlUtils.parseXml(app.getDefinition());
        ELEvaluator evalSla = createELEvaluatorForGroup(conf, "wf-sla-submit");
        String jobSlaXml = verifySlaElements(wfElem, evalSla);
        if (!dryrun) {
            writeSLARegistration(wfElem, jobSlaXml, workflow.getId(), workflow.getParentId(), workflow.getUser(),
                    workflow.getGroup(), workflow.getAppName(), LOG, evalSla);
            workflow.setSlaXml(jobSlaXml);
            // System.out.println("SlaXml :"+ slaXml);
            // store.insertWorkflow(workflow);
            insertList.add(workflow);
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                try {
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, null, null);
                } catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
            } else {
                LOG.error(ErrorCode.E0610);
                return null;
            }
            return workflow.getId();
        } else {
            // Checking variable substitution for dryrun
            ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(workflow, null, false, false);
            Element workflowXml = XmlUtils.parseXml(app.getDefinition());
            removeSlaElements(workflowXml);
            String workflowXmlString = XmlUtils.removeComments(XmlUtils.prettyPrint(workflowXml).toString());
            workflowXmlString = context.getELEvaluator().evaluate(workflowXmlString, String.class);
            workflowXml = XmlUtils.parseXml(workflowXmlString);
            Iterator<Element> it = workflowXml.getDescendants(new ElementFilter("job-xml"));
            // Checking all variable substitutions in job-xml files
            while (it.hasNext()) {
                Element e = it.next();
                String jobXml = e.getTextTrim();
                Path xmlPath = new Path(workflow.getAppPath(), jobXml);
                Configuration jobXmlConf = new XConfiguration(fs.open(xmlPath));
                String jobXmlConfString = XmlUtils.prettyPrint(jobXmlConf).toString();
                jobXmlConfString = XmlUtils.removeComments(jobXmlConfString);
                context.getELEvaluator().evaluate(jobXmlConfString, String.class);
            }
            return "OK";
        }
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    }
}
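For context, a minimal client-side sketch of a submission that ends up in SubmitXCommand.execute(). The server URL, application path, user name and the inputDir property are placeholders, not values taken from the code above.

import java.util.Properties;
import org.apache.oozie.client.OozieClient;

public class SubmitWorkflowExample {
    public static void main(String[] args) throws Exception {
        OozieClient client = new OozieClient("http://localhost:11000/oozie"); // placeholder Oozie URL
        Properties conf = client.createConfiguration();
        // APP_PATH may point at a directory (config-default.xml picked up next to it) or at workflow.xml
        conf.setProperty(OozieClient.APP_PATH, "hdfs://namenode:8020/user/alice/my-wf");
        conf.setProperty(OozieClient.USER_NAME, "alice");
        conf.setProperty("inputDir", "/user/alice/input"); // resolved like any other ${...} job property
        String jobId = client.submit(conf); // submit only; the job stays in PREP until started
        System.out.println("workflow job id: " + jobId);
    }
}

The dryrun branch above is reached through the client's dryrun(conf) call rather than submit(conf) or run(conf).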
Use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.
The class TestHadoopELFunctions, method testELFunctionsReturningMapReduceStats().
public void testELFunctionsReturningMapReduceStats() throws Exception {
    String counters = "{\"g\":{\"c\":10},\"org.apache.hadoop.mapred.JobInProgress$Counter\":"
            + "{\"TOTAL_LAUNCHED_REDUCES\":1,\"TOTAL_LAUNCHED_MAPS\":2,\"DATA_LOCAL_MAPS\":2},\"ACTION_TYPE\":\"MAP_REDUCE\","
            + "\"FileSystemCounters\":{\"FILE_BYTES_READ\":38,\"HDFS_BYTES_READ\":19,"
            + "\"FILE_BYTES_WRITTEN\":146,\"HDFS_BYTES_WRITTEN\":16},"
            + "\"org.apache.hadoop.mapred.Task$Counter\":{\"REDUCE_INPUT_GROUPS\":2,"
            + "\"COMBINE_OUTPUT_RECORDS\":0,\"MAP_INPUT_RECORDS\":2,\"REDUCE_SHUFFLE_BYTES\":22,"
            + "\"REDUCE_OUTPUT_RECORDS\":2,\"SPILLED_RECORDS\":4,\"MAP_OUTPUT_BYTES\":28,"
            + "\"MAP_INPUT_BYTES\":12,\"MAP_OUTPUT_RECORDS\":2,\"COMBINE_INPUT_RECORDS\":0,"
            + "\"REDUCE_INPUT_RECORDS\":2}}";
    WorkflowJobBean workflow = new WorkflowJobBean();
    workflow.setProtoActionConf("<configuration/>");
    LiteWorkflowApp wfApp = new LiteWorkflowApp("x", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "a"));
    wfApp.addNode(new EndNodeDef("a", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    WorkflowInstance wi = new LiteWorkflowInstance(wfApp, new XConfiguration(), "1");
    workflow.setWorkflowInstance(wi);
    workflow.setId(Services.get().get(UUIDService.class).generateId(ApplicationType.WORKFLOW));
    final WorkflowActionBean action = new WorkflowActionBean();
    action.setName("H");
    ActionXCommand.ActionExecutorContext context =
            new ActionXCommand.ActionExecutorContext(workflow, action, false, false);
    context.setVar(MapReduceActionExecutor.HADOOP_COUNTERS, counters);
    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, workflow, action);
    String group = "g";
    String name = "c";
    assertEquals(new Long(10), eval.evaluate("${hadoop:counters('H')['" + group + "']['" + name + "']}", Long.class));
    assertEquals(new Long(2), eval.evaluate("${hadoop:counters('H')[RECORDS][GROUPS]}", Long.class));
    assertEquals(new Long(2), eval.evaluate("${hadoop:counters('H')[RECORDS][REDUCE_IN]}", Long.class));
    assertEquals(new Long(2), eval.evaluate("${hadoop:counters('H')[RECORDS][REDUCE_OUT]}", Long.class));
    assertEquals(new Long(2), eval.evaluate("${hadoop:counters('H')[RECORDS][MAP_IN]}", Long.class));
    assertEquals(new Long(2), eval.evaluate("${hadoop:counters('H')[RECORDS][MAP_OUT]}", Long.class));
    assertEquals(ActionType.MAP_REDUCE.toString(), eval.evaluate("${hadoop:counters('H')['ACTION_TYPE']}", String.class));
}
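The hadoop:counters('H') expressions above resolve keys inside the JSON blob stored under MapReduceActionExecutor.HADOOP_COUNTERS. As a rough illustration of that structure only, the following standalone sketch walks a trimmed copy of the same blob with json-simple; the class name is made up and this is not how the EL function itself is implemented.

import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class CountersJsonProbe {
    public static void main(String[] args) {
        // Trimmed copy of the counters blob used in the test above.
        String counters = "{\"g\":{\"c\":10},"
                + "\"org.apache.hadoop.mapred.Task$Counter\":{\"REDUCE_INPUT_RECORDS\":2,\"MAP_OUTPUT_RECORDS\":2}}";
        JSONObject root = (JSONObject) JSONValue.parse(counters);
        // ${hadoop:counters('H')['g']['c']} walks the same two levels: group, then counter name.
        JSONObject group = (JSONObject) root.get("g");
        System.out.println(group.get("c")); // prints 10; json-simple yields a Long, matching Long.class above
    }
}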
Use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.
The class TestHadoopELFunctions, method testELFunctionsReturningPigStats().
public void testELFunctionsReturningPigStats() throws Exception {
    String pigStats = "{\"ACTION_TYPE\":\"PIG\","
            + "\"PIG_VERSION\":\"0.9.0\","
            + "\"FEATURES\":\"UNKNOWN\","
            + "\"ERROR_MESSAGE\":null,"
            + "\"NUMBER_JOBS\":\"2\","
            + "\"RECORD_WRITTEN\":\"33\","
            + "\"JOB_GRAPH\":\"job_201111300933_0004,job_201111300933_0005\","
            + "\"job_201111300933_0004\":{\"MAP_INPUT_RECORDS\":\"33\",\"MIN_REDUCE_TIME\":\"0\",\"MULTI_STORE_COUNTERS\":{},"
            + "\"ERROR_MESSAGE\":null,\"JOB_ID\":\"job_201111300933_0004\"},"
            + "\"job_201111300933_0005\":{\"MAP_INPUT_RECORDS\":\"37\",\"MIN_REDUCE_TIME\":\"0\",\"MULTI_STORE_COUNTERS\":{},"
            + "\"ERROR_MESSAGE\":null,\"JOB_ID\":\"job_201111300933_0005\"},"
            + "\"BYTES_WRITTEN\":\"1410\","
            + "\"HADOOP_VERSION\":\"0.20.2\","
            + "\"RETURN_CODE\":\"0\","
            + "\"ERROR_CODE\":\"-1\","
            + "}";
    WorkflowJobBean workflow = new WorkflowJobBean();
    workflow.setProtoActionConf("<configuration/>");
    LiteWorkflowApp wfApp = new LiteWorkflowApp("x", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "a"));
    wfApp.addNode(new EndNodeDef("a", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    WorkflowInstance wi = new LiteWorkflowInstance(wfApp, new XConfiguration(), "1");
    workflow.setWorkflowInstance(wi);
    workflow.setId(Services.get().get(UUIDService.class).generateId(ApplicationType.WORKFLOW));
    final WorkflowActionBean action = new WorkflowActionBean();
    action.setName("H");
    ActionXCommand.ActionExecutorContext context =
            new ActionXCommand.ActionExecutorContext(workflow, action, false, false);
    context.setVar(MapReduceActionExecutor.HADOOP_COUNTERS, pigStats);
    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, workflow, action);
    String version = "0.9.0";
    String jobGraph = "job_201111300933_0004,job_201111300933_0005";
    HashMap<String, String> job1StatusMap = new HashMap<String, String>();
    job1StatusMap.put("\"MAP_INPUT_RECORDS\"", "\"33\"");
    job1StatusMap.put("\"MIN_REDUCE_TIME\"", "\"0\"");
    job1StatusMap.put("\"MULTI_STORE_COUNTERS\"", "{}");
    job1StatusMap.put("\"ERROR_MESSAGE\"", "null");
    job1StatusMap.put("\"JOB_ID\"", "\"job_201111300933_0004\"");
    HashMap<String, String> job2StatusMap = new HashMap<String, String>();
    job2StatusMap.put("\"MAP_INPUT_RECORDS\"", "\"37\"");
    job2StatusMap.put("\"MIN_REDUCE_TIME\"", "\"0\"");
    job2StatusMap.put("\"MULTI_STORE_COUNTERS\"", "{}");
    job2StatusMap.put("\"ERROR_MESSAGE\"", "null");
    job2StatusMap.put("\"JOB_ID\"", "\"job_201111300933_0005\"");
    assertEquals(ActionType.PIG.toString(), eval.evaluate("${hadoop:counters('H')['ACTION_TYPE']}", String.class));
    assertEquals(version, eval.evaluate("${hadoop:counters('H')['PIG_VERSION']}", String.class));
    assertEquals(jobGraph, eval.evaluate("${hadoop:counters('H')['JOB_GRAPH']}", String.class));
    String[] jobStatusItems = { "\"MAP_INPUT_RECORDS\"", "\"MIN_REDUCE_TIME\"", "\"MULTI_STORE_COUNTERS\"",
            "\"ERROR_MESSAGE\"", "\"JOB_ID\"" };
    String job1StatusResult = eval.evaluate("${hadoop:counters('H')['job_201111300933_0004']}", String.class);
    job1StatusResult = job1StatusResult.substring(job1StatusResult.indexOf('{') + 1, job1StatusResult.lastIndexOf('}'));
    String[] job1StatusResArray = job1StatusResult.split(",");
    HashMap<String, String> job1StatusResMap = new HashMap<String, String>();
    for (String status : job1StatusResArray) {
        String[] tmp = status.split(":");
        job1StatusResMap.put(tmp[0], tmp[1]);
    }
    for (String item : jobStatusItems) {
        assertEquals(job1StatusMap.get(item), job1StatusResMap.get(item));
    }
    String job2StatusResult = eval.evaluate("${hadoop:counters('H')['job_201111300933_0005']}", String.class);
    job2StatusResult = job2StatusResult.substring(job2StatusResult.indexOf('{') + 1, job2StatusResult.lastIndexOf('}'));
    String[] job2StatusResArray = job2StatusResult.split(",");
    HashMap<String, String> job2StatusResMap = new HashMap<String, String>();
    for (String status : job2StatusResArray) {
        String[] tmp = status.split(":");
        job2StatusResMap.put(tmp[0], tmp[1]);
    }
    for (String item : jobStatusItems) {
        assertEquals(job2StatusMap.get(item), job2StatusResMap.get(item));
    }
    assertEquals(new Long(33), eval.evaluate("${hadoop:counters('H')['job_201111300933_0004']['MAP_INPUT_RECORDS']}", Long.class));
}
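The per-job lookups above key the same blob by the job IDs listed in JOB_GRAPH. A small hypothetical sketch of that traversal, again over a trimmed, well-formed copy of the Pig stats JSON (the original blob above carries a trailing comma that strict parsers may reject):

import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class PigStatsProbe {
    public static void main(String[] args) {
        String pigStats = "{\"ACTION_TYPE\":\"PIG\","
                + "\"JOB_GRAPH\":\"job_201111300933_0004,job_201111300933_0005\","
                + "\"job_201111300933_0004\":{\"MAP_INPUT_RECORDS\":\"33\",\"JOB_ID\":\"job_201111300933_0004\"},"
                + "\"job_201111300933_0005\":{\"MAP_INPUT_RECORDS\":\"37\",\"JOB_ID\":\"job_201111300933_0005\"}}";
        JSONObject stats = (JSONObject) JSONValue.parse(pigStats);
        // JOB_GRAPH lists the MapReduce jobs Pig launched; each job ID is also a top-level key.
        for (String jobId : ((String) stats.get("JOB_GRAPH")).split(",")) {
            JSONObject jobStats = (JSONObject) stats.get(jobId);
            System.out.println(jobId + " MAP_INPUT_RECORDS=" + jobStats.get("MAP_INPUT_RECORDS"));
        }
    }
}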
Use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.
The class TestHadoopELFunctions, method testHadoopConfFunctions().
public void testHadoopConfFunctions() throws Exception {
    XConfiguration jobConf = new XConfiguration();
    XConfiguration.copy(createJobConf(), jobConf);
    String testHadoopOptionValue = jobConf.get("mapred.tasktracker.map.tasks.maximum");
    jobConf.set("test.name.node.uri", getNameNodeUri());
    jobConf.set("test.hadoop.option", "mapred.tasktracker.map.tasks.maximum");
    WorkflowJobBean workflow = new WorkflowJobBean();
    workflow.setProtoActionConf("<configuration/>");
    LiteWorkflowApp wfApp = new LiteWorkflowApp("x", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "a"));
    wfApp.addNode(new EndNodeDef("a", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    WorkflowInstance wi = new LiteWorkflowInstance(wfApp, jobConf, "1");
    workflow.setWorkflowInstance(wi);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    jobConf.writeXml(baos);
    workflow.setProtoActionConf(baos.toString());
    final WorkflowActionBean action = new WorkflowActionBean();
    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, workflow, action);
    assertEquals(testHadoopOptionValue,
            eval.evaluate("${hadoop:conf(wf:conf('test.name.node.uri'), wf:conf('test.hadoop.option'))}", String.class));
}
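The assertion above exercises the hadoop:conf EL function: it reads a Hadoop property from the configuration Oozie associates with the given name-node URI. A hypothetical helper with roughly the same effect, reusing the HadoopAccessorService.createConfiguration(authority) call seen in SubmitXCommand.execute() above (the class and method names are illustrative, not the actual HadoopELFunctions code):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;

public final class HadoopConfLookup {
    // Rough equivalent of ${hadoop:conf(nameNodeUri, property)}: look the property up
    // in the cluster configuration Oozie builds for that authority.
    public static String lookup(String nameNodeUri, String property) throws Exception {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration clusterConf = has.createConfiguration(new URI(nameNodeUri).getAuthority());
        return clusterConf.get(property);
    }
}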
Use of org.apache.oozie.workflow.WorkflowInstance in project oozie by apache.
The class ActionEndXCommand, method execute().
@Override
protected Void execute() throws CommandException {
    LOG.debug("STARTED ActionEndXCommand for action " + actionId);
    Configuration conf = wfJob.getWorkflowInstance().getConf();
    int maxRetries = 0;
    long retryInterval = 0;
    if (!(executor instanceof ControlNodeActionExecutor)) {
        maxRetries = conf.getInt(OozieClient.ACTION_MAX_RETRIES, executor.getMaxRetries());
        retryInterval = conf.getLong(OozieClient.ACTION_RETRY_INTERVAL, executor.getRetryInterval());
    }
    executor.setMaxRetries(maxRetries);
    executor.setRetryInterval(retryInterval);
    boolean isRetry = false;
    if (wfAction.getStatus() == WorkflowActionBean.Status.END_RETRY
            || wfAction.getStatus() == WorkflowActionBean.Status.END_MANUAL) {
        isRetry = true;
    }
    boolean isUserRetry = false;
    ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry, isUserRetry);
    try {
        LOG.debug("End, name [{0}] type [{1}] status[{2}] external status [{3}] signal value [{4}]",
                wfAction.getName(), wfAction.getType(), wfAction.getStatus(), wfAction.getExternalStatus(),
                wfAction.getSignalValue());
        Instrumentation.Cron cron = new Instrumentation.Cron();
        cron.start();
        executor.end(context, wfAction);
        cron.stop();
        addActionCron(wfAction.getType(), cron);
        incrActionCounter(wfAction.getType(), 1);
        if (!context.isEnded()) {
            LOG.warn(XLog.OPS, "Action Ended, ActionExecutor [{0}] must call setEndData()", executor.getType());
            wfAction.setErrorInfo(END_DATA_MISSING, "Execution Ended, but End Data Missing from Action");
            failJob(context);
        } else {
            wfAction.setRetries(0);
            wfAction.setEndTime(new Date());
            boolean shouldHandleUserRetry = false;
            Status slaStatus = null;
            switch (wfAction.getStatus()) {
                case OK:
                    slaStatus = Status.SUCCEEDED;
                    break;
                case KILLED:
                    slaStatus = Status.KILLED;
                    break;
                case FAILED:
                    slaStatus = Status.FAILED;
                    shouldHandleUserRetry = true;
                    break;
                case ERROR:
                    LOG.info("ERROR is considered as FAILED for SLA");
                    slaStatus = Status.KILLED;
                    shouldHandleUserRetry = true;
                    break;
                default:
                    slaStatus = Status.FAILED;
                    shouldHandleUserRetry = true;
                    break;
            }
            if (!shouldHandleUserRetry || !handleUserRetry(context, wfAction)) {
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(),
                        slaStatus, SlaAppType.WORKFLOW_ACTION);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
            }
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        DagELFunctions.setActionInfo(wfInstance, wfAction);
        wfJob.setWorkflowInstance(wfInstance);
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } catch (ActionExecutorException ex) {
        LOG.warn("Error ending action [{0}]. ErrorType [{1}], ErrorCode [{2}], Message [{3}]",
                wfAction.getName(), ex.getErrorType(), ex.getErrorCode(), ex.getMessage());
        wfAction.setErrorInfo(ex.getErrorCode(), ex.getMessage());
        wfAction.setEndTime(null);
        switch (ex.getErrorType()) {
            case TRANSIENT:
                if (!handleTransient(context, executor, WorkflowAction.Status.END_RETRY)) {
                    handleNonTransient(context, executor, WorkflowAction.Status.END_MANUAL);
                    wfAction.setPendingAge(new Date());
                    wfAction.setRetries(0);
                }
                wfAction.setEndTime(null);
                break;
            case NON_TRANSIENT:
                handleNonTransient(context, executor, WorkflowAction.Status.END_MANUAL);
                wfAction.setEndTime(null);
                break;
            case ERROR:
                handleError(context, executor, COULD_NOT_END, false, WorkflowAction.Status.ERROR);
                break;
            case FAILED:
                failJob(context);
                break;
        }
        WorkflowInstance wfInstance = wfJob.getWorkflowInstance();
        DagELFunctions.setActionInfo(wfInstance, wfAction);
        wfJob.setWorkflowInstance(wfInstance);
        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_END, wfAction));
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, wfJob));
    } finally {
        try {
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        } catch (JPAExecutorException e) {
            throw new CommandException(e);
        }
        if (!(executor instanceof ControlNodeActionExecutor) && EventHandlerService.isEnabled()) {
            generateEvent(wfAction, wfJob.getUser());
        }
        new SignalXCommand(jobId, actionId).call();
    }
    LOG.debug("ENDED ActionEndXCommand for action " + actionId);
    return null;
}
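The retry settings read at the top of this method come from the workflow's configuration via the OozieClient constants ACTION_MAX_RETRIES and ACTION_RETRY_INTERVAL, falling back to the executor defaults. A hedged sketch of supplying them with the job properties at submission time; the values are placeholders, and whether a given deployment allows overriding them per job is an assumption, not something this snippet shows.

import java.util.Properties;
import org.apache.oozie.client.OozieClient;

public final class RetrySettings {
    // Adds per-job retry knobs that ActionEndXCommand reads back through
    // wfJob.getWorkflowInstance().getConf(); values are illustrative only.
    public static Properties withRetrySettings(Properties jobConf) {
        jobConf.setProperty(OozieClient.ACTION_MAX_RETRIES, "3");
        jobConf.setProperty(OozieClient.ACTION_RETRY_INTERVAL, "10");
        return jobConf;
    }
}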