Use of org.apache.oozie.workflow.lite.StartNodeDef in project oozie by apache.
Class TestDBWorkflowStore, method _testInsertWF:
private void _testInsertWF() throws Exception {
    store.beginTrx();
    // Minimal app: the start node transitions directly to the "end" node.
    WorkflowApp app = new LiteWorkflowApp("testApp", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "end"))
            .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    Configuration conf1 = new Configuration();
    conf1.set(OozieClient.APP_PATH, "testPath");
    conf1.set(OozieClient.LOG_TOKEN, "testToken");
    conf1.set(OozieClient.USER_NAME, getTestUser());
    wfBean1 = createWorkflow(app, conf1, "auth");
    Configuration conf2 = new Configuration();
    conf2.set(OozieClient.APP_PATH, "testPath");
    conf2.set(OozieClient.LOG_TOKEN, "testToken");
    conf2.set(OozieClient.USER_NAME, getTestUser2());
    wfBean2 = createWorkflow(app, conf2, "auth");
    // Insert both workflow beans inside a single transaction.
    store.insertWorkflow(wfBean1);
    store.insertWorkflow(wfBean2);
    store.commitTrx();
    /*
     * SqlStatement s = getCount(OozieTable.WORKFLOWS);
     * ResultSet rs = s.prepareAndSetValues(conn).executeQuery();
     * rs.next();
     * assertEquals(2, rs.getInt(1));
     * rs.close();
     *
     * s = getCount(OozieTable.WORKFLOWS).where(isEqual(WF_id, wfBean1.getId()));
     * rs = s.prepareAndSetValues(conn).executeQuery();
     * rs.next();
     * assertEquals(1, rs.getInt(1));
     * rs.close();
     *
     * s = getCount(OozieTable.WORKFLOWS).where(isEqual(WF_id, wfBean2.getId()));
     * rs = s.prepareAndSetValues(conn).executeQuery();
     * rs.next();
     * assertEquals(1, rs.getInt(1));
     * rs.close();
     */
}
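Every entry in this listing builds the same skeleton: a LiteWorkflowApp whose StartNodeDef transitions straight into a terminal EndNodeDef. A minimal sketch of that wiring pulled out into a helper; the method name newMinimalApp is hypothetical, the classes and constructors are the ones used above:

private WorkflowApp newMinimalApp(String name) throws Exception {
    // The start node jumps directly to the node named "end"; the end node closes the app.
    return new LiteWorkflowApp(name, "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "end"))
            .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
}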
Use of org.apache.oozie.workflow.lite.StartNodeDef in project oozie by apache.
Class TestEventGeneration, method _createWorkflowJob:
private WorkflowJobBean _createWorkflowJob() throws Exception {
    // start -> "one" (java action) -> end
    LiteWorkflowApp app = new LiteWorkflowApp("my-app", "<workflow-app/>",
            new StartNodeDef(TestControlNodeHandler.class, "one"))
            .addNode(new ActionNodeDef("one", "<java></java>", TestActionNodeHandler.class, "end", "end"))
            .addNode(new EndNodeDef("end", TestControlNodeHandler.class));
    Configuration conf = new Configuration();
    Path appUri = new Path(getAppPath(), "workflow.xml");
    conf.set(OozieClient.APP_PATH, appUri.toString());
    conf.set(OozieClient.LOG_TOKEN, "testToken");
    conf.set(OozieClient.USER_NAME, getTestUser());
    WorkflowJobBean workflow = createWorkflow(app, conf, WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    String executionPath = "/";
    assertNotNull(jpaService);
    WorkflowJobInsertJPAExecutor wfInsertCmd = new WorkflowJobInsertJPAExecutor(workflow);
    jpaService.execute(wfInsertCmd);
    // Persist the "one" action already in OK state and pending, so a later signal can complete the job.
    WorkflowActionBean wfAction = addRecordToWfActionTable(workflow.getId(), "one", WorkflowAction.Status.OK, executionPath, true);
    wfAction.setPending();
    wfAction.setSignalValue(WorkflowAction.Status.OK.name());
    WorkflowActionQueryExecutor.getInstance().executeUpdate(WorkflowActionQuery.UPDATE_ACTION, wfAction);
    return workflow;
}
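The five arguments to ActionNodeDef are easy to misread in the chained call above: node name, inline action configuration, handler class, transition on success, and transition on error. A sketch of the same start-action-end chain with the arguments spelled out; the helper name appWithOneAction is hypothetical, the node definitions are the ones used in the test:

private LiteWorkflowApp appWithOneAction(String actionName, String actionConf) throws Exception {
    return new LiteWorkflowApp("my-app", "<workflow-app/>",
            // the start node transitions to the single action node
            new StartNodeDef(TestControlNodeHandler.class, actionName))
            // name, action XML, handler, ok-transition, error-transition
            .addNode(new ActionNodeDef(actionName, actionConf, TestActionNodeHandler.class, "end", "end"))
            .addNode(new EndNodeDef("end", TestControlNodeHandler.class));
}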
Use of org.apache.oozie.workflow.lite.StartNodeDef in project oozie by apache.
Class TestEventGeneration, method testWorkflowJobEvent:
@Test
public void testWorkflowJobEvent() throws Exception {
    assertEquals(0, queue.size());
    WorkflowApp app = new LiteWorkflowApp("testApp", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "fs-node"))
            .addNode(new ActionNodeDef("fs-node", "", TestLiteWorkflowLib.TestActionNodeHandler.class, "end", "end"))
            .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    WorkflowJobBean job = addRecordToWfJobTable(app, WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    // Starting job
    new StartXCommand(job.getId()).call();
    WorkflowJobGetJPAExecutor wfJobGetCmd = new WorkflowJobGetJPAExecutor(job.getId());
    job = jpaService.execute(wfJobGetCmd);
    assertEquals(WorkflowJob.Status.RUNNING, job.getStatus());
    assertEquals(1, queue.size());
    JobEvent event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(EventStatus.STARTED, event.getEventStatus());
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    assertEquals(job.getStartTime(), event.getStartTime());
    assertEquals(0, queue.size());
    // Suspending job
    new SuspendXCommand(job.getId()).call();
    job = jpaService.execute(wfJobGetCmd);
    assertEquals(WorkflowJob.Status.SUSPENDED, job.getStatus());
    assertEquals(1, queue.size());
    event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(EventStatus.SUSPEND, event.getEventStatus());
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    assertEquals(0, queue.size());
    // Resuming job
    new ResumeXCommand(job.getId()).call();
    job = jpaService.execute(wfJobGetCmd);
    assertEquals(WorkflowJob.Status.RUNNING, job.getStatus());
    assertEquals(1, queue.size());
    event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    assertEquals(job.getStartTime(), event.getStartTime());
    assertEquals(0, queue.size());
    // Killing job
    new KillXCommand(job.getId()).call();
    job = jpaService.execute(wfJobGetCmd);
    assertEquals(WorkflowJob.Status.KILLED, job.getStatus());
    assertEquals(1, queue.size());
    event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(EventStatus.FAILURE, event.getEventStatus());
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    assertEquals(job.getStartTime(), event.getStartTime());
    assertEquals(job.getEndTime(), event.getEndTime());
    assertEquals(0, queue.size());
    // Successful job (testing SignalX)
    job = _createWorkflowJob();
    LiteWorkflowInstance wfInstance = (LiteWorkflowInstance) job.getWorkflowInstance();
    wfInstance.start();
    job.setWorkflowInstance(wfInstance);
    WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED, job);
    WorkflowActionBean wfAction = jpaService.execute(new WorkflowActionGetJPAExecutor(job.getId() + "@one"));
    new SignalXCommand(job.getId(), wfAction.getId()).call();
    job = jpaService.execute(new WorkflowJobGetJPAExecutor(job.getId()));
    assertEquals(WorkflowJob.Status.SUCCEEDED, job.getStatus());
    assertEquals(1, queue.size());
    event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    assertEquals(job.getStartTime(), event.getStartTime());
    assertEquals(job.getEndTime(), event.getEndTime());
}
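Each lifecycle transition above (start, suspend, resume, kill, signal) repeats the same drain-and-check block against the event queue. A sketch of that block consolidated into one helper; the name assertNextJobEvent is hypothetical, the getters are the ones asserted in the test:

private JobEvent assertNextJobEvent(WorkflowJobBean job, EventStatus expectedStatus) {
    // Exactly one event should be queued for the transition that just ran.
    assertEquals(1, queue.size());
    JobEvent event = (JobEvent) queue.poll();
    assertNotNull(event);
    assertEquals(expectedStatus, event.getEventStatus());
    assertEquals(AppType.WORKFLOW_JOB, event.getAppType());
    assertEquals(job.getId(), event.getId());
    assertEquals(job.getUser(), event.getUser());
    assertEquals(job.getAppName(), event.getAppName());
    // The queue must be empty again once the single event is consumed.
    assertEquals(0, queue.size());
    return event;
}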
Use of org.apache.oozie.workflow.lite.StartNodeDef in project oozie by apache.
Class TestHCatELFunctions, method testHCatTableExists:
@Test
public void testHCatTableExists() throws Exception {
    dropTable("db1", "table1", true);
    dropDatabase("db1", true);
    createDatabase("db1");
    createTable("db1", "table1");
    Configuration protoConf = new Configuration();
    protoConf.set(OozieClient.USER_NAME, getTestUser());
    protoConf.set("hadoop.job.ugi", getTestUser() + "," + "group");
    Configuration conf = new XConfiguration();
    conf.set(OozieClient.APP_PATH, "appPath");
    conf.set(OozieClient.USER_NAME, getTestUser());
    conf.set("test.dir", getTestCaseDir());
    // "table1" exists in HCatalog, "table2" does not.
    conf.set("table1", getHCatURI("db1", "table1").toString());
    conf.set("table2", getHCatURI("db1", "table2").toString());
    LiteWorkflowApp def = new LiteWorkflowApp("name", "<workflow-app/>",
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "end"))
            .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    LiteWorkflowInstance job = new LiteWorkflowInstance(def, conf, "wfId");
    WorkflowJobBean wf = new WorkflowJobBean();
    wf.setId(job.getId());
    wf.setAppName("name");
    wf.setAppPath("appPath");
    wf.setUser(getTestUser());
    wf.setGroup("group");
    wf.setWorkflowInstance(job);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    protoConf.writeXml(baos);
    wf.setProtoActionConf(baos.toString());
    WorkflowActionBean action = new WorkflowActionBean();
    action.setId("actionId");
    action.setName("actionName");
    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, wf, action);
    assertEquals(true, (boolean) eval.evaluate("${hcat:exists(wf:conf('table1'))}", Boolean.class));
    assertEquals(false, (boolean) eval.evaluate("${hcat:exists(wf:conf('table2'))}", Boolean.class));
    dropTable("db1", "table1", true);
    dropDatabase("db1", true);
}
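The two assertions at the end are the heart of the test: a "workflow" ELEvaluator, configured through DagELFunctions with the job and action context, resolves hcat:exists against a live HCatalog URI. A sketch of that evaluation path in isolation; the helper name evalHCatExists is hypothetical, the evaluator setup and the EL function are the ones exercised above:

private boolean evalHCatExists(WorkflowJobBean wf, WorkflowActionBean action, String hcatUri) throws Exception {
    // Build the same "workflow" evaluator the test uses and bind the job/action context.
    ELEvaluator eval = Services.get().get(ELService.class).createEvaluator("workflow");
    DagELFunctions.configureEvaluator(eval, wf, action);
    // hcat:exists returns true only if the table or partition behind the URI is present.
    return eval.evaluate("${hcat:exists('" + hcatUri + "')}", Boolean.class);
}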
Use of org.apache.oozie.workflow.lite.StartNodeDef in project oozie by apache.
Class TestJavaActionExecutor, method addRecordToWfJobTable:
private WorkflowJobBean addRecordToWfJobTable(String wfId, String wfxml, Map<String, String> otherProps) throws Exception {
    WorkflowApp app = new LiteWorkflowApp("testApp", wfxml,
            new StartNodeDef(LiteWorkflowStoreService.LiteControlNodeHandler.class, "start"))
            .addNode(new EndNodeDef("end", LiteWorkflowStoreService.LiteControlNodeHandler.class));
    Configuration conf = getHadoopAccessorService().createConfiguration(new URI(getNameNodeUri()).getAuthority());
    conf.set(OozieClient.APP_PATH, getNameNodeUri() + "/testPath");
    conf.set(OozieClient.LOG_TOKEN, "testToken");
    conf.set(OozieClient.USER_NAME, getTestUser());
    conf.set("property3", "prop3");
    conf.set("value3", "val3");
    // Any caller-supplied properties are merged into the job configuration.
    if (otherProps != null) {
        for (Map.Entry<String, String> ent : otherProps.entrySet()) {
            conf.set(ent.getKey(), ent.getValue());
        }
    }
    WorkflowJobBean wfBean = createWorkflow(app, conf, "auth");
    wfBean.setId(wfId);
    wfBean.setStatus(WorkflowJob.Status.SUCCEEDED);
    WorkflowActionBean action = new WorkflowActionBean();
    action.setName("test");
    // The child action id is derived from the workflow id via the UUIDService.
    action.setId(Services.get().get(UUIDService.class).generateChildId(wfBean.getId(), "test"));
    wfBean.getActions().add(action);
    return wfBean;
}
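A hypothetical call site for the helper above, to show how the extra properties flow into the job configuration; the workflow id, the XML string, and the property values are illustrative only:

private WorkflowJobBean buildSucceededJob() throws Exception {
    Map<String, String> props = new HashMap<String, String>();
    // Any entries here end up as extra keys in the job Configuration.
    props.put("property4", "val4");
    String wfXml = "<workflow-app/>";
    return addRecordToWfJobTable("0000000-000000000000000-oozie-test-W", wfXml, props);
}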