Use of org.apache.oozie.WorkflowJobBean in the Apache Oozie project.
Excerpt from class TestWorkflowsJobGetJPAExecutor, method _testGetWFInfos.
/**
 * Verifies paging in {@code WorkflowsJobGetJPAExecutor}: with an empty filter,
 * a window of length 1 returns exactly one workflow bean and a window of
 * length 2 returns exactly two.
 *
 * @throws JPAExecutorException if the JPA query fails
 */
private void _testGetWFInfos() throws JPAExecutorException {
    JPAService jpa = Services.get().get(JPAService.class);
    assertNotNull(jpa);

    // First query: start=1, len=1 -> one workflow expected.
    WorkflowsInfo info = jpa.execute(new WorkflowsJobGetJPAExecutor(new HashMap<String, List<String>>(), 1, 1));
    System.out.println("got WorkflowsInfo " + info.getLen());
    List<WorkflowJobBean> workflows = info.getWorkflows();
    assertEquals(1, workflows.size());

    // Second query: start=1, len=2 -> two workflows expected.
    info = jpa.execute(new WorkflowsJobGetJPAExecutor(new HashMap<String, List<String>>(), 1, 2));
    workflows = info.getWorkflows();
    assertEquals(2, workflows.size());
}
Use of org.apache.oozie.WorkflowJobBean in the Apache Oozie project.
Excerpt from class TestWorkflowsJobGetJPAExecutor, method testGetWFInfoForTextAndStatus.
/**
 * A PREP workflow whose application name matches the free-text filter must be
 * the single result when filtering on both text and status.
 *
 * @throws Exception on any test failure
 */
public void testGetWFInfoForTextAndStatus() throws Exception {
    // Create a PREP workflow and give it a searchable app name.
    WorkflowJobBean job = addRecordToWfJobTable(WorkflowJob.Status.PREP, WorkflowInstance.Status.PREP);
    job.setAppName("wf-name-1");
    WorkflowJobQueryExecutor.getInstance().executeUpdate(WorkflowJobQuery.UPDATE_WORKFLOW, job);

    // Filter on both the app name text and the PREP status.
    List<String> nameFilter = new ArrayList<String>();
    nameFilter.add("wf-name-1");
    List<String> statusFilter = new ArrayList<String>();
    statusFilter.add(WorkflowJob.Status.PREP.toString());
    Map<String, List<String>> filter = new HashMap<String, List<String>>();
    filter.put(OozieClient.FILTER_TEXT, nameFilter);
    filter.put(OozieClient.FILTER_STATUS, statusFilter);

    JPAService jpa = Services.get().get(JPAService.class);
    WorkflowsInfo result = jpa.execute(new WorkflowsJobGetJPAExecutor(filter, 1, 20));
    assertEquals(1, result.getWorkflows().size());
}
Use of org.apache.oozie.WorkflowJobBean in the Apache Oozie project.
Excerpt from class TestRecoveryService, method testCoordActionRecoveryServiceForSuspended.
/**
 * Exercises the Recovery Service runnable: insert a SUSPENDED coordinator job,
 * a SUSPENDED coordinator action and a RUNNING workflow, run the recovery
 * runnable, and verify the workflow transitions to SUSPENDED.
 *
 * @throws Exception on any test failure
 */
public void testCoordActionRecoveryServiceForSuspended() throws Exception {
    Date start = DateUtils.parseDateOozieTZ("2009-02-01T01:00Z");
    Date end = DateUtils.parseDateOozieTZ("2009-02-02T23:59Z");
    CoordinatorJobBean coordJob = addRecordToCoordJobTable(CoordinatorJob.Status.SUSPENDED, start, end, false, false, 1);
    WorkflowJobBean workflow = addRecordToWfJobTable(WorkflowJob.Status.RUNNING, WorkflowInstance.Status.RUNNING);
    final String workflowId = workflow.getId();
    addRecordToCoordActionTable(coordJob.getId(), 1, CoordinatorAction.Status.SUSPENDED, "coord-action-get.xml", workflowId, "RUNNING", 2);
    // Age the records before running recovery (the service only picks up
    // sufficiently old actions).
    sleep(3000);
    new RecoveryRunnable(0, 1, 1).run();
    final JPAService jpa = Services.get().get(JPAService.class);
    assertNotNull(jpa);
    // Poll until the workflow is suspended, then assert the final state.
    waitFor(10000, new Predicate() {
        public boolean evaluate() throws Exception {
            WorkflowJobBean current = jpa.execute(new WorkflowJobGetJPAExecutor(workflowId));
            return (current.getStatus() == WorkflowJob.Status.SUSPENDED);
        }
    });
    WorkflowJobBean finalState = jpa.execute(new WorkflowJobGetJPAExecutor(workflowId));
    assertEquals(WorkflowJob.Status.SUSPENDED, finalState.getStatus());
}
Use of org.apache.oozie.WorkflowJobBean in the Apache Oozie project.
Excerpt from class TestShareLibService, method testAddShareLib_pig_withVersion.
/**
 * Selecting a versioned pig sharelib directory ({@code pig_10}) through the
 * {@code oozie.action.sharelib.for.pig} property must place that directory's
 * jar in the distributed cache.
 */
@Test
public void testAddShareLib_pig_withVersion() throws Exception {
    setShipLauncherInOozieConfig();
    FileSystem fs = getFileSystem();
    Path basePath = new Path(getOozieConfig().get(WorkflowAppService.SYSTEM_LIB_PATH));
    Path libPath = new Path(basePath,
            ShareLibService.SHARE_LIB_PREFIX + ShareLibService.dateFormat.format(new Date(System.currentTimeMillis())));
    fs.mkdirs(libPath);
    // Three candidate pig directories; only pig_10 contains a jar.
    for (String dir : new String[] { "pig", "pig_9", "pig_10" }) {
        fs.mkdirs(new Path(libPath.toString() + Path.SEPARATOR + dir));
    }
    createFiles(libPath.toString() + Path.SEPARATOR + "pig_10" + Path.SEPARATOR + "pig-10.jar");
    services.init();
    String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
    Element eActionXml = XmlUtils.parseXml(actionXml);
    // Build a workflow job whose proto conf enables the system libpath.
    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, true);
    WorkflowJobBean wfJob = new WorkflowJobBean();
    wfJob.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
    wfJob.setConf(XmlUtils.prettyPrint(protoConf).toString());
    Context context = new TestJavaActionExecutor().new Context(wfJob, new WorkflowActionBean());
    PigActionExecutor executor = new PigActionExecutor();
    Configuration jobConf = executor.createBaseHadoopConf(context, eActionXml);
    jobConf.set("oozie.action.sharelib.for.pig", "pig_10");
    executor.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
    verifyFilesInDistributedCache(DistributedCache.getCacheFiles(jobConf), "MyPig.jar", "MyOozie.jar", "pig-10.jar");
}
Use of org.apache.oozie.WorkflowJobBean in the Apache Oozie project.
Excerpt from class TestShareLibService, method testConfFileAddedToActionConf.
/**
 * Verifies that a sharelib configuration file (hive-site.xml) is registered in
 * the sharelib config map and its properties are merged into the job
 * configuration, but the file itself is never added to the distributed cache,
 * for every way the sharelib property can be expressed: with or without a
 * link-name fragment, and with or without an explicit hdfs:// scheme.
 *
 * Fix: the assertEquals calls below previously passed arguments as
 * (actual, expected); JUnit's contract is (expected, actual), so the argument
 * order was swapped to keep failure messages accurate.
 */
@Test
public void testConfFileAddedToActionConf() throws Exception {
    try {
        // Workflow job whose proto conf enables the system libpath.
        XConfiguration protoConf = new XConfiguration();
        protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
        WorkflowJobBean wfj = new WorkflowJobBean();
        protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, true);
        wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
        wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
        Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
        // Test hive-site.xml in sharelib cache
        setupSharelibConf("hive-site.xml", "oozie.hive_conf");
        ShareLibService shareLibService = services.get(ShareLibService.class);
        assertEquals(1, shareLibService.getShareLibConfigMap().get("hive_conf").values().size());
        assertEquals("hive-site.xml", shareLibService.getShareLibConfigMap().get("hive_conf").keySet().toArray(new Path[] {})[0].getName());
        // Test hive-site.xml not in distributed cache
        setupSharelibConf("hive-site.xml", "oozie.hive_conf");
        String actionXml = "<hive>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</hive>";
        Element eActionXml = XmlUtils.parseXml(actionXml);
        HiveActionExecutor ae = new HiveActionExecutor();
        Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
        Configuration actionConf = ae.createBaseHadoopConf(context, eActionXml);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        URI[] cacheFiles = DistributedCache.getCacheFiles(actionConf);
        String cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with linkname
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        Properties prop = new Properties();
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals("test", jobConf.get("oozie.hive_conf-sharelib-test"));
        // Test hive-site.xml property in jobconf with linkname
        // and with hdfs path
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", "hdfs://" + TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals("test", jobConf.get("oozie.hive_conf-sharelib-test"));
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with non hdfs path
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals("test", jobConf.get("oozie.hive_conf-sharelib-test"));
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with non hdfs path with
        // link name
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals("test", jobConf.get("oozie.hive_conf-sharelib-test"));
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
    } finally {
        // Always clean up the sharelib directory created by setupSharelibConf.
        getFileSystem().delete(new Path(SHARELIB_PATH), true);
    }
}
Aggregations