Use of org.apache.oozie.action.hadoop.TestJavaActionExecutor in project oozie by apache.
In the class TestShareLibService, method testAddShareLib_pig_withVersion:
@Test
public void testAddShareLib_pig_withVersion() throws Exception {
    setShipLauncherInOozieConfig();
    FileSystem fs = getFileSystem();
    Date time = new Date(System.currentTimeMillis());
    Path basePath = new Path(getOozieConfig().get(WorkflowAppService.SYSTEM_LIB_PATH));
    Path libpath = new Path(basePath, ShareLibService.SHARE_LIB_PREFIX + ShareLibService.dateFormat.format(time));
    fs.mkdirs(libpath);
    // One unversioned pig directory plus two versioned ones; only pig_10 contains a jar.
    Path pigPath = new Path(libpath.toString() + Path.SEPARATOR + "pig");
    Path pigPath1 = new Path(libpath.toString() + Path.SEPARATOR + "pig_9");
    Path pigPath2 = new Path(libpath.toString() + Path.SEPARATOR + "pig_10");
    fs.mkdirs(pigPath);
    fs.mkdirs(pigPath1);
    fs.mkdirs(pigPath2);
    createFiles(libpath.toString() + Path.SEPARATOR + "pig_10" + Path.SEPARATOR + "pig-10.jar");
    services.init();
    String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
            + getNameNodeUri() + "</name-node>"
            + "<property><name>oozie.action.sharelib.for.pig</name><value>pig_10</value></property>" + "</pig>";
    Element eActionXml = XmlUtils.parseXml(actionXml);
    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    WorkflowJobBean wfj = new WorkflowJobBean();
    protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, true);
    wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
    wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
    Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
    PigActionExecutor ae = new PigActionExecutor();
    Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
    jobConf.set("oozie.action.sharelib.for.pig", "pig_10");
    ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
    // Only pig-10.jar from the versioned pig_10 directory should be localized, together with MyPig.jar and MyOozie.jar.
    verifyFilesInDistributedCache(DistributedCache.getCacheFiles(jobConf), "MyPig.jar", "MyOozie.jar", "pig-10.jar");
}
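The helper verifyFilesInDistributedCache used above belongs to TestShareLibService but its body is not reproduced in this excerpt. A minimal sketch of what such a check could look like, assuming it only needs to confirm that each expected file name appears among the cache URIs (JUnit asserts assumed to be statically imported):

// Hypothetical sketch, not the actual helper from TestShareLibService.
private void verifyFilesInDistributedCache(URI[] cacheFiles, String... expectedFileNames) {
    assertNotNull("expected some files in the distributed cache", cacheFiles);
    String allFiles = Arrays.toString(cacheFiles);
    for (String expected : expectedFileNames) {
        assertTrue("missing " + expected + " in " + allFiles, allFiles.contains(expected));
    }
}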
Use of org.apache.oozie.action.hadoop.TestJavaActionExecutor in project oozie by apache.
In the class TestShareLibService, method testConfFileAddedToActionConf:
@Test
public void testConfFileAddedToActionConf() throws Exception {
    try {
        XConfiguration protoConf = new XConfiguration();
        protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
        WorkflowJobBean wfj = new WorkflowJobBean();
        protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, true);
        wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
        wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
        Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
        // Test hive-site.xml in sharelib cache
        setupSharelibConf("hive-site.xml", "oozie.hive_conf");
        ShareLibService shareLibService = services.get(ShareLibService.class);
        assertEquals(shareLibService.getShareLibConfigMap().get("hive_conf").values().size(), 1);
        assertEquals(shareLibService.getShareLibConfigMap().get("hive_conf").keySet().toArray(new Path[] {})[0].getName(),
                "hive-site.xml");
        // Test hive-site.xml not in distributed cache
        setupSharelibConf("hive-site.xml", "oozie.hive_conf");
        String actionXml = "<hive>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
                + getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</hive>";
        Element eActionXml = XmlUtils.parseXml(actionXml);
        HiveActionExecutor ae = new HiveActionExecutor();
        Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
        Configuration actionConf = ae.createBaseHadoopConf(context, eActionXml);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        URI[] cacheFiles = DistributedCache.getCacheFiles(actionConf);
        String cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with linkname
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        Properties prop = new Properties();
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals(jobConf.get("oozie.hive_conf-sharelib-test"), "test");
        // Test hive-site.xml property in jobconf with linkname and with hdfs path
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", "hdfs://" + TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals(jobConf.get("oozie.hive_conf-sharelib-test"), "test");
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with non hdfs path
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals(jobConf.get("oozie.hive_conf-sharelib-test"), "test");
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
        // Test hive-site.xml property in jobconf with non hdfs path with link name
        prop = new Properties();
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        actionConf = ae.createBaseHadoopConf(context, eActionXml);
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        jobConf.set("oozie.action.sharelib.for.hive", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        assertEquals(jobConf.get("oozie.hive_conf-sharelib-test"), "test");
        cacheFiles = DistributedCache.getCacheFiles(actionConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertFalse(cacheFilesStr.contains("hive-site.xml"));
    } finally {
        getFileSystem().delete(new Path(SHARELIB_PATH), true);
    }
}
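The assertEquals checks on "oozie.hive_conf-sharelib-test" above only make sense if the hive-site.xml written by setupSharelibConf (a TestShareLibService helper not shown in this excerpt) carries that marker property. A hedged sketch of how such a conf file could be produced with XConfiguration; the file location and marker key are taken from the constants used above, but the write itself is an illustration, not the real helper:

// Illustrative only: build a hive-site.xml carrying the marker property the assertions look for.
XConfiguration hiveSite = new XConfiguration();
hiveSite.set("oozie.hive_conf-sharelib-test", "test");
Path confFile = new Path(TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml");
try (OutputStream out = getFileSystem().create(confFile, true)) {
    // Configuration.writeXml emits the standard <configuration><property>...</property></configuration> form.
    hiveSite.writeXml(out);
}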
Use of org.apache.oozie.action.hadoop.TestJavaActionExecutor in project oozie by apache.
In the class TestShareLibService, method testConfFileAddedToDistributedCache:
@Test
public void testConfFileAddedToDistributedCache() throws Exception {
    try {
        Properties prop = new Properties();
        prop.put("oozie.hive_conf", TEST_HDFS_HOME + SHARELIB_PATH + "hive-site.xml#hive-site.xml");
        setupSharelibConf("hive-site.xml", "oozie.hive_conf", prop);
        String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
                + getNameNodeUri() + "</name-node>" + "<script>test</script>" + "</pig>";
        Element eActionXml = XmlUtils.parseXml(actionXml);
        XConfiguration protoConf = new XConfiguration();
        protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
        WorkflowJobBean wfj = new WorkflowJobBean();
        protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, true);
        wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
        wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
        Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
        PigActionExecutor ae = new PigActionExecutor();
        Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
        jobConf.set("oozie.action.sharelib.for.pig", "hive_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        URI[] cacheFiles = DistributedCache.getCacheFiles(jobConf);
        String cacheFilesStr = Arrays.toString(cacheFiles);
        // The conf file is shipped through the distributed cache (keeping its #linkname);
        // its marker property is not merged into jobConf.
        assertEquals(jobConf.get("oozie.hive_conf-sharelib-test"), null);
        assertTrue(URLDecoder.decode(cacheFilesStr).contains("hive-site.xml#hive-site.xml"));
        setupSharelibConf("hbase-site.xml", "oozie.hbase_conf");
        jobConf = ae.createBaseHadoopConf(context, eActionXml);
        jobConf.set("oozie.action.sharelib.for.pig", "hbase_conf");
        ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
        cacheFiles = DistributedCache.getCacheFiles(jobConf);
        cacheFilesStr = Arrays.toString(cacheFiles);
        assertTrue(cacheFilesStr.contains("hbase-site.xml"));
    } finally {
        getFileSystem().delete(new Path(SHARELIB_PATH), true);
    }
}
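The "hive-site.xml#hive-site.xml" value used above relies on the standard Hadoop convention that a URI fragment on a distributed cache entry becomes the symlink name inside the task working directory. A small standalone sketch of that convention with the plain Hadoop API (the HDFS URI below is a made-up example):

Configuration conf = new Configuration();
// The part after '#' is the link name the file will be exposed under in the container.
URI withLinkName = URI.create("hdfs://namenode:8020/user/test/share/lib/hive_conf/hive-site.xml#hive-site.xml");
DistributedCache.addCacheFile(withLinkName, conf);
// getCacheFiles returns the URI including its fragment, which is what the contains(...) assertions inspect.
URI[] cached = DistributedCache.getCacheFiles(conf);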
Use of org.apache.oozie.action.hadoop.TestJavaActionExecutor in project oozie by apache.
In the class TestShareLibService, method setUpPigJob:
private URI[] setUpPigJob(boolean useSystemSharelib) throws Exception {
    services.init();
    String actionXml = "<pig>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
            + getNameNodeUri() + "</name-node></pig>";
    Element eActionXml = XmlUtils.parseXml(actionXml);
    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    WorkflowJobBean wfj = new WorkflowJobBean();
    protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, useSystemSharelib);
    wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
    wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
    Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
    PigActionExecutor ae = new PigActionExecutor();
    Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
    jobConf.set("oozie.action.sharelib.for.pig", "pig");
    ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
    return DistributedCache.getCacheFiles(jobConf);
}
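setUpPigJob only appears here as a helper; the tests that call it fall outside this excerpt. Hypothetical callers, shown purely to illustrate how the boolean flag would typically be exercised; the method names and the exact expectations are assumptions that depend on the surrounding Oozie configuration:

@Test
public void testPigJobWithSystemLibpathSketch() throws Exception {
    // With oozie.use.system.libpath=true the pig sharelib jars are expected to be localized.
    URI[] cacheFiles = setUpPigJob(true);
    assertTrue(Arrays.toString(cacheFiles).contains("MyPig.jar"));
}

@Test
public void testPigJobWithoutSystemLibpathSketch() throws Exception {
    // With the flag off, no pig sharelib jar should have been added for the action.
    URI[] cacheFiles = setUpPigJob(false);
    assertTrue(cacheFiles == null || !Arrays.toString(cacheFiles).contains("MyPig.jar"));
}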
Use of org.apache.oozie.action.hadoop.TestJavaActionExecutor in project oozie by apache.
In the class TestShareLibService, method testAddShareLibDistributedCache:
@Test
public void testAddShareLibDistributedCache() throws Exception {
    setShipLauncherInOozieConfig();
    services.init();
    String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
            + getNameNodeUri() + "</name-node>" + "</java>";
    Element eActionXml = XmlUtils.parseXml(actionXml);
    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    WorkflowJobBean wfj = new WorkflowJobBean();
    wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
    wfj.setConf(XmlUtils.prettyPrint(new XConfiguration()).toString());
    Context context = new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
    PigActionExecutor ae = new PigActionExecutor();
    Configuration jobConf = ae.createBaseHadoopConf(context, eActionXml);
    ae.setLibFilesArchives(context, eActionXml, new Path("hdfs://dummyAppPath"), jobConf);
    verifyFilesInDistributedCache(DistributedCache.getCacheFiles(jobConf), MyPig.class.getName() + ".jar",
            MyOozie.class.getName() + ".jar");
}
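Every method above rebuilds the same TestJavaActionExecutor.Context by hand. A hypothetical helper that factors out that boilerplate; the method name is illustrative and not part of TestShareLibService:

private Context newTestContext(boolean useSystemLibpath) throws Exception {
    XConfiguration protoConf = new XConfiguration();
    protoConf.set(WorkflowAppService.HADOOP_USER, getTestUser());
    protoConf.setBoolean(OozieClient.USE_SYSTEM_LIBPATH, useSystemLibpath);
    WorkflowJobBean wfj = new WorkflowJobBean();
    wfj.setProtoActionConf(XmlUtils.prettyPrint(protoConf).toString());
    wfj.setConf(XmlUtils.prettyPrint(protoConf).toString());
    // The Context implementation nested in TestJavaActionExecutor gives the action executors a workflow context to run against.
    return new TestJavaActionExecutor().new Context(wfj, new WorkflowActionBean());
}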