Use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.
The class FsActionExecutor, method getFileSystemFor.
/**
 * @param path path to access.
 * @param user user on whose behalf the FileSystem is created.
 * @return FileSystem a file system handle for the given user and path.
 * @throws HadoopAccessorException if the file system cannot be created.
 */
private FileSystem getFileSystemFor(Path path, String user) throws HadoopAccessorException {
    HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
    Configuration jobConf = has.createConfiguration(path.toUri().getAuthority());
    return has.createFileSystem(user, path.toUri(), jobConf);
}
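Every snippet on this page follows the same three-step pattern: look up the HadoopAccessorService from the Services container, build a Configuration keyed on the URI authority (the NameNode host:port, which selects the per-cluster settings), and create a FileSystem on behalf of the requesting user rather than the server's own principal. Below is a minimal sketch of that pattern in isolation, reading a file's contents into a string; the class and method names are illustrative, and it assumes it runs inside an initialized Oozie Services container.

import java.io.InputStreamReader;
import java.io.StringWriter;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.IOUtils;

public final class HdfsReadSketch {

    // Hypothetical helper; assumes Services has already been initialized
    // (i.e. this code runs inside the Oozie server).
    static String readFileAsUser(String user, URI uri) throws Exception {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        // The authority (NameNode host:port) selects the right cluster configuration.
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        // The FileSystem acts on behalf of 'user', not the server principal.
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        StringWriter writer = new StringWriter();
        IOUtils.copyCharStream(new InputStreamReader(fs.open(new Path(uri.getPath()))), writer);
        return writer.toString();
    }
    // e.g. readFileAsUser("alice", new URI("hdfs://namenode:8020/user/alice/workflow.xml"))
    // where the user name and HDFS URI are placeholders.
}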
Use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.
The class V2ValidateServlet, method doPost.
/**
* Validate workflow definition.
*/
@Override
@SuppressWarnings("unchecked")
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    validateContentType(request, RestConstants.XML_CONTENT_TYPE);
    String file = request.getParameter(RestConstants.FILE_PARAM);
    String user = request.getParameter(RestConstants.USER_PARAM);
    stopCron();
    StringWriter stringWriter = new StringWriter();
    if (file.startsWith("hdfs://")) {
        try {
            URI uri = new URI(file);
            HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
            Configuration fsConf = has.createConfiguration(uri.getAuthority());
            FileSystem fs = has.createFileSystem(user, uri, fsConf);
            Path path = new Path(uri.getPath());
            IOUtils.copyCharStream(new InputStreamReader(fs.open(path)), stringWriter);
        } catch (Exception e) {
            throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ErrorCode.E0505, "File does not exist, " + file);
        }
    } else {
        IOUtils.copyCharStream(new InputStreamReader(request.getInputStream()), stringWriter);
    }
    try {
        validate(stringWriter.toString());
    } catch (Exception e) {
        throw new XServletException(HttpServletResponse.SC_BAD_REQUEST, ErrorCode.E0701, file + ", " + e.toString());
    }
    JSONObject json = createJSON("Valid workflow-app");
    startCron();
    sendJsonResponse(response, HttpServletResponse.SC_OK, json);
}
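This servlet backs Oozie's v2 validate REST call: when the file parameter is an hdfs:// URI the definition is fetched from HDFS through HadoopAccessorService, otherwise the XML is read from the request body, and either way it is passed to validate() against the workflow schema. A hedged client-side sketch using only the JDK follows; the endpoint path and query parameters are inferred from the parameter names the servlet reads (file, user), and the host, port, user, and XML payload are placeholders.

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public final class ValidateCallSketch {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint shape; host/port point at a local Oozie server.
        URL url = new URL("http://localhost:11000/oozie/v2/validate?file=workflow.xml&user=alice");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        conn.setRequestProperty("Content-Type", "application/xml");
        conn.setDoOutput(true);
        // Since 'file' is not an hdfs:// URI, the servlet reads the XML from the body.
        String workflowXml = "<workflow-app xmlns=\"uri:oozie:workflow:1.0\" name=\"demo\">"
                + "<start to=\"end\"/><end name=\"end\"/></workflow-app>";
        try (OutputStream out = conn.getOutputStream()) {
            out.write(workflowXml.getBytes(StandardCharsets.UTF_8));
        }
        // Expect 200 with a JSON body ("Valid workflow-app") on success, 400 on failure.
        System.out.println("HTTP " + conn.getResponseCode());
    }
}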
Use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.
The class CoordSubmitXCommand, method readDefinition.
/**
* Read coordinator definition.
*
* @param appPath application path.
* @return coordinator definition.
* @throws CoordinatorJobException thrown if the definition could not be read.
*/
protected String readDefinition(String appPath) throws CoordinatorJobException {
    String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
    // Configuration confHadoop = CoordUtils.getHadoopConf(conf);
    try {
        URI uri = new URI(appPath);
        LOG.debug("user=" + user);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path appDefPath = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        // check file exists for dataset include file, app xml already checked
        if (!fs.exists(path)) {
            throw new URISyntaxException(path.toString(), "path does not exist: " + path.toString());
        }
        if (!fs.isFile(path)) {
            appDefPath = new Path(path, COORDINATOR_XML_FILE);
        } else {
            appDefPath = path;
        }
        Reader reader = new InputStreamReader(fs.open(appDefPath));
        StringWriter writer = new StringWriter();
        IOUtils.copyCharStream(reader, writer);
        return writer.toString();
    } catch (IOException ex) {
        LOG.warn("IOException: " + XmlUtils.prettyPrint(conf), ex);
        throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex);
    } catch (URISyntaxException ex) {
        LOG.warn("URISyntaxException: " + ex.getMessage());
        throw new CoordinatorJobException(ErrorCode.E1002, appPath, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new CoordinatorJobException(ex);
    } catch (Exception ex) {
        LOG.warn("Exception: ", ex);
        throw new CoordinatorJobException(ErrorCode.E1001, ex.getMessage(), ex);
    }
}
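readDefinition accepts either form of application path: a directory, in which case the well-known file name coordinator.xml is appended, or a direct pointer to the definition file. The same directory-or-file check recurs in the next two snippets with config-default.xml. A small sketch of just that resolution step; resolveAppDefinition is a hypothetical helper, not an Oozie method.

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

final class AppPathSketch {
    // Hypothetical helper mirroring the directory-vs-file logic in readDefinition.
    static Path resolveAppDefinition(FileSystem fs, Path appPath, String defaultFileName) throws IOException {
        if (!fs.exists(appPath)) {
            throw new IOException("path does not exist: " + appPath);
        }
        // Directory: look for the well-known definition file inside it.
        // File: the caller pointed straight at the definition.
        return fs.isFile(appPath) ? appPath : new Path(appPath, defaultFileName);
    }
}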
Use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.
The class ReRunXCommand, method setupReRun.
private void setupReRun() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    LogUtils.setLogInfo(wfBean);
    WorkflowInstance oldWfInstance = this.wfBean.getWorkflowInstance();
    WorkflowInstance newWfInstance;
    String appPath = null;
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        WorkflowApp app = wps.parseDef(conf, null);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        appPath = conf.get(OozieClient.APP_PATH);
        URI uri = new URI(appPath);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(wfBean.getUser(), uri, fsConf);
        Path configDefault = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, SubmitXCommand.CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            XConfiguration.injectDefaults(defaultConf, conf);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolving all variables in the job properties. This ensures the Hadoop Configuration semantics are
        // preserved. The Configuration.get function within XConfiguration.resolve() works recursively to get the
        // final value corresponding to a key in the map. Resetting the conf to contain all the resolved values is
        // necessary to ensure propagation of Oozie properties to Hadoop calls downstream.
        conf = ((XConfiguration) conf).resolve();
        try {
            newWfInstance = workflowLib.createInstance(app, conf, jobId);
        } catch (WorkflowException e) {
            throw new CommandException(e);
        }
        String appName = ELUtils.resolveAppName(app.getName(), conf);
        if (SLAService.isEnabled()) {
            Element wfElem = XmlUtils.parseXml(app.getDefinition());
            ELEvaluator evalSla = SubmitXCommand.createELEvaluatorForGroup(conf, "wf-sla-submit");
            Element eSla = XmlUtils.getSLAElement(wfElem);
            String jobSlaXml = null;
            if (eSla != null) {
                jobSlaXml = SubmitXCommand.resolveSla(eSla, evalSla);
            }
            writeSLARegistration(wfElem, jobSlaXml, newWfInstance.getId(), conf.get(SubWorkflowActionExecutor.PARENT_ID),
                    conf.get(OozieClient.USER_NAME), appName, evalSla);
        }
        wfBean.setAppName(appName);
        wfBean.setProtoActionConf(protoActionConf.toXmlString());
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (IOException ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (URISyntaxException ex) {
        throw new CommandException(ErrorCode.E0711, appPath, ex.getMessage(), ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E1007, ex.getMessage(), ex);
    }
    for (int i = 0; i < actions.size(); i++) {
        // action will be used to rerun the job.
        if (!nodesToSkip.contains(actions.get(i).getName())
                && !(conf.getBoolean(OozieClient.RERUN_FAIL_NODES, false)
                        && SubWorkflowActionExecutor.ACTION_TYPE.equals(actions.get(i).getType()))) {
            deleteList.add(actions.get(i));
            LOG.info("Deleting Action[{0}] for re-run", actions.get(i).getId());
        } else {
            copyActionData(newWfInstance, oldWfInstance);
        }
    }
    wfBean.setAppPath(conf.get(OozieClient.APP_PATH));
    wfBean.setConf(XmlUtils.prettyPrint(conf).toString());
    wfBean.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
    wfBean.setUser(conf.get(OozieClient.USER_NAME));
    String group = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.JOB_ACL, OozieClient.GROUP_NAME, null);
    wfBean.setGroup(group);
    wfBean.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
    wfBean.setEndTime(null);
    wfBean.setRun(wfBean.getRun() + 1);
    wfBean.setStatus(WorkflowJob.Status.PREP);
    wfBean.setWorkflowInstance(newWfInstance);
    try {
        wfBean.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_RERUN, wfBean));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, deleteList);
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    } finally {
        updateParentIfNecessary(wfBean);
    }
}
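Both this method and SubmitXCommand.execute below load config-default.xml from the application directory and merge it with XConfiguration.injectDefaults, which fills in a default property only when the job configuration does not already define it, so user-supplied job properties always win. A sketch of that merge semantics over plain Hadoop Configuration; injectDefaultsSketch is an illustrative stand-in, not the Oozie implementation.

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

final class InjectDefaultsSketch {
    // Illustrative stand-in for XConfiguration.injectDefaults(srcConf, targetConf):
    // copy each default property into the target only if the target has no value for it.
    static void injectDefaultsSketch(Configuration defaults, Configuration target) {
        for (Map.Entry<String, String> entry : defaults) {
            if (target.get(entry.getKey()) == null) {
                target.set(entry.getKey(), entry.getValue());
            }
        }
    }

    public static void main(String[] args) {
        Configuration defaults = new Configuration(false);
        defaults.set("queueName", "default");
        Configuration job = new Configuration(false);
        job.set("queueName", "analytics"); // user-supplied value wins
        injectDefaultsSketch(defaults, job);
        System.out.println(job.get("queueName")); // prints: analytics
    }
}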
Use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.
The class SubmitXCommand, method execute.
@Override
protected String execute() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        String user = conf.get(OozieClient.USER_NAME);
        URI uri = new URI(conf.get(OozieClient.APP_PATH));
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path configDefault = null;
        Configuration defaultConf = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            try {
                defaultConf = new XConfiguration(fs.open(configDefault));
                PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
                XConfiguration.injectDefaults(defaultConf, conf);
            } catch (IOException ex) {
                throw new IOException("default configuration file, " + ex.getMessage(), ex);
            }
        }
        if (defaultConf != null) {
            defaultConf = resolveDefaultConfVariables(defaultConf);
        }
        WorkflowApp app = wps.parseDef(conf, defaultConf);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolving all variables in the job properties.
        // This ensures the Hadoop Configuration semantics are preserved.
        XConfiguration resolvedVarsConf = new XConfiguration();
        for (Map.Entry<String, String> entry : conf) {
            resolvedVarsConf.set(entry.getKey(), conf.get(entry.getKey()));
        }
        conf = resolvedVarsConf;
        WorkflowInstance wfInstance;
        try {
            wfInstance = workflowLib.createInstance(app, conf);
        } catch (WorkflowException e) {
            throw new StoreException(e);
        }
        Configuration conf = wfInstance.getConf();
        // System.out.println("WF INSTANCE CONF:");
        // System.out.println(XmlUtils.prettyPrint(conf).toString());
        WorkflowJobBean workflow = new WorkflowJobBean();
        workflow.setId(wfInstance.getId());
        workflow.setAppName(ELUtils.resolveAppName(app.getName(), conf));
        workflow.setAppPath(conf.get(OozieClient.APP_PATH));
        workflow.setConf(XmlUtils.prettyPrint(conf).toString());
        workflow.setProtoActionConf(protoActionConf.toXmlString());
        workflow.setCreatedTime(new Date());
        workflow.setLastModifiedTime(new Date());
        workflow.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
        workflow.setStatus(WorkflowJob.Status.PREP);
        workflow.setRun(0);
        workflow.setUser(conf.get(OozieClient.USER_NAME));
        workflow.setGroup(conf.get(OozieClient.GROUP_NAME));
        workflow.setWorkflowInstance(wfInstance);
        workflow.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
        // Set parent id if it doesn't already have one (for subworkflows)
        if (workflow.getParentId() == null) {
            workflow.setParentId(conf.get(SubWorkflowActionExecutor.PARENT_ID));
        }
        // Set to coord action Id if workflow submitted through coordinator
        if (workflow.getParentId() == null) {
            workflow.setParentId(parentId);
        }
        LogUtils.setLogInfo(workflow);
        LOG.debug("Workflow record created, Status [{0}]", workflow.getStatus());
        Element wfElem = XmlUtils.parseXml(app.getDefinition());
        ELEvaluator evalSla = createELEvaluatorForGroup(conf, "wf-sla-submit");
        String jobSlaXml = verifySlaElements(wfElem, evalSla);
        if (!dryrun) {
            writeSLARegistration(wfElem, jobSlaXml, workflow.getId(), workflow.getParentId(), workflow.getUser(),
                    workflow.getGroup(), workflow.getAppName(), LOG, evalSla);
            workflow.setSlaXml(jobSlaXml);
            // System.out.println("SlaXml :"+ slaXml);
            // store.insertWorkflow(workflow);
            insertList.add(workflow);
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                try {
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, null, null);
                } catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
            } else {
                LOG.error(ErrorCode.E0610);
                return null;
            }
            return workflow.getId();
        } else {
            // Checking variable substitution for dryrun
            ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(workflow, null, false, false);
            Element workflowXml = XmlUtils.parseXml(app.getDefinition());
            removeSlaElements(workflowXml);
            String workflowXmlString = XmlUtils.removeComments(XmlUtils.prettyPrint(workflowXml).toString());
            workflowXmlString = context.getELEvaluator().evaluate(workflowXmlString, String.class);
            workflowXml = XmlUtils.parseXml(workflowXmlString);
            Iterator<Element> it = workflowXml.getDescendants(new ElementFilter("job-xml"));
            // Checking all variable substitutions in job-xml files
            while (it.hasNext()) {
                Element e = it.next();
                String jobXml = e.getTextTrim();
                Path xmlPath = new Path(workflow.getAppPath(), jobXml);
                Configuration jobXmlConf = new XConfiguration(fs.open(xmlPath));
                String jobXmlConfString = XmlUtils.prettyPrint(jobXmlConf).toString();
                jobXmlConfString = XmlUtils.removeComments(jobXmlConfString);
                context.getELEvaluator().evaluate(jobXmlConfString, String.class);
            }
            return "OK";
        }
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    }
}
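SubmitXCommand is the server-side half of workflow submission (and, with dryrun set, of variable-substitution checking). The client-side counterpart goes through the public OozieClient API; a short usage sketch follows, in which the server URL, HDFS application path, and user name are placeholders.

import java.util.Properties;

import org.apache.oozie.client.OozieClient;

public final class SubmitSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder server URL; point this at a real Oozie server.
        OozieClient client = new OozieClient("http://localhost:11000/oozie");
        Properties conf = client.createConfiguration();
        conf.setProperty(OozieClient.APP_PATH, "hdfs://namenode:8020/user/alice/demo-wf");
        conf.setProperty(OozieClient.USER_NAME, "alice");
        // submit() creates the job in PREP state, which is what reaches SubmitXCommand;
        // client.start(jobId) or client.run(conf) would also start it.
        String jobId = client.submit(conf);
        System.out.println("Submitted workflow " + jobId);
    }
}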