
Example 6 with HadoopAccessorException

Use of org.apache.oozie.service.HadoopAccessorException in project oozie by apache.

The class ReRunXCommand, method setupReRun:

private void setupReRun() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    LogUtils.setLogInfo(wfBean);
    WorkflowInstance oldWfInstance = this.wfBean.getWorkflowInstance();
    WorkflowInstance newWfInstance;
    String appPath = null;
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        WorkflowApp app = wps.parseDef(conf, null);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        appPath = conf.get(OozieClient.APP_PATH);
        URI uri = new URI(appPath);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(wfBean.getUser(), uri, fsConf);
        Path configDefault = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, SubmitXCommand.CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), SubmitXCommand.CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            XConfiguration.injectDefaults(defaultConf, conf);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolve all variables in the job properties. This ensures the Hadoop Configuration semantics are
        // preserved: the Configuration.get() calls inside XConfiguration.resolve() work recursively to compute
        // the final value for each key. Resetting conf to the resolved values is necessary so that Oozie
        // properties propagate to the Hadoop calls made downstream.
        conf = ((XConfiguration) conf).resolve();
        try {
            newWfInstance = workflowLib.createInstance(app, conf, jobId);
        } catch (WorkflowException e) {
            throw new CommandException(e);
        }
        String appName = ELUtils.resolveAppName(app.getName(), conf);
        if (SLAService.isEnabled()) {
            Element wfElem = XmlUtils.parseXml(app.getDefinition());
            ELEvaluator evalSla = SubmitXCommand.createELEvaluatorForGroup(conf, "wf-sla-submit");
            Element eSla = XmlUtils.getSLAElement(wfElem);
            String jobSlaXml = null;
            if (eSla != null) {
                jobSlaXml = SubmitXCommand.resolveSla(eSla, evalSla);
            }
            writeSLARegistration(wfElem, jobSlaXml, newWfInstance.getId(), conf.get(SubWorkflowActionExecutor.PARENT_ID), conf.get(OozieClient.USER_NAME), appName, evalSla);
        }
        wfBean.setAppName(appName);
        wfBean.setProtoActionConf(protoActionConf.toXmlString());
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (IOException ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (URISyntaxException ex) {
        throw new CommandException(ErrorCode.E0711, appPath, ex.getMessage(), ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E1007, ex.getMessage(), ex);
    }
    for (int i = 0; i < actions.size(); i++) {
        // Delete actions that will be re-executed; actions in the skip list (and sub-workflow actions when
        // only failed nodes are rerun) are kept, and their data is copied into the new workflow instance.
        if (!nodesToSkip.contains(actions.get(i).getName()) && !(conf.getBoolean(OozieClient.RERUN_FAIL_NODES, false) && SubWorkflowActionExecutor.ACTION_TYPE.equals(actions.get(i).getType()))) {
            deleteList.add(actions.get(i));
            LOG.info("Deleting Action[{0}] for re-run", actions.get(i).getId());
        } else {
            copyActionData(newWfInstance, oldWfInstance);
        }
    }
    wfBean.setAppPath(conf.get(OozieClient.APP_PATH));
    wfBean.setConf(XmlUtils.prettyPrint(conf).toString());
    wfBean.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
    wfBean.setUser(conf.get(OozieClient.USER_NAME));
    String group = ConfigUtils.getWithDeprecatedCheck(conf, OozieClient.JOB_ACL, OozieClient.GROUP_NAME, null);
    wfBean.setGroup(group);
    wfBean.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
    wfBean.setEndTime(null);
    wfBean.setRun(wfBean.getRun() + 1);
    wfBean.setStatus(WorkflowJob.Status.PREP);
    wfBean.setWorkflowInstance(newWfInstance);
    try {
        wfBean.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_RERUN, wfBean));
        // call JPAExecutor to do the bulk writes
        BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(null, updateList, deleteList);
    } catch (JPAExecutorException je) {
        throw new CommandException(je);
    } finally {
        updateParentIfNecessary(wfBean);
    }
}
Also used : WorkflowApp(org.apache.oozie.workflow.WorkflowApp) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) Element(org.jdom.Element) URISyntaxException(java.net.URISyntaxException) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) URI(java.net.URI) JPAExecutorException(org.apache.oozie.executor.jpa.JPAExecutorException) FileSystem(org.apache.hadoop.fs.FileSystem) ELEvaluator(org.apache.oozie.util.ELEvaluator) Path(org.apache.hadoop.fs.Path) WorkflowLib(org.apache.oozie.workflow.WorkflowLib) WorkflowAppService(org.apache.oozie.service.WorkflowAppService) WorkflowStoreService(org.apache.oozie.service.WorkflowStoreService) WorkflowException(org.apache.oozie.workflow.WorkflowException) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) CommandException(org.apache.oozie.command.CommandException) IOException(java.io.IOException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) JDOMException(org.jdom.JDOMException) PreconditionException(org.apache.oozie.command.PreconditionException) Date(java.util.Date) WorkflowJobQuery(org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery)
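
The pattern in this example recurs throughout the commands below: build a Configuration for the URI authority, open the FileSystem as the submitting user via HadoopAccessorService, then locate config-default.xml whether the app path names a file or a directory. The following standalone sketch isolates that sequence; the class and method names are hypothetical, and it assumes it runs inside an Oozie server where the Services singleton has been initialized.

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.XConfiguration;

public final class ConfigDefaultMerger {

    private ConfigDefaultMerger() {
    }

    // Hypothetical helper: merge config-default.xml (if present next to the application) into the job conf.
    public static void mergeConfigDefault(Configuration conf, String defaultFileName)
            throws IOException, URISyntaxException, HadoopAccessorException {
        URI uri = new URI(conf.get(OozieClient.APP_PATH));
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        // build a Configuration for this authority, then open the filesystem as the submitting user
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(conf.get(OozieClient.USER_NAME), uri, fsConf);
        // the app path may name the workflow XML itself or its parent directory
        Path path = new Path(uri.getPath());
        Path configDefault = fs.isFile(path)
                ? new Path(path.getParent(), defaultFileName)
                : new Path(path, defaultFileName);
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            // only fills keys the user configuration does not already define
            XConfiguration.injectDefaults(defaultConf, conf);
        }
    }
}

createFileSystem() is the call that can surface a HadoopAccessorException (for example when the URI authority is not permitted for the server), which ReRunXCommand above rewraps as a CommandException.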

Example 7 with HadoopAccessorException

Use of org.apache.oozie.service.HadoopAccessorException in project oozie by apache.

The class SubmitXCommand, method execute:

@Override
protected String execute() throws CommandException {
    InstrumentUtils.incrJobCounter(getName(), 1, getInstrumentation());
    WorkflowAppService wps = Services.get().get(WorkflowAppService.class);
    try {
        XLog.Info.get().setParameter(DagXLogInfoService.TOKEN, conf.get(OozieClient.LOG_TOKEN));
        String user = conf.get(OozieClient.USER_NAME);
        URI uri = new URI(conf.get(OozieClient.APP_PATH));
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path configDefault = null;
        Configuration defaultConf = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            configDefault = new Path(path, CONFIG_DEFAULT);
        } else {
            configDefault = new Path(path.getParent(), CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            try {
                defaultConf = new XConfiguration(fs.open(configDefault));
                PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
                XConfiguration.injectDefaults(defaultConf, conf);
            } catch (IOException ex) {
                throw new IOException("default configuration file, " + ex.getMessage(), ex);
            }
        }
        if (defaultConf != null) {
            defaultConf = resolveDefaultConfVariables(defaultConf);
        }
        WorkflowApp app = wps.parseDef(conf, defaultConf);
        XConfiguration protoActionConf = wps.createProtoActionConf(conf, true);
        WorkflowLib workflowLib = Services.get().get(WorkflowStoreService.class).getWorkflowLibWithNoDB();
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
        // Resolving all variables in the job properties.
        // This ensures the Hadoop Configuration semantics are preserved.
        XConfiguration resolvedVarsConf = new XConfiguration();
        for (Map.Entry<String, String> entry : conf) {
            resolvedVarsConf.set(entry.getKey(), conf.get(entry.getKey()));
        }
        conf = resolvedVarsConf;
        WorkflowInstance wfInstance;
        try {
            wfInstance = workflowLib.createInstance(app, conf);
        } catch (WorkflowException e) {
            throw new StoreException(e);
        }
        Configuration conf = wfInstance.getConf();
        // System.out.println("WF INSTANCE CONF:");
        // System.out.println(XmlUtils.prettyPrint(conf).toString());
        WorkflowJobBean workflow = new WorkflowJobBean();
        workflow.setId(wfInstance.getId());
        workflow.setAppName(ELUtils.resolveAppName(app.getName(), conf));
        workflow.setAppPath(conf.get(OozieClient.APP_PATH));
        workflow.setConf(XmlUtils.prettyPrint(conf).toString());
        workflow.setProtoActionConf(protoActionConf.toXmlString());
        workflow.setCreatedTime(new Date());
        workflow.setLastModifiedTime(new Date());
        workflow.setLogToken(conf.get(OozieClient.LOG_TOKEN, ""));
        workflow.setStatus(WorkflowJob.Status.PREP);
        workflow.setRun(0);
        workflow.setUser(conf.get(OozieClient.USER_NAME));
        workflow.setGroup(conf.get(OozieClient.GROUP_NAME));
        workflow.setWorkflowInstance(wfInstance);
        workflow.setExternalId(conf.get(OozieClient.EXTERNAL_ID));
        // Set parent id if it doesn't already have one (for subworkflows)
        if (workflow.getParentId() == null) {
            workflow.setParentId(conf.get(SubWorkflowActionExecutor.PARENT_ID));
        }
        // Set to coord action Id if workflow submitted through coordinator
        if (workflow.getParentId() == null) {
            workflow.setParentId(parentId);
        }
        LogUtils.setLogInfo(workflow);
        LOG.debug("Workflow record created, Status [{0}]", workflow.getStatus());
        Element wfElem = XmlUtils.parseXml(app.getDefinition());
        ELEvaluator evalSla = createELEvaluatorForGroup(conf, "wf-sla-submit");
        String jobSlaXml = verifySlaElements(wfElem, evalSla);
        if (!dryrun) {
            writeSLARegistration(wfElem, jobSlaXml, workflow.getId(), workflow.getParentId(), workflow.getUser(), workflow.getGroup(), workflow.getAppName(), LOG, evalSla);
            workflow.setSlaXml(jobSlaXml);
            // System.out.println("SlaXml :"+ slaXml);
            // store.insertWorkflow(workflow);
            insertList.add(workflow);
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                try {
                    BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, null, null);
                } catch (JPAExecutorException je) {
                    throw new CommandException(je);
                }
            } else {
                LOG.error(ErrorCode.E0610);
                return null;
            }
            return workflow.getId();
        } else {
            // Checking variable substitution for dryrun
            ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(workflow, null, false, false);
            Element workflowXml = XmlUtils.parseXml(app.getDefinition());
            removeSlaElements(workflowXml);
            String workflowXmlString = XmlUtils.removeComments(XmlUtils.prettyPrint(workflowXml).toString());
            workflowXmlString = context.getELEvaluator().evaluate(workflowXmlString, String.class);
            workflowXml = XmlUtils.parseXml(workflowXmlString);
            Iterator<Element> it = workflowXml.getDescendants(new ElementFilter("job-xml"));
            // Checking all variable substitutions in job-xml files
            while (it.hasNext()) {
                Element e = it.next();
                String jobXml = e.getTextTrim();
                Path xmlPath = new Path(workflow.getAppPath(), jobXml);
                Configuration jobXmlConf = new XConfiguration(fs.open(xmlPath));
                String jobXmlConfString = XmlUtils.prettyPrint(jobXmlConf).toString();
                jobXmlConfString = XmlUtils.removeComments(jobXmlConfString);
                context.getELEvaluator().evaluate(jobXmlConfString, String.class);
            }
            return "OK";
        }
    } catch (WorkflowException ex) {
        throw new CommandException(ex);
    } catch (HadoopAccessorException ex) {
        throw new CommandException(ex);
    } catch (Exception ex) {
        throw new CommandException(ErrorCode.E0803, ex.getMessage(), ex);
    }
}
Also used : WorkflowApp(org.apache.oozie.workflow.WorkflowApp) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) Element(org.jdom.Element) WorkflowInstance(org.apache.oozie.workflow.WorkflowInstance) URI(java.net.URI) JPAExecutorException(org.apache.oozie.executor.jpa.JPAExecutorException) FileSystem(org.apache.hadoop.fs.FileSystem) ELEvaluator(org.apache.oozie.util.ELEvaluator) ActionExecutorContext(org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext) Path(org.apache.hadoop.fs.Path) WorkflowLib(org.apache.oozie.workflow.WorkflowLib) WorkflowAppService(org.apache.oozie.service.WorkflowAppService) WorkflowStoreService(org.apache.oozie.service.WorkflowStoreService) WorkflowException(org.apache.oozie.workflow.WorkflowException) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) CommandException(org.apache.oozie.command.CommandException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) WorkflowJobBean(org.apache.oozie.WorkflowJobBean) Date(java.util.Date) StoreException(org.apache.oozie.store.StoreException) ElementFilter(org.jdom.filter.ElementFilter) JPAService(org.apache.oozie.service.JPAService) Map(java.util.Map)
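
One detail worth isolating from this example is the copy loop over conf before createInstance(): iterating the entries yields the raw property values, while Configuration.get() performs ${...} variable expansion, so copying key by key through get() freezes the resolved values. A minimal, hedged illustration of that behaviour (class name, keys and values are made up):

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.util.XConfiguration;

// Hypothetical demo class; property names and values are made up for illustration.
public final class ResolveVariablesDemo {

    public static void main(String[] args) {
        Configuration conf = new XConfiguration();
        conf.set("nameNode", "hdfs://nn.example.com:8020");
        conf.set("inputDir", "${nameNode}/user/demo/input");

        // entry.getValue() would return the raw "${nameNode}/user/demo/input";
        // conf.get(key) returns the expanded value, so the copy freezes resolved values.
        XConfiguration resolved = new XConfiguration();
        for (Map.Entry<String, String> entry : conf) {
            resolved.set(entry.getKey(), conf.get(entry.getKey()));
        }
        System.out.println(resolved.get("inputDir")); // hdfs://nn.example.com:8020/user/demo/input
    }
}

ReRunXCommand in the previous example achieves the same effect through XConfiguration.resolve().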

Example 8 with HadoopAccessorException

Use of org.apache.oozie.service.HadoopAccessorException in project oozie by apache.

The class JavaActionExecutor, method setupActionConf:

Configuration setupActionConf(Configuration actionConf, Context context, Element actionXml, Path appPath) throws ActionExecutorException {
    try {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        XConfiguration actionDefaults = has.createActionDefaultConf(actionConf.get(HADOOP_YARN_RM), getType());
        XConfiguration.copy(actionDefaults, actionConf);
        has.checkSupportedFilesystem(appPath.toUri());
        // Set the Java Main Class for the Java action to give to the Java launcher
        setJavaMain(actionConf, actionXml);
        parseJobXmlAndConfiguration(context, actionXml, appPath, actionConf);
        // set cancel.delegation.token in actionConf that child job doesn't cancel delegation token
        actionConf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
        setRootLoggerLevel(actionConf);
        return actionConf;
    } catch (IOException ex) {
        throw convertException(ex);
    } catch (HadoopAccessorException ex) {
        throw convertException(ex);
    } catch (URISyntaxException ex) {
        throw convertException(ex);
    }
}
Also used : XConfiguration(org.apache.oozie.util.XConfiguration) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) URISyntaxException(java.net.URISyntaxException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService)
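
Before anything else touches the cluster, this method overlays the server-side defaults for the given ResourceManager and action type onto the action configuration and checks that the application URI uses a supported filesystem. A hedged sketch of just that first half (the helper class and parameter names are hypothetical; that checkSupportedFilesystem() throws HadoopAccessorException for an unsupported scheme is inferred from the catch block above):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.XConfiguration;

public final class ActionConfDefaults {

    private ActionConfDefaults() {
    }

    // Hypothetical helper mirroring the start of setupActionConf(): overlay the server-side
    // defaults for this ResourceManager/action type and validate the application URI scheme.
    public static void applyDefaults(Configuration actionConf, String resourceManager,
            String actionType, URI appUri) throws HadoopAccessorException {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        XConfiguration actionDefaults = has.createActionDefaultConf(resourceManager, actionType);
        XConfiguration.copy(actionDefaults, actionConf);
        // assumed to throw HadoopAccessorException when the scheme is not a supported filesystem
        has.checkSupportedFilesystem(appUri);
    }
}

In the executor itself these exceptions are not turned into a CommandException but funneled through convertException() into an ActionExecutorException.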

Example 9 with HadoopAccessorException

Use of org.apache.oozie.service.HadoopAccessorException in project oozie by apache.

The class BundleSubmitXCommand, method mergeDefaultConfig:

/**
 * Merge default configuration with user-defined configuration.
 *
 * @throws CommandException thrown if failed to merge configuration
 */
protected void mergeDefaultConfig() throws CommandException {
    Path configDefault = null;
    try {
        String bundleAppPathStr = conf.get(OozieClient.BUNDLE_APP_PATH);
        Path bundleAppPath = new Path(bundleAppPathStr);
        String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(bundleAppPath.toUri().getAuthority());
        FileSystem fs = has.createFileSystem(user, bundleAppPath.toUri(), fsConf);
        // app path could be a directory
        if (!fs.isFile(bundleAppPath)) {
            configDefault = new Path(bundleAppPath, CONFIG_DEFAULT);
        } else {
            configDefault = new Path(bundleAppPath.getParent(), CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            Configuration defaultConf = new XConfiguration(fs.open(configDefault));
            PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
            XConfiguration.injectDefaults(defaultConf, conf);
        } else {
            LOG.info("configDefault Doesn't exist " + configDefault);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
    } catch (IOException e) {
        throw new CommandException(ErrorCode.E0702, e.getMessage() + " : Problem reading default config " + configDefault, e);
    } catch (HadoopAccessorException e) {
        throw new CommandException(e);
    }
    LOG.debug("Merged CONF :" + XmlUtils.prettyPrint(conf).toString());
}
Also used : Path(org.apache.hadoop.fs.Path) XConfiguration(org.apache.oozie.util.XConfiguration) Configuration(org.apache.hadoop.conf.Configuration) FileSystem(org.apache.hadoop.fs.FileSystem) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) CommandException(org.apache.oozie.command.CommandException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService)
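
The key semantic in mergeDefaultConfig() is the direction of the merge: XConfiguration.injectDefaults(defaultConf, conf) is expected to add only keys that the user configuration does not already define, so config-default.xml never overrides user-supplied properties. A small self-contained illustration (class name, keys and values are made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.util.XConfiguration;

// Hypothetical demo class; keys and values are made up for illustration.
public final class InjectDefaultsDemo {

    public static void main(String[] args) {
        Configuration conf = new XConfiguration();
        conf.set("queueName", "analytics"); // user-supplied value

        Configuration defaults = new XConfiguration();
        defaults.set("queueName", "default"); // should lose to the user value
        defaults.set("oozie.libpath", "/share/lib"); // fills a gap

        // expected behaviour: only keys missing from conf are copied over
        XConfiguration.injectDefaults(defaults, conf);
        System.out.println(conf.get("queueName")); // analytics
        System.out.println(conf.get("oozie.libpath")); // /share/lib
    }
}

The disallowed-property checks then run against both configurations, exactly as in the method above.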

Example 10 with HadoopAccessorException

Use of org.apache.oozie.service.HadoopAccessorException in project oozie by apache.

The class BundleSubmitXCommand, method readDefinition:

/**
 * Read bundle definition.
 *
 * @param appPath application path.
 * @return bundle definition.
 * @throws BundleJobException thrown if the definition could not be read.
 */
protected String readDefinition(String appPath) throws BundleJobException {
    String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
    // Configuration confHadoop = CoordUtils.getHadoopConf(conf);
    try {
        URI uri = new URI(appPath);
        LOG.debug("user =" + user);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path appDefPath = null;
        // app path could be a directory
        Path path = new Path(uri.getPath());
        if (!fs.isFile(path)) {
            appDefPath = new Path(path, BUNDLE_XML_FILE);
        } else {
            appDefPath = path;
        }
        Reader reader = new InputStreamReader(fs.open(appDefPath));
        StringWriter writer = new StringWriter();
        IOUtils.copyCharStream(reader, writer);
        return writer.toString();
    } catch (IOException ex) {
        LOG.warn("IOException :" + XmlUtils.prettyPrint(conf), ex);
        throw new BundleJobException(ErrorCode.E1301, ex.getMessage(), ex);
    } catch (URISyntaxException ex) {
        LOG.warn("URISyException :" + ex.getMessage());
        throw new BundleJobException(ErrorCode.E1302, appPath, ex.getMessage(), ex);
    } catch (HadoopAccessorException ex) {
        throw new BundleJobException(ex);
    } catch (Exception ex) {
        LOG.warn("Exception :", ex);
        throw new BundleJobException(ErrorCode.E1301, ex.getMessage(), ex);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) InputStreamReader(java.io.InputStreamReader) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) Reader(java.io.Reader) StringReader(java.io.StringReader) IOException(java.io.IOException) URISyntaxException(java.net.URISyntaxException) URI(java.net.URI) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) JDOMException(org.jdom.JDOMException) CommandException(org.apache.oozie.command.CommandException) SAXException(org.xml.sax.SAXException) PreconditionException(org.apache.oozie.command.PreconditionException) StringWriter(java.io.StringWriter) FileSystem(org.apache.hadoop.fs.FileSystem)
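
readDefinition() combines the same file-or-directory resolution with Oozie's IOUtils.copyCharStream() to read the bundle XML back as a String. A hedged standalone sketch (class and method names are hypothetical; it assumes org.apache.oozie.util.IOUtils as used above and an initialized Services singleton):

import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.oozie.service.HadoopAccessorException;
import org.apache.oozie.service.HadoopAccessorService;
import org.apache.oozie.service.Services;
import org.apache.oozie.util.IOUtils;

public final class AppDefinitionReader {

    private AppDefinitionReader() {
    }

    // Hypothetical helper following readDefinition(): load an application XML from HDFS as a
    // String, whether appPath names the definition file itself or its parent directory.
    public static String read(String user, String appPath, String definitionFileName)
            throws IOException, URISyntaxException, HadoopAccessorException {
        URI uri = new URI(appPath);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(uri.getAuthority());
        FileSystem fs = has.createFileSystem(user, uri, fsConf);
        Path path = new Path(uri.getPath());
        Path appDefPath = fs.isFile(path) ? path : new Path(path, definitionFileName);
        // close the reader defensively; harmless if copyCharStream() already closes it
        try (Reader reader = new InputStreamReader(fs.open(appDefPath))) {
            StringWriter writer = new StringWriter();
            IOUtils.copyCharStream(reader, writer);
            return writer.toString();
        }
    }
}

As in the example, a HadoopAccessorException from createFileSystem() is left to the caller; BundleSubmitXCommand rewraps it as a BundleJobException.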

Aggregations

HadoopAccessorException (org.apache.oozie.service.HadoopAccessorException): 15
IOException (java.io.IOException): 14
HadoopAccessorService (org.apache.oozie.service.HadoopAccessorService): 12
FileSystem (org.apache.hadoop.fs.FileSystem): 10
XConfiguration (org.apache.oozie.util.XConfiguration): 10
URISyntaxException (java.net.URISyntaxException): 9
Configuration (org.apache.hadoop.conf.Configuration): 9
Path (org.apache.hadoop.fs.Path): 9
CommandException (org.apache.oozie.command.CommandException): 7
URI (java.net.URI): 6
JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException): 4
Element (org.jdom.Element): 4
JDOMException (org.jdom.JDOMException): 4
StringReader (java.io.StringReader): 3
Date (java.util.Date): 3
ActionExecutorException (org.apache.oozie.action.ActionExecutorException): 3
WorkflowException (org.apache.oozie.workflow.WorkflowException): 3
WorkflowInstance (org.apache.oozie.workflow.WorkflowInstance): 3
InputStreamReader (java.io.InputStreamReader): 2
Reader (java.io.Reader): 2