Search in sources :

Example 11 with HadoopAccessorService

use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.

From the class TestJavaActionExecutor, in the method testChildKill:

/**
 * Verifies that killing a Java action also kills the child YARN application
 * spawned by the launcher. The child is correlated with the launcher through
 * the YARN application tag derived from the workflow action.
 */
public void testChildKill() throws Exception {
    final JobConf clusterConf = createJobConf();
    FileSystem fileSystem = FileSystem.get(clusterConf);
    Path confFile = new Path("/tmp/cluster-conf.xml");
    // try-with-resources: the original closed the stream manually and would
    // have leaked it had writeXml() thrown
    try (OutputStream out = fileSystem.create(confFile)) {
        clusterConf.writeXml(out);
    }
    // the "#core-site.xml" fragment makes YARN localize the archive under that symlink name
    String confFileName = fileSystem.makeQualified(confFile).toString() + "#core-site.xml";
    final String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>" + getNameNodeUri() + "</name-node>" + "<main-class> " + SleepJob.class.getName() + " </main-class>" + "<arg>-mt</arg>" + "<arg>300000</arg>" + "<archive>" + confFileName + "</archive>" + "</java>";
    final Context context = createContext(actionXml, null);
    final String runningJob = submitAction(context);
    YarnApplicationState state = waitUntilYarnAppState(runningJob, EnumSet.of(YarnApplicationState.RUNNING));
    assertEquals(YarnApplicationState.RUNNING, state);
    WorkflowJob wfJob = context.getWorkflow();
    Configuration conf = null;
    if (wfJob.getConf() != null) {
        conf = new XConfiguration(new StringReader(wfJob.getConf()));
    }
    String launcherTag = LauncherHelper.getActionYarnTag(conf, wfJob.getParentId(), context.getAction());
    JavaActionExecutor ae = new JavaActionExecutor();
    final Configuration jobConf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
    // the child app is looked up below via this tag, so it must match what the launcher used
    jobConf.set(LauncherMain.CHILD_MAPREDUCE_JOB_TAGS, LauncherHelper.getTag(launcherTag));
    jobConf.setLong(LauncherMain.OOZIE_JOB_LAUNCH_TIME, context.getAction().getStartTime().getTime());
    // We have to use a proper UGI for retrieving the child apps, because the WF is
    // submitted as a test user, not as the current login user
    UserGroupInformationService ugiService = Services.get().get(UserGroupInformationService.class);
    final UserGroupInformation ugi = ugiService.getProxyUser(getTestUser());
    final Set<ApplicationId> childSet = new HashSet<>();
    // wait until we have a child MR job
    waitFor(60_000, new Predicate() {

        @Override
        public boolean evaluate() throws Exception {
            return ugi.doAs(new PrivilegedExceptionAction<Boolean>() {

                @Override
                public Boolean run() throws Exception {
                    // clear before each poll so a stale result cannot satisfy the predicate
                    childSet.clear();
                    childSet.addAll(LauncherMain.getChildYarnJobs(jobConf));
                    return !childSet.isEmpty();
                }
            });
        }
    });
    assertEquals(1, childSet.size());
    // kill the action - based on the job tag, the SleepJob is expected to be killed too
    ae.kill(context, context.getAction());
    HadoopAccessorService hadoopAccessorService = getHadoopAccessorService();
    Configuration config = hadoopAccessorService.createConfiguration(getJobTrackerUri());
    // NOTE(review): yarnClient is never closed here; confirm whether the test
    // harness relies on process teardown for cleanup
    YarnClient yarnClient = hadoopAccessorService.createYarnClient(getTestUser(), config);
    // check that both the launcher & MR job were successfully killed
    ApplicationId jobId = childSet.iterator().next();
    assertEquals(YarnApplicationState.KILLED, yarnClient.getApplicationReport(jobId).getYarnApplicationState());
    assertTrue(ae.isCompleted(context.getAction().getExternalStatus()));
    assertEquals(WorkflowAction.Status.DONE, context.getAction().getStatus());
    assertEquals(JavaActionExecutor.KILLED, context.getAction().getExternalStatus());
    assertEquals(FinalApplicationStatus.KILLED, yarnClient.getApplicationReport(ConverterUtils.toApplicationId(runningJob)).getFinalApplicationStatus());
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) YarnApplicationState(org.apache.hadoop.yarn.api.records.YarnApplicationState) PrivilegedExceptionAction(java.security.PrivilegedExceptionAction) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) ActionExecutorException(org.apache.oozie.action.ActionExecutorException) IOException(java.io.IOException) YarnClient(org.apache.hadoop.yarn.client.api.YarnClient) UserGroupInformationService(org.apache.oozie.service.UserGroupInformationService) XConfiguration(org.apache.oozie.util.XConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) StringReader(java.io.StringReader) WorkflowJob(org.apache.oozie.client.WorkflowJob) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) JobConf(org.apache.hadoop.mapred.JobConf) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation) HashSet(java.util.HashSet)

Example 12 with HadoopAccessorService

use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.

From the class TestOozieSharelibCLI, in the method getTargetFileSysyem:

/**
 * Lazily creates and caches the FileSystem for the sharelib output path,
 * resolved through the HadoopAccessorService as the current OS user.
 */
private FileSystem getTargetFileSysyem() throws Exception {
    // guard clause: reuse the cached handle once it has been created
    if (fs != null) {
        return fs;
    }
    final HadoopAccessorService accessorService = getServices().get(HadoopAccessorService.class);
    final URI outputUri = new Path(outPath).toUri();
    final Configuration accessorConf = accessorService.createConfiguration(outputUri.getAuthority());
    fs = accessorService.createFileSystem(System.getProperty("user.name"), outputUri, accessorConf);
    return fs;
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) URI(java.net.URI)

Example 13 with HadoopAccessorService

use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.

From the class OozieSharelibCLI, in the method run:

/**
 * Runs the sharelib CLI: "help" prints usage; "create"/"upgrade" copy the
 * sharelib (from -locallib, or the default oozie-sharelib*.tar.gz under the
 * Oozie home directory) to a new timestamped directory on the target HDFS (-fs).
 *
 * @param args raw command-line arguments
 * @return 0 on success, 1 on a parse or execution failure
 * @throws IllegalStateException if this CLI instance has already been used
 * @throws Exception not thrown in practice; failures are caught and reported
 */
public synchronized int run(String[] args) throws Exception {
    if (used) {
        throw new IllegalStateException("CLI instance already used");
    }
    used = true;
    CLIParser parser = new CLIParser("oozie-setup.sh", HELP_INFO);
    String oozieHome = System.getProperty(OOZIE_HOME);
    parser.addCommand(HELP_CMD, "", "display usage for all commands or specified command", new Options(), false);
    parser.addCommand(CREATE_CMD, "", "create a new timestamped version of oozie sharelib", createUpgradeOptions(CREATE_CMD), false);
    parser.addCommand(UPGRADE_CMD, "", "[deprecated][use command \"create\" to create new version]   upgrade oozie sharelib \n", createUpgradeOptions(UPGRADE_CMD), false);
    try {
        final CLIParser.Command command = parser.parse(args);
        String sharelibAction = command.getName();
        if (sharelibAction.equals(HELP_CMD)) {
            parser.showHelp(command.getCommandLine());
            return 0;
        }
        if (!command.getCommandLine().hasOption(FS_OPT)) {
            throw new Exception("-fs option must be specified");
        }
        // parseInt: identical parsing and NumberFormatException behavior as
        // Integer.valueOf(), without the needless boxing
        int threadPoolSize = Integer.parseInt(command.getCommandLine().getOptionValue(CONCURRENCY_OPT, "1"));
        File srcFile = null;
        // Check whether user provided locallib
        if (command.getCommandLine().hasOption(LIB_OPT)) {
            srcFile = new File(command.getCommandLine().getOptionValue(LIB_OPT));
        } else {
            // Since user did not provide locallib, find the default one under oozie home dir
            Collection<File> files = FileUtils.listFiles(new File(oozieHome), new WildcardFileFilter("oozie-sharelib*.tar.gz"), null);
            if (files.size() > 1) {
                throw new IOException("more than one sharelib tar found at " + oozieHome);
            }
            if (files.isEmpty()) {
                throw new IOException("default sharelib tar not found in oozie home dir: " + oozieHome);
            }
            srcFile = files.iterator().next();
        }
        // scratch directory for un-tarring; deleteOnExit is only a best-effort
        // backstop since it cannot remove a non-empty directory
        File temp = File.createTempFile("oozie", ".dir");
        temp.delete();
        temp.mkdir();
        temp.deleteOnExit();
        // Check whether the lib is a tar file or folder
        if (!srcFile.isDirectory()) {
            FileUtil.unTar(srcFile, temp);
            srcFile = new File(temp.toString() + "/share/lib");
        } else {
            // Get the lib directory since it's a folder
            srcFile = new File(srcFile, "lib");
        }
        String hdfsUri = command.getCommandLine().getOptionValue(FS_OPT);
        Path srcPath = new Path(srcFile.toString());
        Services services = new Services();
        services.getConf().set(Services.CONF_SERVICE_CLASSES, "org.apache.oozie.service.LiteWorkflowAppService, org.apache.oozie.service.HadoopAccessorService");
        services.getConf().set(Services.CONF_SERVICE_EXT_CLASSES, "");
        services.init();
        try {
            WorkflowAppService lwas = services.get(WorkflowAppService.class);
            HadoopAccessorService has = services.get(HadoopAccessorService.class);
            Path dstPath = lwas.getSystemLibPath();
            URI uri = new Path(hdfsUri).toUri();
            Configuration fsConf = has.createConfiguration(uri.getAuthority());
            FileSystem fs = FileSystem.get(uri, fsConf);
            if (!fs.exists(dstPath)) {
                fs.mkdirs(dstPath);
            }
            ECPolicyDisabler.tryDisableECPolicyForPath(fs, dstPath);
            if (sharelibAction.equals(CREATE_CMD) || sharelibAction.equals(UPGRADE_CMD)) {
                dstPath = new Path(dstPath.toString() + Path.SEPARATOR + SHARE_LIB_PREFIX + getTimestampDirectory());
            }
            System.out.println("the destination path for sharelib is: " + dstPath);
            if (!srcFile.exists()) {
                throw new IOException(srcPath + " cannot be found");
            }
            if (threadPoolSize > 1) {
                concurrentCopyFromLocal(fs, threadPoolSize, srcFile, dstPath);
            } else {
                fs.copyFromLocalFile(false, srcPath, dstPath);
            }
        } finally {
            // the original cleaned up only on the success path, leaking the
            // initialized Services (and the temp dir contents) on any failure
            services.destroy();
            FileUtils.deleteDirectory(temp);
        }
        return 0;
    } catch (ParseException ex) {
        System.err.println("Invalid sub-command: " + ex.getMessage());
        System.err.println();
        System.err.println(parser.shortHelp());
        return 1;
    } catch (Exception ex) {
        logError(ex.getMessage(), ex);
        return 1;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Options(org.apache.commons.cli.Options) Configuration(org.apache.hadoop.conf.Configuration) WorkflowAppService(org.apache.oozie.service.WorkflowAppService) CLIParser(org.apache.oozie.cli.CLIParser) IOException(java.io.IOException) WildcardFileFilter(org.apache.commons.io.filefilter.WildcardFileFilter) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService) URI(java.net.URI) CancellationException(java.util.concurrent.CancellationException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) ParseException(org.apache.commons.cli.ParseException) Services(org.apache.oozie.service.Services) FileSystem(org.apache.hadoop.fs.FileSystem) ParseException(org.apache.commons.cli.ParseException) File(java.io.File)

Example 14 with HadoopAccessorService

use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.

From the class JavaActionExecutor, in the method setupActionConf:

/**
 * Populates the per-action Hadoop configuration: copies in Oozie's action
 * default configuration for the target resource manager, verifies that the
 * application path's filesystem is supported, sets the Java main class,
 * merges any job-xml/inline configuration from the action XML, and prevents
 * the child job from cancelling the delegation tokens it shares.
 *
 * @param actionConf configuration to fill in (also returned)
 * @param context execution context of the action
 * @param actionXml parsed action definition element
 * @param appPath application path used to resolve job-xml files
 * @return the same {@code actionConf} instance, fully populated
 * @throws ActionExecutorException wrapping any configuration/IO/URI failure
 */
Configuration setupActionConf(Configuration actionConf, Context context, Element actionXml, Path appPath) throws ActionExecutorException {
    try {
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        XConfiguration actionDefaults = has.createActionDefaultConf(actionConf.get(HADOOP_YARN_RM), getType());
        XConfiguration.copy(actionDefaults, actionConf);
        has.checkSupportedFilesystem(appPath.toUri());
        // Set the Java Main Class for the Java action to give to the Java launcher
        setJavaMain(actionConf, actionXml);
        parseJobXmlAndConfiguration(context, actionXml, appPath, actionConf);
        // set cancel.delegation.token in actionConf that child job doesn't cancel delegation token
        actionConf.setBoolean("mapreduce.job.complete.cancel.delegation.tokens", false);
        setRootLoggerLevel(actionConf);
        return actionConf;
    } catch (IOException | HadoopAccessorException | URISyntaxException ex) {
        // multi-catch: the original had three identical catch blocks doing the same conversion
        throw convertException(ex);
    }
}
Also used : XConfiguration(org.apache.oozie.util.XConfiguration) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) URISyntaxException(java.net.URISyntaxException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService)

Example 15 with HadoopAccessorService

use of org.apache.oozie.service.HadoopAccessorService in project oozie by apache.

From the class BundleSubmitXCommand, in the method mergeDefaultConfig:

/**
 * Merge default configuration with user-defined configuration.
 *
 * Locates {@code config-default.xml} next to (or inside) the bundle app path,
 * injects its properties as defaults into {@code conf}, and validates that
 * neither source defines disallowed properties.
 *
 * @throws CommandException thrown if failed to merge configuration
 */
protected void mergeDefaultConfig() throws CommandException {
    Path configDefault = null;
    try {
        String bundleAppPathStr = conf.get(OozieClient.BUNDLE_APP_PATH);
        Path bundleAppPath = new Path(bundleAppPathStr);
        String user = ParamChecker.notEmpty(conf.get(OozieClient.USER_NAME), OozieClient.USER_NAME);
        HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
        Configuration fsConf = has.createConfiguration(bundleAppPath.toUri().getAuthority());
        FileSystem fs = has.createFileSystem(user, bundleAppPath.toUri(), fsConf);
        // app path could be a directory
        if (!fs.isFile(bundleAppPath)) {
            configDefault = new Path(bundleAppPath, CONFIG_DEFAULT);
        } else {
            configDefault = new Path(bundleAppPath.getParent(), CONFIG_DEFAULT);
        }
        if (fs.exists(configDefault)) {
            // close the HDFS stream explicitly; the original never closed it
            // (XConfiguration is not known to close the stream it is handed)
            try (java.io.InputStream defaultConfStream = fs.open(configDefault)) {
                Configuration defaultConf = new XConfiguration(defaultConfStream);
                PropertiesUtils.checkDisallowedProperties(defaultConf, DISALLOWED_DEFAULT_PROPERTIES);
                XConfiguration.injectDefaults(defaultConf, conf);
            }
        } else {
            LOG.info("configDefault Doesn't exist " + configDefault);
        }
        PropertiesUtils.checkDisallowedProperties(conf, DISALLOWED_USER_PROPERTIES);
    } catch (IOException e) {
        throw new CommandException(ErrorCode.E0702, e.getMessage() + " : Problem reading default config " + configDefault, e);
    } catch (HadoopAccessorException e) {
        throw new CommandException(e);
    }
    LOG.debug("Merged CONF :" + XmlUtils.prettyPrint(conf).toString());
}
Also used : Path(org.apache.hadoop.fs.Path) XConfiguration(org.apache.oozie.util.XConfiguration) Configuration(org.apache.hadoop.conf.Configuration) XConfiguration(org.apache.oozie.util.XConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) HadoopAccessorException(org.apache.oozie.service.HadoopAccessorException) IOException(java.io.IOException) CommandException(org.apache.oozie.command.CommandException) HadoopAccessorService(org.apache.oozie.service.HadoopAccessorService)

Aggregations

HadoopAccessorService (org.apache.oozie.service.HadoopAccessorService)21 Configuration (org.apache.hadoop.conf.Configuration)17 XConfiguration (org.apache.oozie.util.XConfiguration)14 IOException (java.io.IOException)13 Path (org.apache.hadoop.fs.Path)12 FileSystem (org.apache.hadoop.fs.FileSystem)11 URI (java.net.URI)10 HadoopAccessorException (org.apache.oozie.service.HadoopAccessorException)10 URISyntaxException (java.net.URISyntaxException)6 CommandException (org.apache.oozie.command.CommandException)6 StringReader (java.io.StringReader)4 Element (org.jdom.Element)4 JDOMException (org.jdom.JDOMException)4 InputStreamReader (java.io.InputStreamReader)3 StringWriter (java.io.StringWriter)3 HashMap (java.util.HashMap)3 JPAExecutorException (org.apache.oozie.executor.jpa.JPAExecutorException)3 WorkflowAppService (org.apache.oozie.service.WorkflowAppService)3 SAXException (org.xml.sax.SAXException)3 Reader (java.io.Reader)2