Search in sources:

Example 6 with ParseException

Use of org.apache.commons.cli.ParseException in the Apache Hadoop project.

From the class HadoopArchiveLogs, the method handleOpts:

/**
 * Parses and applies the command-line options for the archive-logs tool.
 * Prints usage and exits the JVM for -help, and also exits when the
 * max-eligible-apps value is 0 (which would select nothing).
 *
 * @param args raw command-line arguments
 * @throws ParseException if the arguments cannot be parsed
 */
private void handleOpts(String[] args) throws ParseException {
    // Declare every supported option up front.
    Options options = new Options();
    Option help = new Option(HELP_OPTION, false, "Prints this message");
    Option maxApps = new Option(MAX_ELIGIBLE_APPS_OPTION, true,
        "The maximum number of eligible apps to process (default: "
            + DEFAULT_MAX_ELIGIBLE + " (all))");
    maxApps.setArgName("n");
    Option minLogFiles = new Option(MIN_NUM_LOG_FILES_OPTION, true,
        "The minimum number of log files required to be eligible (default: "
            + DEFAULT_MIN_NUM_LOG_FILES + ")");
    minLogFiles.setArgName("n");
    Option maxLogsSize = new Option(MAX_TOTAL_LOGS_SIZE_OPTION, true,
        "The maximum total logs size (in megabytes) required to be eligible"
            + " (default: " + DEFAULT_MAX_TOTAL_LOGS_SIZE + ")");
    maxLogsSize.setArgName("megabytes");
    Option memoryOption = new Option(MEMORY_OPTION, true,
        "The amount of memory (in megabytes) for each container (default: "
            + DEFAULT_MEMORY + ")");
    memoryOption.setArgName("megabytes");
    Option verboseOption = new Option(VERBOSE_OPTION, false,
        "Print more details.");
    Option forceOption = new Option(FORCE_OPTION, false,
        "Force recreating the working directory if an existing one is found. "
            + "This should only be used if you know that another instance is "
            + "not currently running");
    Option noProxyOption = new Option(NO_PROXY_OPTION, false,
        "When specified, all processing will be done as the user running this"
            + " command (or the Yarn user if DefaultContainerExecutor is in "
            + "use). When not specified, all processing will be done as the "
            + "user who owns that application; if the user running this command"
            + " is not allowed to impersonate that user, it will fail");
    options.addOption(help);
    options.addOption(maxApps);
    options.addOption(minLogFiles);
    options.addOption(maxLogsSize);
    options.addOption(memoryOption);
    options.addOption(verboseOption);
    options.addOption(forceOption);
    options.addOption(noProxyOption);
    try {
        CommandLine parsed = new GnuParser().parse(options, args);
        if (parsed.hasOption(HELP_OPTION)) {
            // -help short-circuits all other processing.
            new HelpFormatter().printHelp("mapred archive-logs", options);
            System.exit(0);
        }
        if (parsed.hasOption(MAX_ELIGIBLE_APPS_OPTION)) {
            maxEligible = Integer.parseInt(
                parsed.getOptionValue(MAX_ELIGIBLE_APPS_OPTION));
            if (maxEligible == 0) {
                // Zero would select no apps at all, so tell the user and stop.
                LOG.info("Setting " + MAX_ELIGIBLE_APPS_OPTION + " to 0 accomplishes "
                    + "nothing. Please either set it to a negative value "
                    + "(default, all) or a more reasonable value.");
                System.exit(0);
            }
        }
        if (parsed.hasOption(MIN_NUM_LOG_FILES_OPTION)) {
            minNumLogFiles = Integer.parseInt(
                parsed.getOptionValue(MIN_NUM_LOG_FILES_OPTION));
        }
        if (parsed.hasOption(MAX_TOTAL_LOGS_SIZE_OPTION)) {
            // The CLI value is in megabytes; internally we track bytes.
            maxTotalLogsSize = Long.parseLong(
                parsed.getOptionValue(MAX_TOTAL_LOGS_SIZE_OPTION)) * 1024L * 1024L;
        }
        if (parsed.hasOption(MEMORY_OPTION)) {
            memory = Long.parseLong(parsed.getOptionValue(MEMORY_OPTION));
        }
        if (parsed.hasOption(VERBOSE_OPTION)) {
            verbose = true;
        }
        if (parsed.hasOption(FORCE_OPTION)) {
            force = true;
        }
        if (parsed.hasOption(NO_PROXY_OPTION)) {
            proxy = false;
        }
    } catch (ParseException pe) {
        // Show usage before propagating the parse failure to the caller.
        new HelpFormatter().printHelp("mapred archive-logs", options);
        throw pe;
    }
}
Also used : HelpFormatter(org.apache.commons.cli.HelpFormatter) Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser) Option(org.apache.commons.cli.Option) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException)

Example 7 with ParseException

Use of org.apache.commons.cli.ParseException in the Apache Hadoop project.

From the class OptionsParser, the method parse:

/**
   * The parse method parses the command-line options, and creates
   * a corresponding Options object.
   * @param args Command-line arguments (excluding the options consumed
   *              by the GenericOptionsParser).
   * @return The Options object, corresponding to the specified command-line.
   * @throws IllegalArgumentException Thrown if the parse fails.
   */
/**
 * The parse method parses the command-line options, and creates
 * a corresponding Options object.
 * @param args Command-line arguments (excluding the options consumed
 *              by the GenericOptionsParser).
 * @return The Options object, corresponding to the specified command-line.
 * @throws IllegalArgumentException Thrown if the parse fails.
 */
public static DistCpOptions parse(String[] args) throws IllegalArgumentException {
    CommandLine command;
    try {
        command = new CustomParser().parse(cliOptions, args, true);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Unable to parse arguments. " + Arrays.toString(args), e);
    }
    DistCpOptions distCpOptions = parseSourceAndTargetPaths(command);
    // Simple boolean switches map directly onto flag presence.
    distCpOptions.setIgnoreFailures(command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()));
    distCpOptions.setAtomicCommit(command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()));
    distCpOptions.setSyncFolder(command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()));
    distCpOptions.setOverwrite(command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()));
    distCpOptions.setAppend(command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()));
    distCpOptions.setDeleteMissing(command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()));
    distCpOptions.setSkipCRC(command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()));
    if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
        // -tmp is only meaningful together with -atomic.
        if (!distCpOptions.shouldAtomicCommit()) {
            throw new IllegalArgumentException("-tmp work-path can only be specified along with -atomic");
        }
        String workPath = getVal(command, DistCpOptionSwitch.WORK_PATH.getSwitch());
        if (workPath != null && !workPath.isEmpty()) {
            distCpOptions.setAtomicWorkPath(new Path(workPath));
        }
    }
    if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
        distCpOptions.setLogPath(new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
    }
    if (command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch())) {
        distCpOptions.setBlocking(false);
    }
    parseBandwidth(command, distCpOptions);
    parseNumListStatusThreads(command, distCpOptions);
    parseMaxMaps(command, distCpOptions);
    if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
        distCpOptions.setCopyStrategy(getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
    }
    parsePreserveStatus(command, distCpOptions);
    if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
        // -diff takes a pair of snapshot names.
        String[] diffSnapshots = getVals(command, DistCpOptionSwitch.DIFF.getSwitch());
        checkSnapshotsArgs(diffSnapshots);
        distCpOptions.setUseDiff(diffSnapshots[0], diffSnapshots[1]);
    }
    if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
        // -rdiff also takes a pair of snapshot names.
        String[] rdiffSnapshots = getVals(command, DistCpOptionSwitch.RDIFF.getSwitch());
        checkSnapshotsArgs(rdiffSnapshots);
        distCpOptions.setUseRdiff(rdiffSnapshots[0], rdiffSnapshots[1]);
    }
    parseFileLimit(command);
    parseSizeLimit(command);
    if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
        distCpOptions.setFiltersFile(getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
    }
    distCpOptions.validate();
    return distCpOptions;
}
Also used : Path(org.apache.hadoop.fs.Path) CommandLine(org.apache.commons.cli.CommandLine) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException)

Example 8 with ParseException

Use of org.apache.commons.cli.ParseException in the Apache Hadoop project.

From the class RegistryCli, the method rm:

@SuppressWarnings("unchecked")
/**
 * Deletes a registry path, optionally recursively (-r).
 *
 * @param args command-line arguments; args[0] is the command name and
 *             exactly one path argument must follow
 * @return 0 on success, -1 on failure, or the usage-error code
 */
@SuppressWarnings("unchecked")
public int rm(String[] args) {
    Options rmOptions = new Options();
    rmOptions.addOption(OptionBuilder.withArgName("recursive").withDescription("delete recursively").create("r"));
    try {
        CommandLine parsed = new GnuParser().parse(rmOptions, args);
        List<String> arguments = parsed.getArgList();
        // arguments.get(0) is the command itself; exactly one path must follow.
        if (arguments.size() != 2) {
            return usageError("RM requires exactly one path argument", RM_USAGE);
        }
        String path = arguments.get(1);
        if (!validatePath(path)) {
            return -1;
        }
        try {
            registry.delete(path, parsed.hasOption("r"));
            return 0;
        } catch (Exception e) {
            syserr.println(analyzeException("rm", e, arguments));
            return -1;
        }
    } catch (ParseException exp) {
        return usageError("Invalid syntax " + exp.toString(), RM_USAGE);
    }
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser) Option(org.apache.commons.cli.Option) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException) URISyntaxException(java.net.URISyntaxException) InvalidRecordException(org.apache.hadoop.registry.client.exceptions.InvalidRecordException) InvalidPathnameException(org.apache.hadoop.registry.client.exceptions.InvalidPathnameException) AuthenticationFailedException(org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException) PathNotFoundException(org.apache.hadoop.fs.PathNotFoundException) NoRecordException(org.apache.hadoop.registry.client.exceptions.NoRecordException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException) AccessControlException(org.apache.hadoop.security.AccessControlException) NoPathPermissionsException(org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException)

Example 9 with ParseException

Use of org.apache.commons.cli.ParseException in the Apache Hadoop project.

From the class RegistryCli, the method resolve:

@SuppressWarnings("unchecked")
/**
 * Resolves a registry path to a service record and prints the external
 * endpoints (protocol, API, and addresses) to sysout.
 *
 * @param args command-line arguments; args[0] is the command name and
 *             exactly one path argument must follow
 * @return 0 on success, -1 on failure, or the usage-error code
 */
@SuppressWarnings("unchecked")
public int resolve(String[] args) {
    Options resolveOptions = new Options();
    try {
        CommandLine parsed = new GnuParser().parse(resolveOptions, args);
        List<String> arguments = parsed.getArgList();
        // arguments.get(0) is the command itself; exactly one path must follow.
        if (arguments.size() != 2) {
            return usageError("resolve requires exactly one path argument", RESOLVE_USAGE);
        }
        String path = arguments.get(1);
        if (!validatePath(path)) {
            return -1;
        }
        try {
            ServiceRecord record = registry.resolve(path);
            for (Endpoint endpoint : record.external) {
                sysout.println(" Endpoint(ProtocolType=" + endpoint.protocolType + ", Api=" + endpoint.api + ");" + " Addresses(AddressType=" + endpoint.addressType + ") are: ");
                for (Map<String, String> address : endpoint.addresses) {
                    sysout.println("[ ");
                    for (Map.Entry<String, String> pair : address.entrySet()) {
                        sysout.print("\t" + pair.getKey() + ":" + pair.getValue());
                    }
                    sysout.println("\n]");
                }
                sysout.println();
            }
            return 0;
        } catch (Exception e) {
            syserr.println(analyzeException("resolve", e, arguments));
            return -1;
        }
    } catch (ParseException exp) {
        return usageError("Invalid syntax " + exp, RESOLVE_USAGE);
    }
}
Also used : Options(org.apache.commons.cli.Options) GnuParser(org.apache.commons.cli.GnuParser) URISyntaxException(java.net.URISyntaxException) InvalidRecordException(org.apache.hadoop.registry.client.exceptions.InvalidRecordException) InvalidPathnameException(org.apache.hadoop.registry.client.exceptions.InvalidPathnameException) AuthenticationFailedException(org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException) PathNotFoundException(org.apache.hadoop.fs.PathNotFoundException) NoRecordException(org.apache.hadoop.registry.client.exceptions.NoRecordException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException) AccessControlException(org.apache.hadoop.security.AccessControlException) NoPathPermissionsException(org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException) ServiceRecord(org.apache.hadoop.registry.client.types.ServiceRecord) CommandLine(org.apache.commons.cli.CommandLine) Endpoint(org.apache.hadoop.registry.client.types.Endpoint) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException) Map(java.util.Map)

Example 10 with ParseException

Use of org.apache.commons.cli.ParseException in the Apache Hadoop project.

From the class Submitter, the method run:

/**
 * Parses the pipes submitter command line, configures a JobConf from the
 * options, and runs the job.
 *
 * Fixes relative to the previous version:
 * - "-jobconf n1=v1" entries are now split on the FIRST '=' only, so
 *   property values that themselves contain '=' are no longer truncated.
 * - The job jar URL is built via File.toURI().toURL() instead of the
 *   deprecated File.toURL(), which does not escape special characters.
 *
 * @param args command-line arguments
 * @return 0 on success, 1 on usage/parse error
 * @throws Exception if job submission fails
 */
@Override
public int run(String[] args) throws Exception {
    CommandLineParser cli = new CommandLineParser();
    if (args.length == 0) {
        cli.printUsage();
        return 1;
    }
    cli.addOption("input", false, "input path to the maps", "path");
    cli.addOption("output", false, "output path from the reduces", "path");
    cli.addOption("jar", false, "job jar file", "path");
    cli.addOption("inputformat", false, "java classname of InputFormat", "class");
    //cli.addArgument("javareader", false, "is the RecordReader in Java");
    cli.addOption("map", false, "java classname of Mapper", "class");
    cli.addOption("partitioner", false, "java classname of Partitioner", "class");
    cli.addOption("reduce", false, "java classname of Reducer", "class");
    cli.addOption("writer", false, "java classname of OutputFormat", "class");
    cli.addOption("program", false, "URI to application executable", "class");
    cli.addOption("reduces", false, "number of reduces", "num");
    cli.addOption("jobconf", false, "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
    cli.addOption("lazyOutput", false, "Optional. Create output lazily", "boolean");
    Parser parser = cli.createParser();
    try {
        // Generic Hadoop options (-D, -fs, ...) are consumed first; the
        // remainder is parsed against the pipes-specific options above.
        GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
        CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
        JobConf job = new JobConf(getConf());
        if (results.hasOption("input")) {
            FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
        }
        if (results.hasOption("output")) {
            FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
        }
        if (results.hasOption("jar")) {
            job.setJar(results.getOptionValue("jar"));
        }
        if (results.hasOption("inputformat")) {
            // A Java InputFormat implies a Java RecordReader.
            setIsJavaRecordReader(job, true);
            job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
        }
        if (results.hasOption("javareader")) {
            setIsJavaRecordReader(job, true);
        }
        if (results.hasOption("map")) {
            setIsJavaMapper(job, true);
            job.setMapperClass(getClass(results, "map", job, Mapper.class));
        }
        if (results.hasOption("partitioner")) {
            job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
        }
        if (results.hasOption("reduce")) {
            setIsJavaReducer(job, true);
            job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
        }
        if (results.hasOption("reduces")) {
            job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
        }
        if (results.hasOption("writer")) {
            setIsJavaRecordWriter(job, true);
            job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
        }
        if (results.hasOption("lazyOutput")) {
            if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
                LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormat().getClass());
            }
        }
        if (results.hasOption("program")) {
            setExecutable(job, results.getOptionValue("program"));
        }
        if (results.hasOption("jobconf")) {
            LOG.warn("-jobconf option is deprecated, please use -D instead.");
            String options = results.getOptionValue("jobconf");
            StringTokenizer tokenizer = new StringTokenizer(options, ",");
            while (tokenizer.hasMoreTokens()) {
                String keyVal = tokenizer.nextToken().trim();
                // Split on the first '=' only so that property values which
                // contain '=' themselves are preserved intact.
                String[] keyValSplit = keyVal.split("=", 2);
                job.set(keyValSplit[0], keyValSplit[1]);
            }
        }
        // if they gave us a jar file, include it into the class path
        String jarFile = job.getJar();
        if (jarFile != null) {
            // toURI().toURL() (rather than the deprecated toURL()) correctly
            // escapes characters that are illegal in URLs.
            final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURI().toURL() };
            //FindBugs complains that creating a URLClassLoader should be
            //in a doPrivileged() block. 
            ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {

                public ClassLoader run() {
                    return new URLClassLoader(urls);
                }
            });
            job.setClassLoader(loader);
        }
        runJob(job);
        return 0;
    } catch (ParseException pe) {
        LOG.info("Error : " + pe);
        cli.printUsage();
        return 1;
    }
}
Also used : Path(org.apache.hadoop.fs.Path) NullOutputFormat(org.apache.hadoop.mapred.lib.NullOutputFormat) OutputFormat(org.apache.hadoop.mapred.OutputFormat) LazyOutputFormat(org.apache.hadoop.mapred.lib.LazyOutputFormat) FileOutputFormat(org.apache.hadoop.mapred.FileOutputFormat) URL(java.net.URL) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser) BasicParser(org.apache.commons.cli.BasicParser) Parser(org.apache.commons.cli.Parser) Mapper(org.apache.hadoop.mapred.Mapper) CommandLine(org.apache.commons.cli.CommandLine) StringTokenizer(java.util.StringTokenizer) InputFormat(org.apache.hadoop.mapred.InputFormat) FileInputFormat(org.apache.hadoop.mapred.FileInputFormat) URLClassLoader(java.net.URLClassLoader) URLClassLoader(java.net.URLClassLoader) ParseException(org.apache.commons.cli.ParseException) Reducer(org.apache.hadoop.mapred.Reducer) JobConf(org.apache.hadoop.mapred.JobConf) HashPartitioner(org.apache.hadoop.mapred.lib.HashPartitioner) Partitioner(org.apache.hadoop.mapred.Partitioner) GenericOptionsParser(org.apache.hadoop.util.GenericOptionsParser)

Aggregations

ParseException (org.apache.commons.cli.ParseException)587 CommandLine (org.apache.commons.cli.CommandLine)489 CommandLineParser (org.apache.commons.cli.CommandLineParser)381 Options (org.apache.commons.cli.Options)370 DefaultParser (org.apache.commons.cli.DefaultParser)220 HelpFormatter (org.apache.commons.cli.HelpFormatter)205 GnuParser (org.apache.commons.cli.GnuParser)173 IOException (java.io.IOException)124 Option (org.apache.commons.cli.Option)109 File (java.io.File)90 PosixParser (org.apache.commons.cli.PosixParser)65 Path (org.apache.hadoop.fs.Path)50 ArrayList (java.util.ArrayList)42 Properties (java.util.Properties)35 BasicParser (org.apache.commons.cli.BasicParser)32 FileInputStream (java.io.FileInputStream)29 Job (org.apache.hadoop.mapreduce.Job)27 Configuration (org.apache.hadoop.conf.Configuration)26 List (java.util.List)25 URI (java.net.URI)21