Search in sources:

Example 6 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hadoop project.

From the class RumenToSLSConverter, method main.

/**
 * Command-line entry point: converts a Hadoop Rumen trace JSON file into
 * the two input files (jobs and nodes) consumed by the SLS simulator.
 *
 * Required options: -input FILE -outputJobs FILE -outputNodes FILE.
 * Exits with status 1 on missing options, a missing input file, an
 * already-existing output file, or an output directory that cannot be
 * created.
 *
 * @param args command-line arguments, parsed with commons-cli
 * @throws Exception if option parsing or file generation fails
 */
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("input", true, "input rumen json file");
    options.addOption("outputJobs", true, "output jobs file");
    options.addOption("outputNodes", true, "output nodes file");
    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(options, args);
    if (!cmd.hasOption("input") || !cmd.hasOption("outputJobs") || !cmd.hasOption("outputNodes")) {
        System.err.println();
        System.err.println("ERROR: Missing input or output file");
        System.err.println();
        System.err.println("LoadGenerator creates a SLS script " + "from a Hadoop Rumen output");
        System.err.println();
        System.err.println("Options: -input FILE -outputJobs FILE " + "-outputNodes FILE");
        System.err.println();
        System.exit(1);
    }
    String inputFile = cmd.getOptionValue("input");
    String outputJsonFile = cmd.getOptionValue("outputJobs");
    String outputNodeFile = cmd.getOptionValue("outputNodes");
    // Validate that the input exists and the outputs do not, so an
    // existing result file is never clobbered.
    if (!new File(inputFile).exists()) {
        System.err.println();
        System.err.println("ERROR: input does not exist");
        System.exit(1);
    }
    if (new File(outputJsonFile).exists()) {
        System.err.println();
        System.err.println("ERROR: output job file is existing");
        System.exit(1);
    }
    if (new File(outputNodeFile).exists()) {
        System.err.println();
        System.err.println("ERROR: output node file is existing");
        System.exit(1);
    }
    ensureParentDirExists(new File(outputJsonFile));
    ensureParentDirExists(new File(outputNodeFile));
    generateSLSLoadFile(inputFile, outputJsonFile);
    generateSLSNodeFile(outputNodeFile);
}

/**
 * Creates the parent directory of {@code file} if it does not exist,
 * exiting with status 1 when creation fails.
 *
 * Fixes an NPE in the original code: {@link File#getParentFile()} returns
 * null for a bare file name with no directory component; in that case the
 * file lands in the current working directory and nothing needs creating.
 */
private static void ensureParentDirExists(File file) {
    File parent = file.getParentFile();
    if (parent != null && !parent.exists() && !parent.mkdirs()) {
        System.err.println("ERROR: Cannot create output directory in path: " + parent.getAbsoluteFile());
        System.exit(1);
    }
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser) CommandLineParser(org.apache.commons.cli.CommandLineParser) File(java.io.File)

Example 7 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hadoop project.

From the class RegistryCli, method rm.

@SuppressWarnings("unchecked")
public int rm(String[] args) {
    // Only one switch is supported: -r enables recursive deletion.
    Options rmOption = new Options();
    rmOption.addOption(OptionBuilder.withArgName("recursive").withDescription("delete recursively").create("r"));
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(rmOption, args);
        List<String> argsList = line.getArgList();
        // argsList holds the command name plus the path, hence size 2.
        if (argsList.size() != 2) {
            return usageError("RM requires exactly one path argument", RM_USAGE);
        }
        if (!validatePath(argsList.get(1))) {
            return -1;
        }
        try {
            boolean recursiveOpt = line.hasOption("r");
            registry.delete(argsList.get(1), recursiveOpt);
            return 0;
        } catch (Exception e) {
            // Registry failure: report a human-readable diagnosis and fail.
            syserr.println(analyzeException("rm", e, argsList));
            return -1;
        }
    } catch (ParseException exp) {
        return usageError("Invalid syntax " + exp.toString(), RM_USAGE);
    }
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser) Option(org.apache.commons.cli.Option) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException) URISyntaxException(java.net.URISyntaxException) InvalidRecordException(org.apache.hadoop.registry.client.exceptions.InvalidRecordException) InvalidPathnameException(org.apache.hadoop.registry.client.exceptions.InvalidPathnameException) AuthenticationFailedException(org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException) PathNotFoundException(org.apache.hadoop.fs.PathNotFoundException) NoRecordException(org.apache.hadoop.registry.client.exceptions.NoRecordException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException) AccessControlException(org.apache.hadoop.security.AccessControlException) NoPathPermissionsException(org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException)

Example 8 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hadoop project.

From the class RegistryCli, method resolve.

@SuppressWarnings("unchecked")
public int resolve(String[] args) {
    // No switches to declare; parsing is used only to collect positionals.
    Options resolveOption = new Options();
    CommandLineParser parser = new GnuParser();
    try {
        CommandLine line = parser.parse(resolveOption, args);
        List<String> arguments = line.getArgList();
        // arguments holds the command name plus the path, hence size 2.
        if (arguments.size() != 2) {
            return usageError("resolve requires exactly one path argument", RESOLVE_USAGE);
        }
        String path = arguments.get(1);
        if (!validatePath(path)) {
            return -1;
        }
        try {
            ServiceRecord record = registry.resolve(path);
            // Print each external endpoint followed by all of its addresses.
            for (Endpoint endpoint : record.external) {
                sysout.println(" Endpoint(ProtocolType=" + endpoint.protocolType + ", Api=" + endpoint.api + ");" + " Addresses(AddressType=" + endpoint.addressType + ") are: ");
                for (Map<String, String> address : endpoint.addresses) {
                    sysout.println("[ ");
                    for (Map.Entry<String, String> entry : address.entrySet()) {
                        sysout.print("\t" + entry.getKey() + ":" + entry.getValue());
                    }
                    sysout.println("\n]");
                }
                sysout.println();
            }
            return 0;
        } catch (Exception e) {
            // Resolution failure: report a human-readable diagnosis and fail.
            syserr.println(analyzeException("resolve", e, arguments));
            return -1;
        }
    } catch (ParseException exp) {
        return usageError("Invalid syntax " + exp, RESOLVE_USAGE);
    }
}
Also used : Options(org.apache.commons.cli.Options) GnuParser(org.apache.commons.cli.GnuParser) URISyntaxException(java.net.URISyntaxException) InvalidRecordException(org.apache.hadoop.registry.client.exceptions.InvalidRecordException) InvalidPathnameException(org.apache.hadoop.registry.client.exceptions.InvalidPathnameException) AuthenticationFailedException(org.apache.hadoop.registry.client.exceptions.AuthenticationFailedException) PathNotFoundException(org.apache.hadoop.fs.PathNotFoundException) NoRecordException(org.apache.hadoop.registry.client.exceptions.NoRecordException) IOException(java.io.IOException) ParseException(org.apache.commons.cli.ParseException) AccessControlException(org.apache.hadoop.security.AccessControlException) NoPathPermissionsException(org.apache.hadoop.registry.client.exceptions.NoPathPermissionsException) ServiceRecord(org.apache.hadoop.registry.client.types.ServiceRecord) CommandLine(org.apache.commons.cli.CommandLine) Endpoint(org.apache.hadoop.registry.client.types.Endpoint) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException) Map(java.util.Map)

Example 9 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hadoop project.

From the class SLSRunner, method main.

/**
 * Entry point for the SLS simulator: parses the command line, validates
 * input/output locations, and launches a SLSRunner.
 */
public static void main(String[] args) throws Exception {
    // Declare the supported command-line switches.
    Options options = new Options();
    options.addOption("inputrumen", true, "input rumen files");
    options.addOption("inputsls", true, "input sls files");
    options.addOption("nodes", true, "input topology");
    options.addOption("output", true, "output directory");
    options.addOption("trackjobs", true, "jobs to be tracked during simulating");
    options.addOption("printsimulation", false, "print out simulation information");
    CommandLine cmd = new GnuParser().parse(options, args);
    String inputRumen = cmd.getOptionValue("inputrumen");
    String inputSLS = cmd.getOptionValue("inputsls");
    String output = cmd.getOptionValue("output");
    // Exactly one of -inputrumen/-inputsls is needed, plus -output.
    if ((inputRumen == null && inputSLS == null) || output == null) {
        System.err.println();
        System.err.println("ERROR: Missing input or output file");
        System.err.println();
        System.err.println("Options: -inputrumen|-inputsls FILE,FILE... " + "-output FILE [-nodes FILE] [-trackjobs JobId,JobId...] " + "[-printsimulation]");
        System.err.println();
        System.exit(1);
    }
    File outputFile = new File(output);
    if (!outputFile.exists() && !outputFile.mkdirs()) {
        System.err.println("ERROR: Cannot create output directory " + outputFile.getAbsolutePath());
        System.exit(1);
    }
    // Collect the comma-separated job ids to track, if any were given.
    Set<String> trackedJobSet = new HashSet<String>();
    if (cmd.hasOption("trackjobs")) {
        for (String jobId : cmd.getOptionValue("trackjobs").split(",")) {
            trackedJobSet.add(jobId);
        }
    }
    String nodeFile;
    if (cmd.hasOption("nodes")) {
        nodeFile = cmd.getOptionValue("nodes");
    } else {
        nodeFile = "";
    }
    // SLS input takes precedence; otherwise fall back to the Rumen trace.
    boolean isSLS = inputSLS != null;
    String[] inputFiles;
    if (isSLS) {
        inputFiles = inputSLS.split(",");
    } else {
        inputFiles = inputRumen.split(",");
    }
    SLSRunner sls = new SLSRunner(isSLS, inputFiles, nodeFile, output, trackedJobSet, cmd.hasOption("printsimulation"));
    sls.start();
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser) CommandLineParser(org.apache.commons.cli.CommandLineParser) File(java.io.File) HashSet(java.util.HashSet)

Example 10 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hadoop project.

From the class ClusterCLI, method run.

/**
 * Runs the cluster CLI: parses the supported switches and dispatches to
 * the matching action (list node labels, print help, or report usage).
 *
 * @param args raw command-line arguments
 * @return 0 on success or help; -1 when option parsing fails with a
 *         missing argument
 * @throws Exception propagated from printClusterNodeLabels()
 */
@Override
public int run(String[] args) throws Exception {
    Options opts = new Options();
    // -lnl / --list-node-labels: show the cluster's node-label collection.
    opts.addOption("lnl", LIST_LABELS_CMD, false, "List cluster node-label collection");
    opts.addOption("h", HELP_CMD, false, "Displays help for all commands.");
    // Deprecated escape hatch: bypass the RM and touch the label store
    // directly; only works on the RM host with a local store directory.
    opts.addOption("dnl", DIRECTLY_ACCESS_NODE_LABEL_STORE, false, "This is DEPRECATED, will be removed in future releases. Directly access node label store, " + "with this option, all node label related operations" + " will NOT connect RM. Instead, they will" + " access/modify stored node labels directly." + " By default, it is false (access via RM)." + " AND PLEASE NOTE: if you configured " + YarnConfiguration.FS_NODE_LABELS_STORE_ROOT_DIR + " to a local directory" + " (instead of NFS or HDFS), this option will only work" + " when the command run on the machine where RM is running." + " Also, this option is UNSTABLE, could be removed in future" + " releases.");
    // exitCode is only returned on the parse-failure path below.
    int exitCode = -1;
    CommandLine parsedCli = null;
    try {
        parsedCli = new GnuParser().parse(opts, args);
    } catch (MissingArgumentException ex) {
        sysout.println("Missing argument for options");
        printUsage(opts);
        return exitCode;
    }
    // Presumably accessLocal is a field consumed by printClusterNodeLabels;
    // not visible here -- confirm against the enclosing class.
    if (parsedCli.hasOption(DIRECTLY_ACCESS_NODE_LABEL_STORE)) {
        accessLocal = true;
    }
    if (parsedCli.hasOption(LIST_LABELS_CMD)) {
        printClusterNodeLabels();
    } else if (parsedCli.hasOption(HELP_CMD)) {
        printUsage(opts);
        return 0;
    } else {
        // NOTE(review): invalid usage prints an error but still falls
        // through to return 0 -- looks intentional upstream, but confirm
        // before relying on the exit status here.
        syserr.println("Invalid Command Usage : ");
        printUsage(opts);
    }
    return 0;
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) MissingArgumentException(org.apache.commons.cli.MissingArgumentException) GnuParser(org.apache.commons.cli.GnuParser)

Aggregations

GnuParser (org.apache.commons.cli.GnuParser)208 CommandLine (org.apache.commons.cli.CommandLine)187 Options (org.apache.commons.cli.Options)165 CommandLineParser (org.apache.commons.cli.CommandLineParser)158 ParseException (org.apache.commons.cli.ParseException)139 HelpFormatter (org.apache.commons.cli.HelpFormatter)92 Path (org.apache.hadoop.fs.Path)40 Option (org.apache.commons.cli.Option)39 IOException (java.io.IOException)32 Job (org.apache.hadoop.mapreduce.Job)27 File (java.io.File)24 Configuration (org.apache.hadoop.conf.Configuration)19 FileInputStream (java.io.FileInputStream)14 ArrayList (java.util.ArrayList)14 Properties (java.util.Properties)13 FileSystem (org.apache.hadoop.fs.FileSystem)11 MissingArgumentException (org.apache.commons.cli.MissingArgumentException)9 FileNotFoundException (java.io.FileNotFoundException)7 URI (java.net.URI)7 URISyntaxException (java.net.URISyntaxException)6