Example 11 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hbase by apache.

From the class WALPrettyPrinter, the method run:

/**
   * Pass one or more log file names and formatting options and it will dump out
   * a text version of the contents on <code>stdout</code>.
   *
   * @param args
   *          Command line arguments
   * @throws IOException
   *           Thrown upon file system errors etc.
   */
public static void run(String[] args) throws IOException {
    // create options
    Options options = new Options();
    options.addOption("h", "help", false, "Output help message");
    options.addOption("j", "json", false, "Output JSON");
    options.addOption("p", "printvals", false, "Print values");
    options.addOption("r", "region", true, "Region to filter by. Pass encoded region name; e.g. '9192caead6a5a20acb4454ffbc79fa14'");
    options.addOption("s", "sequence", true, "Sequence to filter by. Pass sequence number.");
    options.addOption("w", "row", true, "Row to filter by. Pass row name.");
    WALPrettyPrinter printer = new WALPrettyPrinter();
    CommandLineParser parser = new PosixParser();
    List<?> files = null;
    try {
        CommandLine cmd = parser.parse(options, args);
        files = cmd.getArgList();
        if (files.isEmpty() || cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("WAL <filename...>", options, true);
            System.exit(-1);
        }
        // configure the pretty printer using command line options
        if (cmd.hasOption("p"))
            printer.enableValues();
        if (cmd.hasOption("j"))
            printer.enableJSON();
        if (cmd.hasOption("r"))
            printer.setRegionFilter(cmd.getOptionValue("r"));
        if (cmd.hasOption("s"))
            printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
        if (cmd.hasOption("w"))
            printer.setRowFilter(cmd.getOptionValue("w"));
    } catch (ParseException e) {
        e.printStackTrace();
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("HFile filename(s) ", options, true);
        System.exit(-1);
    }
    // get configuration, file system, and process the given files
    Configuration conf = HBaseConfiguration.create();
    FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));
    // begin output
    printer.beginPersistentOutput();
    for (Object f : files) {
        Path file = new Path((String) f);
        FileSystem fs = file.getFileSystem(conf);
        if (!fs.exists(file)) {
            System.err.println("ERROR, file doesnt exist: " + file);
            return;
        }
        printer.processFile(conf, file);
    }
    printer.endPersistentOutput();
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), Path (org.apache.hadoop.fs.Path), Options (org.apache.commons.cli.Options), CommandLine (org.apache.commons.cli.CommandLine), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), PosixParser (org.apache.commons.cli.PosixParser), FileSystem (org.apache.hadoop.fs.FileSystem), CommandLineParser (org.apache.commons.cli.CommandLineParser), ParseException (org.apache.commons.cli.ParseException)
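
For comparison, a minimal, self-contained sketch of the same three-argument printHelp overload used above (the class name WalHelpDemo is hypothetical, and the option set is trimmed down). With the final autoUsage flag set to true, HelpFormatter generates the usage line from the registered options instead of requiring it to be written out by hand:

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class WalHelpDemo {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("h", "help", false, "Output help message");
        options.addOption("j", "json", false, "Output JSON");
        options.addOption("r", "region", true, "Region to filter by");
        // With autoUsage = true, the usage line, e.g.
        // "usage: WAL <filename...> [-h] [-j] [-r <arg>]",
        // is derived from the Options instance.
        new HelpFormatter().printHelp("WAL <filename...>", options, true);
    }
}

Note that PosixParser, used here and in several of the examples below, has been deprecated since commons-cli 1.3 in favor of DefaultParser.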

Example 12 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hbase by apache.

From the class RESTServer, the method printUsageAndExit:

private static void printUsageAndExit(Options options, int exitCode) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("hbase rest start", "", options, "\nTo run the REST server as a daemon, execute " + "hbase-daemon.sh start|stop rest [--infoport <port>] [-p <port>] [-ro]\n", true);
    System.exit(exitCode);
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter)
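
This example uses the five-argument printHelp(cmdLineSyntax, header, options, footer, autoUsage) overload: the header string is printed between the usage line and the option list, and the footer string after it. A minimal sketch, with a hypothetical class name (RestHelpDemo) and a simplified option set:

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class RestHelpDemo {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("p", "port", true, "Port to bind the REST server to");
        HelpFormatter formatter = new HelpFormatter();
        // An empty header means nothing is printed between the usage line
        // and the option list; the footer is printed after the options.
        formatter.printHelp("hbase rest start", "", options,
            "\nTo run the REST server as a daemon, execute "
            + "hbase-daemon.sh start|stop rest\n", true);
    }
}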

Example 13 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hadoop by apache.

From the class TimelineSchemaCreator, the method parseArgs:

/**
   * Parse command-line arguments.
   *
   * @param args
   *          command line arguments passed to program.
   * @return parsed command line.
   * @throws ParseException if the command line arguments cannot be parsed.
   */
private static CommandLine parseArgs(String[] args) throws ParseException {
    Options options = new Options();
    // Input
    Option o = new Option(ENTITY_TABLE_NAME_SHORT, "entityTableName", true, "entity table name");
    o.setArgName("entityTableName");
    o.setRequired(false);
    options.addOption(o);
    o = new Option(TTL_OPTION_SHORT, "metricsTTL", true, "TTL for metrics column family");
    o.setArgName("metricsTTL");
    o.setRequired(false);
    options.addOption(o);
    o = new Option(APP_TO_FLOW_TABLE_NAME_SHORT, "appToflowTableName", true, "app to flow table name");
    o.setArgName("appToflowTableName");
    o.setRequired(false);
    options.addOption(o);
    o = new Option(APP_TABLE_NAME_SHORT, "applicationTableName", true, "application table name");
    o.setArgName("applicationTableName");
    o.setRequired(false);
    options.addOption(o);
    // Options without an argument
    // No need to set arg name since we do not need an argument here
    o = new Option(SKIP_EXISTING_TABLE_OPTION_SHORT, "skipExistingTable", false, "skip existing HBase tables and continue to create new tables");
    o.setRequired(false);
    options.addOption(o);
    CommandLineParser parser = new PosixParser();
    CommandLine commandLine = null;
    try {
        commandLine = parser.parse(options, args);
    } catch (Exception e) {
        LOG.error("ERROR: " + e.getMessage() + "\n");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(NAME + " ", options, true);
        System.exit(-1);
    }
    return commandLine;
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), Options (org.apache.commons.cli.Options), CommandLine (org.apache.commons.cli.CommandLine), PosixParser (org.apache.commons.cli.PosixParser), Option (org.apache.commons.cli.Option), CommandLineParser (org.apache.commons.cli.CommandLineParser), IOException (java.io.IOException), ParseException (org.apache.commons.cli.ParseException)
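
Unlike the earlier examples, which use the Options.addOption shorthand, this one builds Option objects directly so that setArgName can control the placeholder shown in the generated help text. A minimal sketch (the class name ArgNameDemo and the short option letter "e" are hypothetical):

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class ArgNameDemo {
    public static void main(String[] args) {
        Options options = new Options();
        Option o = new Option("e", "entityTableName", true, "entity table name");
        // setArgName controls the placeholder in the help output, e.g.:
        //   -e,--entityTableName <entityTableName>   entity table name
        o.setArgName("entityTableName");
        o.setRequired(false);
        options.addOption(o);
        new HelpFormatter().printHelp("TimelineSchemaCreator", options, true);
    }
}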

Example 14 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hbase by apache.

From the class ProcedureWALPrettyPrinter, the method run:

/**
   * Pass one or more log file names and formatting options and it will dump out
   * a text version of the contents on <code>stdout</code>.
   *
   * @param args
   *          Command line arguments
   * @throws IOException
   *           Thrown upon file system errors etc.
   */
public int run(final String[] args) throws IOException {
    // create options
    Options options = new Options();
    options.addOption("h", "help", false, "Output help message");
    options.addOption("f", "file", true, "File to print");
    final List<Path> files = new ArrayList<>();
    try {
        CommandLine cmd = new PosixParser().parse(options, args);
        if (cmd.hasOption("f")) {
            files.add(new Path(cmd.getOptionValue("f")));
        }
        if (files.isEmpty() || cmd.hasOption("h")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("ProcedureWALPrettyPrinter ", options, true);
            return (-1);
        }
    } catch (ParseException e) {
        e.printStackTrace();
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("ProcedureWALPrettyPrinter ", options, true);
        return (-1);
    }
    // get configuration, file system, and process the given files
    for (Path file : files) {
        processFile(getConf(), file);
    }
    return (0);
}
Also used: Path (org.apache.hadoop.fs.Path), HelpFormatter (org.apache.commons.cli.HelpFormatter), Options (org.apache.commons.cli.Options), CommandLine (org.apache.commons.cli.CommandLine), PosixParser (org.apache.commons.cli.PosixParser), ArrayList (java.util.ArrayList), ParseException (org.apache.commons.cli.ParseException)
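
Unlike Example 11, this run method reports failure through its return value rather than calling System.exit, which keeps it usable from tests and other callers. The getConf() call suggests the class implements Hadoop's Tool interface; under that assumption (and assuming HBase's procedure2 package layout for the import), a typical launcher (the class name PrettyPrinterLauncher is hypothetical) would convert the return value into a process exit status via ToolRunner:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.procedure2.store.wal.ProcedureWALPrettyPrinter;
import org.apache.hadoop.util.ToolRunner;

public class PrettyPrinterLauncher {
    public static void main(String[] args) throws Exception {
        // ToolRunner.run injects the Configuration, strips generic Hadoop
        // options, and invokes run(args); the returned int becomes the
        // process exit status.
        int ret = ToolRunner.run(new Configuration(),
                new ProcedureWALPrettyPrinter(), args);
        System.exit(ret);
    }
}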

Example 15 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hive by apache.

From the class HiveMetaTool, the method main:

public static void main(String[] args) {
    HiveMetaTool metaTool = new HiveMetaTool();
    metaTool.init();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        try {
            line = parser.parse(metaTool.cmdLineOptions, args);
        } catch (ParseException e) {
            System.err.println("HiveMetaTool:Parsing failed.  Reason: " + e.getLocalizedMessage());
            printAndExit(metaTool);
        }
        if (line.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("metatool", metaTool.cmdLineOptions);
        } else if (line.hasOption("listFSRoot")) {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            }
            metaTool.listFSRoot();
        } else if (line.hasOption("executeJDOQL")) {
            String query = line.getOptionValue("executeJDOQL");
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            }
            if (query.toLowerCase().trim().startsWith("select")) {
                metaTool.executeJDOQLSelect(query);
            } else if (query.toLowerCase().trim().startsWith("update")) {
                metaTool.executeJDOQLUpdate(query);
            } else {
                System.err.println("HiveMetaTool:Unsupported statement type");
                printAndExit(metaTool);
            }
        } else if (line.hasOption("updateLocation")) {
            String[] loc = line.getOptionValues("updateLocation");
            boolean isDryRun = false;
            String serdepropKey = null;
            String tablePropKey = null;
            if (loc.length != 2 && loc.length != 3) {
                System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " + "optional arguments but " + "was passed " + loc.length + " arguments");
                printAndExit(metaTool);
            }
            Path newPath = new Path(loc[0]);
            Path oldPath = new Path(loc[1]);
            URI oldURI = oldPath.toUri();
            URI newURI = newPath.toUri();
            if (line.hasOption("dryRun")) {
                isDryRun = true;
            }
            if (line.hasOption("serdePropKey")) {
                serdepropKey = line.getOptionValue("serdePropKey");
            }
            if (line.hasOption("tablePropKey")) {
                tablePropKey = line.getOptionValue("tablePropKey");
            }
            /*
         * validate input - Both new and old URI should contain valid host names and valid schemes.
         * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
         */
            if (oldURI.getHost() == null || newURI.getHost() == null) {
                System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
            } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
                System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
            } else {
                metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
            }
        } else {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
                printAndExit(metaTool);
            } else {
                System.err.print("HiveMetaTool:Parsing failed.  Reason: Invalid arguments: ");
                for (String s : line.getArgs()) {
                    System.err.print(s + " ");
                }
                System.err.println();
            }
            printAndExit(metaTool);
        }
    } finally {
        metaTool.shutdownObjectStore();
    }
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), Path (org.apache.hadoop.fs.Path), CommandLine (org.apache.commons.cli.CommandLine), GnuParser (org.apache.commons.cli.GnuParser), CommandLineParser (org.apache.commons.cli.CommandLineParser), ParseException (org.apache.commons.cli.ParseException), URI (java.net.URI)
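
Two details are worth noting here. First, the two-argument printHelp(String, Options) overload prints the syntax string and the formatted option list with no auto-generated usage line. Second, GnuParser (like PosixParser) has been deprecated since commons-cli 1.3 in favor of DefaultParser. A minimal sketch of the same help path with the modern parser (the class name MetatoolHelpDemo is hypothetical and the option set is trimmed):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class MetatoolHelpDemo {
    public static void main(String[] args) {
        Options options = new Options();
        options.addOption("help", false, "print this message");
        try {
            // DefaultParser is the drop-in replacement for GnuParser.
            CommandLineParser parser = new DefaultParser();
            CommandLine line = parser.parse(options, args);
            if (line.hasOption("help")) {
                // Two-argument overload: option list only, no usage line.
                new HelpFormatter().printHelp("metatool", options);
            }
        } catch (ParseException e) {
            System.err.println("Parsing failed. Reason: " + e.getMessage());
        }
    }
}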

Aggregations

Classes most often used together with HelpFormatter across the indexed examples, with occurrence counts:

HelpFormatter (org.apache.commons.cli.HelpFormatter): 273
Options (org.apache.commons.cli.Options): 136
CommandLine (org.apache.commons.cli.CommandLine): 126
CommandLineParser (org.apache.commons.cli.CommandLineParser): 110
ParseException (org.apache.commons.cli.ParseException): 103
GnuParser (org.apache.commons.cli.GnuParser): 92
Path (org.apache.hadoop.fs.Path): 42
PrintWriter (java.io.PrintWriter): 35
Option (org.apache.commons.cli.Option): 29
Job (org.apache.hadoop.mapreduce.Job): 27
Configuration (org.apache.hadoop.conf.Configuration): 21
File (java.io.File): 17
IOException (java.io.IOException): 14
DefaultParser (org.apache.commons.cli.DefaultParser): 13
PosixParser (org.apache.commons.cli.PosixParser): 12
FileSystem (org.apache.hadoop.fs.FileSystem): 12
BasicParser (org.apache.commons.cli.BasicParser): 11
ArrayList (java.util.ArrayList): 8
URI (java.net.URI): 6
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6