
Example 16 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hive by apache.

The class HiveMetaTool, method printAndExit:

private static void printAndExit(HiveMetaTool metaTool) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("metatool", metaTool.cmdLineOptions);
    System.exit(1);
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter)
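The metatool options themselves are built elsewhere in HiveMetaTool, so the snippet above is not runnable on its own. Below is a minimal, self-contained sketch of the same two-argument printHelp(cmdLineSyntax, options) call; the option definitions are hypothetical stand-ins for cmdLineOptions:

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class PrintHelpSketch {
    public static void main(String[] args) {
        // Hypothetical options; HiveMetaTool builds its real cmdLineOptions elsewhere in the class
        Options options = new Options();
        options.addOption("help", false, "print this message");
        options.addOption("dryRun", false, "perform a dry run");
        HelpFormatter formatter = new HelpFormatter();
        // Writes "usage: metatool" plus the formatted option table to System.out
        formatter.printHelp("metatool", options);
    }
}

By default the formatter wraps at 74 characters and prints to System.out; Example 19 below shows one way to widen that.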

Example 17 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hbase by apache.

The class ThriftServer, method printUsageAndExit:

private static void printUsageAndExit(Options options, int exitCode) throws ExitCodeException {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("Thrift", null, options, "To start the Thrift server run 'hbase-daemon.sh start thrift'\n" + "To shutdown the thrift server run 'hbase-daemon.sh stop " + "thrift' or send a kill signal to the thrift server pid", true);
    throw new ExitCodeException(exitCode, "");
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), ExitCodeException (org.apache.hadoop.util.Shell.ExitCodeException)
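For reference, here is a hedged, self-contained sketch of the five-argument overload used above, printHelp(cmdLineSyntax, header, options, footer, autoUsage); the single option is hypothetical, and the footer string is shortened:

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class PrintHelpWithFooterSketch {
    public static void main(String[] args) {
        // Hypothetical option standing in for the Thrift server's real options
        Options options = new Options();
        options.addOption("p", "port", true, "port to listen on");
        HelpFormatter formatter = new HelpFormatter();
        // printHelp(cmdLineSyntax, header, options, footer, autoUsage):
        // a null header is allowed, the footer is printed after the option table,
        // and autoUsage=true derives the usage line from the Options
        formatter.printHelp("Thrift", null, options,
            "To start the Thrift server run 'hbase-daemon.sh start thrift'", true);
    }
}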

Example 18 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hbase by apache.

The class TestJoinedScanners, method main:

/**
   * Command line interface.
   * @param args command-line arguments
   * @throws IOException if an error occurs while reading from disk
   */
public static void main(final String[] args) throws Exception {
    // 'options' is a static Options field in the enclosing class; declared here so the snippet is self-contained
    Options options = new Options();
    Option encodingOption = new Option("e", "blockEncoding", true, "Data block encoding; Default: FAST_DIFF");
    encodingOption.setRequired(false);
    options.addOption(encodingOption);
    Option ratioOption = new Option("r", "selectionRatio", true, "Ratio of selected rows using essential column family");
    ratioOption.setRequired(false);
    options.addOption(ratioOption);
    Option widthOption = new Option("w", "valueWidth", true, "Width of value for non-essential column family");
    widthOption.setRequired(false);
    options.addOption(widthOption);
    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(options, args);
    if (args.length < 1) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("TestJoinedScanners", options, true);
    }
    if (cmd.hasOption("e")) {
        blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e"));
    }
    if (cmd.hasOption("r")) {
        selectionRatio = Integer.parseInt(cmd.getOptionValue("r"));
    }
    if (cmd.hasOption("w")) {
        valueWidth = Integer.parseInt(cmd.getOptionValue("w"));
    }
    // run the test
    TestJoinedScanners test = new TestJoinedScanners();
    test.testJoinedScanners();
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), CommandLine (org.apache.commons.cli.CommandLine), GnuParser (org.apache.commons.cli.GnuParser), Option (org.apache.commons.cli.Option), CommandLineParser (org.apache.commons.cli.CommandLineParser)
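GnuParser, used above, has been deprecated since commons-cli 1.3 in favour of DefaultParser. A minimal sketch of the same parse-then-print-help flow with DefaultParser follows; the single option is a hypothetical stand-in for the test's real options:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class ParseOrHelpSketch {
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        // Hypothetical option mirroring the "-r" option above
        Option ratio = new Option("r", "selectionRatio", true, "Ratio of selected rows");
        ratio.setRequired(false);
        options.addOption(ratio);
        // DefaultParser is the non-deprecated replacement for GnuParser (commons-cli 1.3+)
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);
        if (args.length < 1) {
            // The third argument 'true' also prints an auto-generated usage line
            new HelpFormatter().printHelp("ParseOrHelpSketch", options, true);
        }
        if (cmd.hasOption("r")) {
            System.out.println("selectionRatio = " + cmd.getOptionValue("r"));
        }
    }
}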

Example 19 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hive by apache.

The class LlapStatusOptionsProcessor, method printUsage:

public static void printUsage() {
    HelpFormatter hf = new HelpFormatter();
    try {
        int width = hf.getWidth();
        int jlineWidth = TerminalFactory.get().getWidth();
        // Ignore potentially incorrect values
        width = Math.min(160, Math.max(jlineWidth, width));
        hf.setWidth(width);
    } catch (Throwable t) {
    // Ignore
    }
    LlapStatusOptionsProcessor optionsProcessor = new LlapStatusOptionsProcessor();
    hf.printHelp(LLAPSTATUS_CONSTANT, optionsProcessor.options);
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter)
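The same width-capping idea in isolation: a hedged sketch that assumes jline2 (jline.TerminalFactory) is on the classpath, as it is in Hive, and that uses a literal command name in place of LLAPSTATUS_CONSTANT. The option is hypothetical:

import jline.TerminalFactory;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

public class TerminalWidthHelpSketch {
    public static void main(String[] args) {
        // Hypothetical option standing in for LlapStatusOptionsProcessor's real ones
        Options options = new Options();
        options.addOption("h", "help", false, "print this message");
        HelpFormatter hf = new HelpFormatter();
        try {
            // Prefer the terminal width over the 74-column default, but never exceed 160 columns
            int width = Math.min(160, Math.max(TerminalFactory.get().getWidth(), hf.getWidth()));
            hf.setWidth(width);
        } catch (Throwable t) {
            // Keep the default width if the terminal cannot be inspected
        }
        hf.printHelp("llapstatus", options);
    }
}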

Example 20 with HelpFormatter

Use of org.apache.commons.cli.HelpFormatter in project hive by apache.

The class LlapDump, method main:

public static void main(String[] args) throws Exception {
    // url, user, pwd, numSplits, query and LOG are static fields of LlapDump, not shown in this snippet
    Options opts = createOptions();
    CommandLine cli = new GnuParser().parse(opts, args);
    if (cli.hasOption('h')) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("llapdump", opts);
        return;
    }
    if (cli.hasOption('l')) {
        url = cli.getOptionValue("l");
    }
    if (cli.hasOption('u')) {
        user = cli.getOptionValue("u");
    }
    if (cli.hasOption('p')) {
        pwd = cli.getOptionValue("p");
    }
    if (cli.hasOption('n')) {
        numSplits = cli.getOptionValue("n");
    }
    Properties configProps = cli.getOptionProperties("hiveconf");
    if (cli.getArgs().length > 0) {
        query = cli.getArgs()[0];
    }
    if (query == null) {
        throw new IllegalArgumentException("No query string specified");
    }
    System.out.println("url: " + url);
    System.out.println("user: " + user);
    System.out.println("query: " + query);
    LlapRowInputFormat format = new LlapRowInputFormat();
    JobConf job = new JobConf();
    job.set(LlapBaseInputFormat.URL_KEY, url);
    job.set(LlapBaseInputFormat.USER_KEY, user);
    job.set(LlapBaseInputFormat.PWD_KEY, pwd);
    job.set(LlapBaseInputFormat.QUERY_KEY, query);
    // Additional conf settings specified on the command line
    for (String key : configProps.stringPropertyNames()) {
        job.set(key, configProps.getProperty(key));
    }
    InputSplit[] splits = format.getSplits(job, Integer.parseInt(numSplits));
    if (splits.length == 0) {
        System.out.println("No splits returned - empty scan");
        System.out.println("Results: ");
    } else {
        boolean first = true;
        for (InputSplit s : splits) {
            LOG.info("Processing input split s from " + Arrays.toString(s.getLocations()));
            RecordReader<NullWritable, Row> reader = format.getRecordReader(s, job, null);
            if (reader instanceof LlapRowRecordReader && first) {
                Schema schema = ((LlapRowRecordReader) reader).getSchema();
                System.out.println("" + schema);
            }
            if (first) {
                System.out.println("Results: ");
                System.out.println("");
                first = false;
            }
            Row value = reader.createValue();
            while (reader.next(NullWritable.get(), value)) {
                printRow(value);
            }
        }
        System.exit(0);
    }
}
Also used: Options (org.apache.commons.cli.Options), GnuParser (org.apache.commons.cli.GnuParser), Properties (java.util.Properties), NullWritable (org.apache.hadoop.io.NullWritable), HelpFormatter (org.apache.commons.cli.HelpFormatter), CommandLine (org.apache.commons.cli.CommandLine), JobConf (org.apache.hadoop.mapred.JobConf), InputSplit (org.apache.hadoop.mapred.InputSplit)
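createOptions() is not shown above, so the declaration of the hiveconf option is not visible. Here is a hedged sketch of one way to declare a key=value option so that getOptionProperties("hiveconf") returns the pairs as java.util.Properties; the builder calls follow the classic commons-cli "-Dproperty=value" pattern and are an assumption, not a copy of LlapDump's actual createOptions():

import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class OptionPropertiesSketch {
    public static void main(String[] args) throws Exception {
        Options opts = new Options();
        // A long option taking "key=value"; the '=' separator splits each argument into two values
        opts.addOption(Option.builder()
            .longOpt("hiveconf")
            .numberOfArgs(2)
            .valueSeparator('=')
            .argName("property=value")
            .desc("use value for given property")
            .build());
        CommandLine cli = new DefaultParser().parse(opts,
            new String[] { "--hiveconf", "hive.execution.engine=tez" });
        // The key/value pair is returned as java.util.Properties
        Properties configProps = cli.getOptionProperties("hiveconf");
        for (String key : configProps.stringPropertyNames()) {
            System.out.println(key + " = " + configProps.getProperty(key));
        }
    }
}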

Aggregations

HelpFormatter (org.apache.commons.cli.HelpFormatter): 273
Options (org.apache.commons.cli.Options): 136
CommandLine (org.apache.commons.cli.CommandLine): 126
CommandLineParser (org.apache.commons.cli.CommandLineParser): 110
ParseException (org.apache.commons.cli.ParseException): 103
GnuParser (org.apache.commons.cli.GnuParser): 92
Path (org.apache.hadoop.fs.Path): 42
PrintWriter (java.io.PrintWriter): 35
Option (org.apache.commons.cli.Option): 29
Job (org.apache.hadoop.mapreduce.Job): 27
Configuration (org.apache.hadoop.conf.Configuration): 21
File (java.io.File): 17
IOException (java.io.IOException): 14
DefaultParser (org.apache.commons.cli.DefaultParser): 13
PosixParser (org.apache.commons.cli.PosixParser): 12
FileSystem (org.apache.hadoop.fs.FileSystem): 12
BasicParser (org.apache.commons.cli.BasicParser): 11
ArrayList (java.util.ArrayList): 8
URI (java.net.URI): 6
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6