Example 21 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hive project.

From the class Arguments, the method parse:

/**
   * Parse the command line arguments
   */
public boolean parse(String[] args) {
    try {
        commandLine = new GnuParser().parse(options, args);
        execString = commandLine.getOptionValue('e');
        fileName = commandLine.getOptionValue('f');
        main = commandLine.getOptionValue("main");
        Properties p = commandLine.getOptionProperties("hiveconf");
        for (String key : p.stringPropertyNames()) {
            vars.put(key, p.getProperty(key));
        }
        p = commandLine.getOptionProperties("hivevar");
        for (String key : p.stringPropertyNames()) {
            vars.put(key, p.getProperty(key));
        }
        p = commandLine.getOptionProperties("define");
        for (String key : p.stringPropertyNames()) {
            vars.put(key, p.getProperty(key));
        }
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        return false;
    }
    return true;
}
Also used : GnuParser(org.apache.commons.cli.GnuParser) ParseException(org.apache.commons.cli.ParseException) Properties(java.util.Properties)
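
For getOptionProperties to return key/value pairs, each property-style option (hiveconf, hivevar, define) must be declared to take two arguments with '=' as the value separator. Below is a minimal sketch of how the options field consumed by this parser could be built; the descriptions and exact builder calls are assumptions, not Hive's actual Arguments code.

import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

// Hypothetical sketch: builds an Options instance compatible with parse() above.
// Property-style options (e.g. -hiveconf x=y) need two args and a '=' separator
// so that CommandLine.getOptionProperties() can pair keys with values.
class ArgumentsOptionsSketch {
    @SuppressWarnings("static-access")
    static Options buildOptions() {
        Options options = new Options();
        options.addOption("e", true, "SQL string to execute");
        options.addOption("f", true, "script file to execute");
        options.addOption(OptionBuilder.withLongOpt("main").hasArg()
            .withArgName("class").withDescription("main class to run").create());
        for (String prop : new String[] { "hiveconf", "hivevar", "define" }) {
            options.addOption(OptionBuilder.withLongOpt(prop).hasArgs(2)
                .withValueSeparator('=').withArgName("key=value")
                .withDescription("set " + prop + " variable as key=value").create());
        }
        return options;
    }
}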

Example 22 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hive project.

From the class LlapStatusOptionsProcessor, the method processOptions:

public LlapStatusOptions processOptions(String[] args) throws ParseException {
    commandLine = new GnuParser().parse(options, args);
    if (commandLine.hasOption(OptionConstants.HELP.getShortOpt())) {
        printUsage();
        return null;
    }
    String name = commandLine.getOptionValue(OptionConstants.NAME.getLongOpt());
    long findAppTimeoutMs = FIND_YARN_APP_TIMEOUT_MS;
    if (commandLine.hasOption(OptionConstants.FIND_APP_TIMEOUT.getLongOpt())) {
        findAppTimeoutMs = TimeUnit.MILLISECONDS.convert(Long.parseLong(commandLine.getOptionValue(OptionConstants.FIND_APP_TIMEOUT.getLongOpt())), TimeUnit.SECONDS);
    }
    Properties hiveConf;
    if (commandLine.hasOption(OptionConstants.HIVECONF.getLongOpt())) {
        hiveConf = commandLine.getOptionProperties(OptionConstants.HIVECONF.getLongOpt());
    } else {
        hiveConf = new Properties();
    }
    String outputFile = null;
    if (commandLine.hasOption(OptionConstants.OUTPUT_FILE.getLongOpt())) {
        outputFile = commandLine.getOptionValue(OptionConstants.OUTPUT_FILE.getLongOpt());
    }
    long refreshIntervalMs = DEFAULT_STATUS_REFRESH_INTERVAL_MS;
    if (commandLine.hasOption(OptionConstants.STATUS_REFRESH_INTERVAL.getLongOpt())) {
        long refreshIntervalSec = Long.parseLong(commandLine.getOptionValue(OptionConstants.STATUS_REFRESH_INTERVAL.getLongOpt()));
        if (refreshIntervalSec <= 0) {
            throw new IllegalArgumentException("Refresh interval should be >0");
        }
        refreshIntervalMs = TimeUnit.MILLISECONDS.convert(refreshIntervalSec, TimeUnit.SECONDS);
    }
    boolean watchMode = commandLine.hasOption(OptionConstants.WATCH_MODE.getLongOpt());
    long watchTimeoutMs = DEFAULT_WATCH_MODE_TIMEOUT_MS;
    if (commandLine.hasOption(OptionConstants.WATCH_MODE_TIMEOUT.getLongOpt())) {
        long watchTimeoutSec = Long.parseLong(commandLine.getOptionValue(OptionConstants.WATCH_MODE_TIMEOUT.getLongOpt()));
        if (watchTimeoutSec <= 0) {
            throw new IllegalArgumentException("Watch timeout should be >0");
        }
        watchTimeoutMs = TimeUnit.MILLISECONDS.convert(watchTimeoutSec, TimeUnit.SECONDS);
    }
    float runningNodesThreshold = DEFAULT_RUNNING_NODES_THRESHOLD;
    if (commandLine.hasOption(OptionConstants.RUNNING_NODES_THRESHOLD.getLongOpt())) {
        runningNodesThreshold = Float.parseFloat(commandLine.getOptionValue(OptionConstants.RUNNING_NODES_THRESHOLD.getLongOpt()));
        if (runningNodesThreshold < 0.0f || runningNodesThreshold > 1.0f) {
            throw new IllegalArgumentException("Running nodes threshold value should be between 0.0 and 1.0 (inclusive)");
        }
    }
    return new LlapStatusOptions(name, hiveConf, findAppTimeoutMs, outputFile, refreshIntervalMs, watchMode, watchTimeoutMs, runningNodesThreshold);
}
Also used : GnuParser(org.apache.commons.cli.GnuParser) Properties(java.util.Properties)
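
The OptionConstants values consulted above pair a long option name with an optional short form. The enum below is a hedged reconstruction inferred only from the accessors processOptions calls; the real constant names and option strings in Hive may differ.

// Hedged reconstruction: the enum shape implied by the accessors used above.
// Constant values (long/short option strings) are assumptions, not Hive's real ones.
enum OptionConstants {
    HELP("help", "H"),
    NAME("name", "n"),
    FIND_APP_TIMEOUT("findAppTimeout", "f"),
    HIVECONF("hiveconf", null),
    OUTPUT_FILE("outputFile", "o"),
    STATUS_REFRESH_INTERVAL("refreshInterval", "r"),
    WATCH_MODE("watch", "w"),
    WATCH_MODE_TIMEOUT("watchTimeout", null),
    RUNNING_NODES_THRESHOLD("runningNodesThreshold", null);

    private final String longOpt;
    private final String shortOpt;

    OptionConstants(String longOpt, String shortOpt) {
        this.longOpt = longOpt;
        this.shortOpt = shortOpt;
    }

    String getLongOpt() { return longOpt; }
    String getShortOpt() { return shortOpt; }
}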

Example 23 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hive project.

From the class LlapDump, the method main:

public static void main(String[] args) throws Exception {
    Options opts = createOptions();
    CommandLine cli = new GnuParser().parse(opts, args);
    if (cli.hasOption('h')) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("llapdump", opts);
        return;
    }
    if (cli.hasOption('l')) {
        url = cli.getOptionValue("l");
    }
    if (cli.hasOption('u')) {
        user = cli.getOptionValue("u");
    }
    if (cli.hasOption('p')) {
        pwd = cli.getOptionValue("p");
    }
    if (cli.hasOption('n')) {
        numSplits = cli.getOptionValue("n");
    }
    Properties configProps = cli.getOptionProperties("hiveconf");
    if (cli.getArgs().length > 0) {
        query = cli.getArgs()[0];
    }
    if (query == null) {
        throw new IllegalArgumentException("No query string specified");
    }
    System.out.println("url: " + url);
    System.out.println("user: " + user);
    System.out.println("query: " + query);
    LlapRowInputFormat format = new LlapRowInputFormat();
    JobConf job = new JobConf();
    job.set(LlapBaseInputFormat.URL_KEY, url);
    job.set(LlapBaseInputFormat.USER_KEY, user);
    job.set(LlapBaseInputFormat.PWD_KEY, pwd);
    job.set(LlapBaseInputFormat.QUERY_KEY, query);
    // Additional conf settings specified on the command line
    for (String key : configProps.stringPropertyNames()) {
        job.set(key, configProps.getProperty(key));
    }
    InputSplit[] splits = format.getSplits(job, Integer.parseInt(numSplits));
    if (splits.length == 0) {
        System.out.println("No splits returned - empty scan");
        System.out.println("Results: ");
    } else {
        boolean first = true;
        for (InputSplit s : splits) {
            LOG.info("Processing input split from " + Arrays.toString(s.getLocations()));
            RecordReader<NullWritable, Row> reader = format.getRecordReader(s, job, null);
            if (reader instanceof LlapRowRecordReader && first) {
                Schema schema = ((LlapRowRecordReader) reader).getSchema();
                System.out.println("" + schema);
            }
            if (first) {
                System.out.println("Results: ");
                System.out.println("");
                first = false;
            }
            Row value = reader.createValue();
            while (reader.next(NullWritable.get(), value)) {
                printRow(value);
            }
        }
        System.exit(0);
    }
}
Also used : Options(org.apache.commons.cli.Options) GnuParser(org.apache.commons.cli.GnuParser) Properties(java.util.Properties) NullWritable(org.apache.hadoop.io.NullWritable) HelpFormatter(org.apache.commons.cli.HelpFormatter) CommandLine(org.apache.commons.cli.CommandLine) JobConf(org.apache.hadoop.mapred.JobConf) InputSplit(org.apache.hadoop.mapred.InputSplit)
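
createOptions() is not shown in this excerpt. A minimal sketch consistent with the flags main reads (h, l, u, p, n, plus repeatable hiveconf properties) follows; the long option names and descriptions are assumptions.

import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

// Hypothetical sketch of createOptions(), inferred from the flags main() checks.
class LlapDumpOptionsSketch {
    @SuppressWarnings("static-access")
    static Options createOptions() {
        Options opts = new Options();
        opts.addOption("h", "help", false, "show usage");
        opts.addOption("l", "location", true, "JDBC URL of HiveServer2");
        opts.addOption("u", "user", true, "user name");
        opts.addOption("p", "pwd", true, "password");
        opts.addOption("n", "num", true, "number of splits");
        // Repeatable key=value pairs, read back via getOptionProperties("hiveconf").
        opts.addOption(OptionBuilder.withLongOpt("hiveconf").hasArgs(2)
            .withValueSeparator('=').withArgName("key=value")
            .withDescription("additional configuration").create());
        return opts;
    }
}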

Example 24 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hive project.

From the class HBaseSchemaTool, the method main:

public static void main(String[] args) {
    Options options = new Options();
    options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
    options.addOption(OptionBuilder.withLongOpt("install").withDescription("Install the schema onto an HBase cluster.").create('i'));
    options.addOption(OptionBuilder.withLongOpt("key").withDescription("Key to scan with.  This should be an exact key (not a regular expression").hasArg().create('k'));
    options.addOption(OptionBuilder.withLongOpt("list-tables").withDescription("List tables in HBase metastore").create('l'));
    options.addOption(OptionBuilder.withLongOpt("regex-key").withDescription("Regular expression to scan keys with.").hasArg().create('r'));
    options.addOption(OptionBuilder.withLongOpt("table").withDescription("HBase metastore table to scan").hasArg().create('t'));
    CommandLine cli = null;
    try {
        cli = new GnuParser().parse(options, args);
    } catch (ParseException e) {
        System.err.println("Parse Exception: " + e.getMessage());
        usage(options);
        return;
    }
    if (cli.hasOption('h')) {
        usage(options);
        return;
    }
    Configuration conf = new Configuration();
    if (cli.hasOption('i')) {
        new HBaseSchemaTool().install(conf, System.err);
        return;
    }
    String key = null;
    if (cli.hasOption('k'))
        key = cli.getOptionValue('k');
    String regex = null;
    if (cli.hasOption('r'))
        regex = cli.getOptionValue('r');
    if (key != null && regex != null) {
        usage(options);
        return;
    }
    if (key == null && regex == null)
        regex = ".*";
    // I do this in the object rather than in the static main so that it's easier to test.
    new HBaseSchemaTool().go(cli.hasOption('l'), cli.getOptionValue('t'), key, regex, conf, System.out, System.err);
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) Configuration(org.apache.hadoop.conf.Configuration) GnuParser(org.apache.commons.cli.GnuParser) ParseException(org.apache.commons.cli.ParseException)
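
The usage helper also sits outside this excerpt. A plausible implementation delegates to Commons CLI's HelpFormatter; the command-name string here is an assumption.

import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;

// Hypothetical usage() helper; prints the standard Commons CLI help text.
class UsageSketch {
    static void usage(Options options) {
        new HelpFormatter().printHelp("hbaseschematool", options);
    }
}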

Example 25 with GnuParser

Use of org.apache.commons.cli.GnuParser in the Apache Hive project.

From the class HBaseImport, the method init:

private int init(String... args) throws ParseException {
    Options options = new Options();
    doAll = doKerberos = false;
    parallel = 1;
    batchSize = 1000;
    options.addOption(OptionBuilder.withLongOpt("all").withDescription("Import the full metastore").create('a'));
    options.addOption(OptionBuilder.withLongOpt("batchsize").withDescription("Number of partitions to read and write in a batch, defaults to 1000").hasArg().create('b'));
    options.addOption(OptionBuilder.withLongOpt("database").withDescription("Import a single database").hasArgs().create('d'));
    options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
    options.addOption(OptionBuilder.withLongOpt("function").withDescription("Import a single function").hasArgs().create('f'));
    options.addOption(OptionBuilder.withLongOpt("kerberos").withDescription("Import all kerberos related objects (master key, tokens)").create('k'));
    options.addOption(OptionBuilder.withLongOpt("parallel").withDescription("Parallel factor for loading (only applied to tables and partitions), " + "defaults to 1").hasArg().create('p'));
    options.addOption(OptionBuilder.withLongOpt("role").withDescription("Import a single role").hasArgs().create('r'));
    options.addOption(OptionBuilder.withLongOpt("tables").withDescription("Import a single tables").hasArgs().create('t'));
    CommandLine cli = new GnuParser().parse(options, args);
    // Process help, if it was asked for, this must be done first
    if (cli.hasOption('h')) {
        printHelp(options);
        return 1;
    }
    boolean hasCmd = false;
    // Now process the other command line args
    if (cli.hasOption('a')) {
        hasCmd = true;
        doAll = true;
    }
    if (cli.hasOption('b')) {
        batchSize = Integer.parseInt(cli.getOptionValue('b'));
    }
    if (cli.hasOption('d')) {
        hasCmd = true;
        dbsToImport = Arrays.asList(cli.getOptionValues('d'));
    }
    if (cli.hasOption('f')) {
        hasCmd = true;
        functionsToImport = Arrays.asList(cli.getOptionValues('f'));
    }
    if (cli.hasOption('p')) {
        parallel = Integer.parseInt(cli.getOptionValue('p'));
    }
    if (cli.hasOption('r')) {
        hasCmd = true;
        rolesToImport = Arrays.asList(cli.getOptionValues('r'));
    }
    if (cli.hasOption('k')) {
        doKerberos = true;
    }
    if (cli.hasOption('t')) {
        hasCmd = true;
        tablesToImport = Arrays.asList(cli.getOptionValues('t'));
    }
    if (!hasCmd) {
        printHelp(options);
        return 1;
    }
    dbs = new ArrayList<>();
    // We don't want to bound the size of the table queue because we keep it all in memory
    partitionedTables = new LinkedBlockingQueue<>();
    tableNameQueue = new LinkedBlockingQueue<>();
    indexNameQueue = new LinkedBlockingQueue<>();
    // Bound the size of this queue so we don't get too much in memory.
    partQueue = new ArrayBlockingQueue<>(parallel * 2);
    return 0;
}
Also used : Options(org.apache.commons.cli.Options) CommandLine(org.apache.commons.cli.CommandLine) GnuParser(org.apache.commons.cli.GnuParser)
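
Because the database, function, role, and table options are declared with hasArgs(), each accepts multiple values, and getOptionValues returns them as an array. The standalone sketch below demonstrates that round trip with illustrative names.

import java.util.Arrays;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

// Illustrative demo: a multi-valued option parsed with GnuParser.
public class MultiValueDemo {
    @SuppressWarnings("static-access")
    public static void main(String[] args) throws Exception {
        Options options = new Options();
        options.addOption(OptionBuilder.withLongOpt("database")
            .withDescription("databases to import").hasArgs().create('d'));
        // "-d db1 -d db2" and "-d db1 db2" both yield [db1, db2].
        CommandLine cli = new GnuParser().parse(options, new String[] { "-d", "db1", "-d", "db2" });
        System.out.println(Arrays.asList(cli.getOptionValues('d')));
    }
}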

Aggregations

GnuParser (org.apache.commons.cli.GnuParser): 208
CommandLine (org.apache.commons.cli.CommandLine): 187
Options (org.apache.commons.cli.Options): 165
CommandLineParser (org.apache.commons.cli.CommandLineParser): 158
ParseException (org.apache.commons.cli.ParseException): 139
HelpFormatter (org.apache.commons.cli.HelpFormatter): 92
Path (org.apache.hadoop.fs.Path): 40
Option (org.apache.commons.cli.Option): 39
IOException (java.io.IOException): 32
Job (org.apache.hadoop.mapreduce.Job): 27
File (java.io.File): 24
Configuration (org.apache.hadoop.conf.Configuration): 19
FileInputStream (java.io.FileInputStream): 14
ArrayList (java.util.ArrayList): 14
Properties (java.util.Properties): 13
FileSystem (org.apache.hadoop.fs.FileSystem): 11
MissingArgumentException (org.apache.commons.cli.MissingArgumentException): 9
FileNotFoundException (java.io.FileNotFoundException): 7
URI (java.net.URI): 7
URISyntaxException (java.net.URISyntaxException): 6