
Example 16 with GnuParser

Use of org.apache.commons.cli.GnuParser in project hadoop by apache.

The class ApplicationMaster, method init.

/**
   * Parse command line options
   *
   * @param args Command line args
   * @return whether initialization succeeded and run should be invoked
   * @throws ParseException
   * @throws IOException
   */
public boolean init(String[] args) throws ParseException, IOException {
    Options opts = new Options();
    opts.addOption("app_attempt_id", true, "App Attempt ID. Not to be used unless for testing purposes");
    opts.addOption("shell_env", true, "Environment for shell script. Specified as env_key=env_val pairs");
    opts.addOption("container_memory", true, "Amount of memory in MB to be requested to run the shell command");
    opts.addOption("container_vcores", true, "Amount of virtual cores to be requested to run the shell command");
    opts.addOption("num_containers", true, "No. of containers on which the shell command needs to be executed");
    opts.addOption("priority", true, "Application Priority. Default 0");
    opts.addOption("container_retry_policy", true, "Retry policy when container fails to run, " + "0: NEVER_RETRY, 1: RETRY_ON_ALL_ERRORS, " + "2: RETRY_ON_SPECIFIC_ERROR_CODES");
    opts.addOption("container_retry_error_codes", true, "When retry policy is set to RETRY_ON_SPECIFIC_ERROR_CODES, error " + "codes is specified with this option, " + "e.g. --container_retry_error_codes 1,2,3");
    opts.addOption("container_max_retries", true, "If container could retry, it specifies max retires");
    opts.addOption("container_retry_interval", true, "Interval between each retry, unit is milliseconds");
    opts.addOption("debug", false, "Dump out debug information");
    opts.addOption("help", false, "Print usage");
    CommandLine cliParser = new GnuParser().parse(opts, args);
    if (args.length == 0) {
        printUsage(opts);
        throw new IllegalArgumentException("No args specified for application master to initialize");
    }
    // Check whether a custom log4j.properties file exists
    if (fileExist(log4jPath)) {
        try {
            Log4jPropertyHelper.updateLog4jConfiguration(ApplicationMaster.class, log4jPath);
        } catch (Exception e) {
            LOG.warn("Can not set up custom log4j properties. " + e);
        }
    }
    if (cliParser.hasOption("help")) {
        printUsage(opts);
        return false;
    }
    if (cliParser.hasOption("debug")) {
        dumpOutDebugInfo();
    }
    Map<String, String> envs = System.getenv();
    if (!envs.containsKey(Environment.CONTAINER_ID.name())) {
        if (cliParser.hasOption("app_attempt_id")) {
            String appIdStr = cliParser.getOptionValue("app_attempt_id", "");
            appAttemptID = ApplicationAttemptId.fromString(appIdStr);
        } else {
            throw new IllegalArgumentException("Application Attempt Id not set in the environment");
        }
    } else {
        ContainerId containerId = ContainerId.fromString(envs.get(Environment.CONTAINER_ID.name()));
        appAttemptID = containerId.getApplicationAttemptId();
    }
    if (!envs.containsKey(ApplicationConstants.APP_SUBMIT_TIME_ENV)) {
        throw new RuntimeException(ApplicationConstants.APP_SUBMIT_TIME_ENV + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_HOST.name())) {
        throw new RuntimeException(Environment.NM_HOST.name() + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_HTTP_PORT.name())) {
        throw new RuntimeException(Environment.NM_HTTP_PORT.name() + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_PORT.name())) {
        throw new RuntimeException(Environment.NM_PORT.name() + " not set in the environment");
    }
    LOG.info("Application master for app" + ", appId=" + appAttemptID.getApplicationId().getId() + ", clustertimestamp=" + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId=" + appAttemptID.getAttemptId());
    String scriptLocation = envs.get(DSConstants.DISTRIBUTEDSHELLSCRIPTLOCATION);
    if (!fileExist(shellCommandPath) && (scriptLocation == null || scriptLocation.isEmpty())) {
        throw new IllegalArgumentException("No shell command or shell script specified to be executed by application master");
    }
    if (fileExist(shellCommandPath)) {
        shellCommand = readContent(shellCommandPath);
    }
    if (fileExist(shellArgsPath)) {
        shellArgs = readContent(shellArgsPath);
    }
    if (cliParser.hasOption("shell_env")) {
        String[] shellEnvs = cliParser.getOptionValues("shell_env");
        for (String env : shellEnvs) {
            env = env.trim();
            int index = env.indexOf('=');
            if (index == -1) {
                shellEnv.put(env, "");
                continue;
            }
            String key = env.substring(0, index);
            String val = "";
            if (index < (env.length() - 1)) {
                val = env.substring(index + 1);
            }
            shellEnv.put(key, val);
        }
    }
    if (envs.containsKey(DSConstants.DISTRIBUTEDSHELLSCRIPTLOCATION)) {
        scriptPath = envs.get(DSConstants.DISTRIBUTEDSHELLSCRIPTLOCATION);
        if (envs.containsKey(DSConstants.DISTRIBUTEDSHELLSCRIPTTIMESTAMP)) {
            shellScriptPathTimestamp = Long.parseLong(envs.get(DSConstants.DISTRIBUTEDSHELLSCRIPTTIMESTAMP));
        }
        if (envs.containsKey(DSConstants.DISTRIBUTEDSHELLSCRIPTLEN)) {
            shellScriptPathLen = Long.parseLong(envs.get(DSConstants.DISTRIBUTEDSHELLSCRIPTLEN));
        }
        if (!scriptPath.isEmpty() && (shellScriptPathTimestamp <= 0 || shellScriptPathLen <= 0)) {
            LOG.error("Illegal values in env for shell script path" + ", path=" + scriptPath + ", len=" + shellScriptPathLen + ", timestamp=" + shellScriptPathTimestamp);
            throw new IllegalArgumentException("Illegal values in env for shell script path");
        }
    }
    if (envs.containsKey(DSConstants.DISTRIBUTEDSHELLTIMELINEDOMAIN)) {
        domainId = envs.get(DSConstants.DISTRIBUTEDSHELLTIMELINEDOMAIN);
    }
    containerMemory = Integer.parseInt(cliParser.getOptionValue("container_memory", "10"));
    containerVirtualCores = Integer.parseInt(cliParser.getOptionValue("container_vcores", "1"));
    numTotalContainers = Integer.parseInt(cliParser.getOptionValue("num_containers", "1"));
    if (numTotalContainers == 0) {
        throw new IllegalArgumentException("Cannot run distributed shell with no containers");
    }
    requestPriority = Integer.parseInt(cliParser.getOptionValue("priority", "0"));
    containerRetryPolicy = ContainerRetryPolicy.values()[Integer.parseInt(cliParser.getOptionValue("container_retry_policy", "0"))];
    if (cliParser.hasOption("container_retry_error_codes")) {
        containerRetryErrorCodes = new HashSet<>();
        for (String errorCode : cliParser.getOptionValue("container_retry_error_codes").split(",")) {
            containerRetryErrorCodes.add(Integer.parseInt(errorCode));
        }
    }
    containerMaxRetries = Integer.parseInt(cliParser.getOptionValue("container_max_retries", "0"));
    containerRetryInterval = Integer.parseInt(cliParser.getOptionValue("container_retry_interval", "0"));
    if (YarnConfiguration.timelineServiceEnabled(conf)) {
        timelineServiceV2Enabled = ((int) YarnConfiguration.getTimelineServiceVersion(conf) == 2);
        timelineServiceV1Enabled = !timelineServiceV2Enabled;
    } else {
        timelineClient = null;
        timelineV2Client = null;
        LOG.warn("Timeline service is not enabled");
    }
    return true;
}
Also used: Options (org.apache.commons.cli.Options), CommandLine (org.apache.commons.cli.CommandLine), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), GnuParser (org.apache.commons.cli.GnuParser), URISyntaxException (java.net.URISyntaxException), ParseException (org.apache.commons.cli.ParseException), YarnException (org.apache.hadoop.yarn.exceptions.YarnException), IOException (java.io.IOException), UndeclaredThrowableException (java.lang.reflect.UndeclaredThrowableException), ClientHandlerException (com.sun.jersey.api.client.ClientHandlerException)
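Two idioms in init are worth isolating: getOptionValue with a default string for optional numeric options, and splitting repeatable key=value options on the first '=' only, so values may themselves contain '='. A minimal, self-contained sketch of both follows; the class and option names are illustrative, not part of the Hadoop code.

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ShellEnvParseDemo {
    public static void main(String[] args) throws ParseException {
        Options opts = new Options();
        // Repeatable option: each occurrence contributes one key=value pair.
        opts.addOption("shell_env", true, "Environment as key=value pairs");
        opts.addOption("container_memory", true, "Container memory in MB");
        CommandLine cli = new GnuParser().parse(opts, args);
        // A default string avoids null checks for optional options.
        int memory = Integer.parseInt(cli.getOptionValue("container_memory", "10"));
        Map<String, String> env = new HashMap<>();
        if (cli.hasOption("shell_env")) {
            for (String pair : cli.getOptionValues("shell_env")) {
                String p = pair.trim();
                // Split on the first '=' only; a bare key maps to the empty string.
                int idx = p.indexOf('=');
                String key = idx == -1 ? p : p.substring(0, idx);
                String val = idx == -1 || idx == p.length() - 1 ? "" : p.substring(idx + 1);
                env.put(key, val);
            }
        }
        System.out.println("memory=" + memory + " env=" + env);
    }
}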

Example 17 with GnuParser

Use of org.apache.commons.cli.GnuParser in project hbase by apache.

The class HMasterCommandLine, method run.

public int run(String[] args) throws Exception {
    Options opt = new Options();
    opt.addOption("localRegionServers", true, "RegionServers to start in master process when running standalone");
    opt.addOption("masters", true, "Masters to start in this process");
    opt.addOption("minRegionServers", true, "Minimum RegionServers needed to host user tables");
    opt.addOption("backup", false, "Do not try to become HMaster until the primary fails");
    CommandLine cmd;
    try {
        cmd = new GnuParser().parse(opt, args);
    } catch (ParseException e) {
        LOG.error("Could not parse: ", e);
        usage(null);
        return 1;
    }
    if (cmd.hasOption("minRegionServers")) {
        String val = cmd.getOptionValue("minRegionServers");
        getConf().setInt("hbase.regions.server.count.min", Integer.parseInt(val));
        LOG.debug("minRegionServers set to " + val);
    }
    // minRegionServers used to be minServers.  Support it too.
    if (cmd.hasOption("minServers")) {
        String val = cmd.getOptionValue("minServers");
        getConf().setInt("hbase.regions.server.count.min", Integer.parseInt(val));
        LOG.debug("minServers set to " + val);
    }
    // check if we are the backup master - override the conf if so
    if (cmd.hasOption("backup")) {
        getConf().setBoolean(HConstants.MASTER_TYPE_BACKUP, true);
    }
    // How many regionservers to start inside this process (we run regionservers in the
    // same process as the master when in local/standalone mode; useful for testing)
    if (cmd.hasOption("localRegionServers")) {
        String val = cmd.getOptionValue("localRegionServers");
        getConf().setInt("hbase.regionservers", Integer.parseInt(val));
        LOG.debug("localRegionServers set to " + val);
    }
    // How many masters to startup inside this process; useful testing
    if (cmd.hasOption("masters")) {
        String val = cmd.getOptionValue("masters");
        getConf().setInt("hbase.masters", Integer.parseInt(val));
        LOG.debug("masters set to " + val);
    }
    @SuppressWarnings("unchecked") List<String> remainingArgs = cmd.getArgList();
    if (remainingArgs.size() != 1) {
        usage(null);
        return 1;
    }
    String command = remainingArgs.get(0);
    if ("start".equals(command)) {
        return startMaster();
    } else if ("stop".equals(command)) {
        return stopMaster();
    } else if ("clear".equals(command)) {
        return (ZNodeClearer.clear(getConf()) ? 0 : 1);
    } else {
        usage("Invalid command: " + command);
        return 1;
    }
}
Also used: Options (org.apache.commons.cli.Options), CommandLine (org.apache.commons.cli.CommandLine), ServerCommandLine (org.apache.hadoop.hbase.util.ServerCommandLine), GnuParser (org.apache.commons.cli.GnuParser), ParseException (org.apache.commons.cli.ParseException)
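The run method above shows the usual parse-or-usage flow: catch ParseException, print help, return a non-zero exit code, and treat whatever remains in getArgList() as the positional command. A compressed sketch of that flow, with invented names and a hypothetical usage string:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseOrUsageDemo {
    public static void main(String[] args) {
        Options opt = new Options();
        opt.addOption("backup", false, "Run as a backup master");
        opt.addOption("masters", true, "Number of masters to start");
        CommandLine cmd;
        try {
            cmd = new GnuParser().parse(opt, args);
        } catch (ParseException e) {
            // Bad flags are a user error: show usage and fail with a non-zero status.
            System.err.println("Could not parse: " + e.getMessage());
            new HelpFormatter().printHelp("demo [options] start|stop", opt);
            System.exit(1);
            return; // keeps the compiler's definite-assignment check satisfied
        }
        // Options are consumed by the parser; positional args survive in getArgList().
        if (cmd.getArgList().size() != 1) {
            new HelpFormatter().printHelp("demo [options] start|stop", opt);
            System.exit(1);
        }
        System.out.println("command=" + cmd.getArgList().get(0) + ", backup=" + cmd.hasOption("backup"));
    }
}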

Example 18 with GnuParser

Use of org.apache.commons.cli.GnuParser in project hive by apache.

The class StreamingIntegrationTester, method main.

public static void main(String[] args) {
    try {
        LogUtils.initHiveLog4j();
    } catch (LogUtils.LogInitializationException e) {
        System.err.println("Unable to initialize log4j " + StringUtils.stringifyException(e));
        System.exit(-1);
    }
    Options options = new Options();
    options.addOption(OptionBuilder.hasArg().withArgName("abort-pct").withDescription("Percentage of transactions to abort, defaults to 5").withLongOpt("abortpct").create('a'));
    options.addOption(OptionBuilder.hasArgs().withArgName("column-names").withDescription("column names of table to write to").withLongOpt("columns").withValueSeparator(',').isRequired().create('c'));
    options.addOption(OptionBuilder.hasArg().withArgName("database").withDescription("Database of table to write to").withLongOpt("database").isRequired().create('d'));
    options.addOption(OptionBuilder.hasArg().withArgName("frequency").withDescription("How often to commit a transaction, in seconds, defaults to 1").withLongOpt("frequency").create('f'));
    options.addOption(OptionBuilder.hasArg().withArgName("iterations").withDescription("Number of batches to write, defaults to 10").withLongOpt("num-batches").create('i'));
    options.addOption(OptionBuilder.hasArg().withArgName("metastore-uri").withDescription("URI of Hive metastore").withLongOpt("metastore-uri").isRequired().create('m'));
    options.addOption(OptionBuilder.hasArg().withArgName("num_transactions").withDescription("Number of transactions per batch, defaults to 100").withLongOpt("num-txns").create('n'));
    options.addOption(OptionBuilder.hasArgs().withArgName("partition-values").withDescription("partition values, must be provided in order of partition columns, " + "if not provided table is assumed to not be partitioned").withLongOpt("partition").withValueSeparator(',').create('p'));
    options.addOption(OptionBuilder.hasArg().withArgName("records-per-transaction").withDescription("records to write in each transaction, defaults to 100").withLongOpt("records-per-txn").withValueSeparator(',').create('r'));
    options.addOption(OptionBuilder.hasArgs().withArgName("column-types").withDescription("column types, valid values are string, int, float, decimal, date, " + "datetime").withLongOpt("schema").withValueSeparator(',').isRequired().create('s'));
    options.addOption(OptionBuilder.hasArg().withArgName("table").withDescription("Table to write to").withLongOpt("table").isRequired().create('t'));
    options.addOption(OptionBuilder.hasArg().withArgName("num-writers").withDescription("Number of writers to create, defaults to 2").withLongOpt("writers").create('w'));
    options.addOption(OptionBuilder.hasArg(false).withArgName("pause").withDescription("Wait on keyboard input after commit & batch close. default: disabled").withLongOpt("pause").create('x'));
    Parser parser = new GnuParser();
    CommandLine cmdline = null;
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        usage(options);
        // usage() is expected to exit; returning here also guards against a null cmdline
        return;
    }
    boolean pause = cmdline.hasOption('x');
    String db = cmdline.getOptionValue('d');
    String table = cmdline.getOptionValue('t');
    String uri = cmdline.getOptionValue('m');
    int txnsPerBatch = Integer.parseInt(cmdline.getOptionValue('n', "100"));
    int writers = Integer.parseInt(cmdline.getOptionValue('w', "2"));
    int batches = Integer.parseInt(cmdline.getOptionValue('i', "10"));
    int recordsPerTxn = Integer.parseInt(cmdline.getOptionValue('r', "100"));
    int frequency = Integer.parseInt(cmdline.getOptionValue('f', "1"));
    int ap = Integer.parseInt(cmdline.getOptionValue('a', "5"));
    float abortPct = ((float) ap) / 100.0f;
    String[] partVals = cmdline.getOptionValues('p');
    String[] cols = cmdline.getOptionValues('c');
    String[] types = cmdline.getOptionValues('s');
    StreamingIntegrationTester sit = new StreamingIntegrationTester(db, table, uri, txnsPerBatch, writers, batches, recordsPerTxn, frequency, abortPct, partVals, cols, types, pause);
    sit.go();
}
Also used: Options (org.apache.commons.cli.Options), GnuParser (org.apache.commons.cli.GnuParser), Parser (org.apache.commons.cli.Parser), CommandLine (org.apache.commons.cli.CommandLine), LogUtils (org.apache.hadoop.hive.common.LogUtils), ParseException (org.apache.commons.cli.ParseException)
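The Hive tester builds its options with OptionBuilder, a fluent but fully static (and therefore not thread-safe) builder that Commons CLI deprecated in 1.3 in favor of the per-instance Option.builder. A sketch of the two styles side by side; it assumes Commons CLI 1.3+ on the classpath, and the option text is illustrative:

import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

public class BuilderStylesDemo {
    @SuppressWarnings("static-access") // OptionBuilder's fluent API is all static methods
    public static void main(String[] args) {
        Options options = new Options();
        // Commons CLI 1.2 style, as in the Hive tester above: static OptionBuilder.
        options.addOption(OptionBuilder
                .hasArgs()
                .withArgName("column-names")
                .withDescription("column names of table to write to")
                .withLongOpt("columns")
                .withValueSeparator(',')
                .isRequired()
                .create('c'));
        // Commons CLI 1.3+ equivalent: a per-option builder with no shared static
        // state, so it is safe to use concurrently.
        options.addOption(Option.builder("s")
                .longOpt("schema")
                .hasArgs()
                .argName("column-types")
                .valueSeparator(',')
                .required()
                .desc("column types, e.g. string,int,float")
                .build());
    }
}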

Example 19 with GnuParser

Use of org.apache.commons.cli.GnuParser in project hive by apache.

The class HiveMetaTool, method main.

public static void main(String[] args) {
    HiveMetaTool metaTool = new HiveMetaTool();
    metaTool.init();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        try {
            line = parser.parse(metaTool.cmdLineOptions, args);
        } catch (ParseException e) {
            System.err.println("HiveMetaTool:Parsing failed.  Reason: " + e.getLocalizedMessage());
            printAndExit(metaTool);
        }
        if (line.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("metatool", metaTool.cmdLineOptions);
        } else if (line.hasOption("listFSRoot")) {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            }
            metaTool.listFSRoot();
        } else if (line.hasOption("executeJDOQL")) {
            String query = line.getOptionValue("executeJDOQL");
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            }
            if (query.toLowerCase().trim().startsWith("select")) {
                metaTool.executeJDOQLSelect(query);
            } else if (query.toLowerCase().trim().startsWith("update")) {
                metaTool.executeJDOQLUpdate(query);
            } else {
                System.err.println("HiveMetaTool:Unsupported statement type");
                printAndExit(metaTool);
            }
        } else if (line.hasOption("updateLocation")) {
            String[] loc = line.getOptionValues("updateLocation");
            boolean isDryRun = false;
            String serdepropKey = null;
            String tablePropKey = null;
            if (loc.length != 2 && loc.length != 3) {
                System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " + "optional arguments but " + "was passed " + loc.length + " arguments");
                printAndExit(metaTool);
            }
            Path newPath = new Path(loc[0]);
            Path oldPath = new Path(loc[1]);
            URI oldURI = oldPath.toUri();
            URI newURI = newPath.toUri();
            if (line.hasOption("dryRun")) {
                isDryRun = true;
            }
            if (line.hasOption("serdePropKey")) {
                serdepropKey = line.getOptionValue("serdePropKey");
            }
            if (line.hasOption("tablePropKey")) {
                tablePropKey = line.getOptionValue("tablePropKey");
            }
            /*
         * validate input - Both new and old URI should contain valid host names and valid schemes.
         * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
         */
            if (oldURI.getHost() == null || newURI.getHost() == null) {
                System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
            } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
                System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
            } else {
                metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
            }
        } else {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
                printAndExit(metaTool);
            } else {
                System.err.print("HiveMetaTool:Parsing failed.  Reason: Invalid arguments: ");
                for (String s : line.getArgs()) {
                    System.err.print(s + " ");
                }
                System.err.println();
            }
            printAndExit(metaTool);
        }
    } finally {
        metaTool.shutdownObjectStore();
    }
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), Path (org.apache.hadoop.fs.Path), CommandLine (org.apache.commons.cli.CommandLine), GnuParser (org.apache.commons.cli.GnuParser), CommandLineParser (org.apache.commons.cli.CommandLineParser), ParseException (org.apache.commons.cli.ParseException), URI (java.net.URI)
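HiveMetaTool enforces "exactly one of listFSRoot / executeJDOQL / updateLocation" with hand-written hasOption checks. Commons CLI's OptionGroup can push that mutual exclusivity into the parser itself; the sketch below shows the idea, though cross-option rules such as dryRun being valid only with some modes would still need manual checks. Option descriptions are paraphrased:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ExclusiveModesDemo {
    public static void main(String[] args) {
        // The three top-level modes are mutually exclusive; an OptionGroup lets
        // the parser reject combinations instead of hand-written hasOption checks.
        OptionGroup modes = new OptionGroup();
        modes.addOption(new Option("listFSRoot", false, "print the current FS root"));
        modes.addOption(new Option("executeJDOQL", true, "execute the given JDOQL query"));
        modes.addOption(new Option("updateLocation", true, "update the FS root location"));
        modes.setRequired(true); // exactly one mode must be chosen
        Options options = new Options();
        options.addOptionGroup(modes);
        options.addOption("dryRun", false, "report changes without applying them");
        CommandLineParser parser = new GnuParser();
        try {
            CommandLine line = parser.parse(options, args);
            System.out.println("selected mode: " + modes.getSelected());
        } catch (ParseException e) {
            // Picking two modes at once surfaces here as an AlreadySelectedException.
            System.err.println("Invalid arguments: " + e.getMessage());
        }
    }
}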

Example 20 with GnuParser

Use of org.apache.commons.cli.GnuParser in project hbase by apache.

The class TestJoinedScanners, method main.

/**
   * Command line interface:
   * @param args command line options
   * @throws Exception if parsing fails or the scan test hits an error
   */
public static void main(final String[] args) throws Exception {
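    // Note: 'options' and the fields assigned below (blockEncoding, selectionRatio,
    // valueWidth) are static members of the enclosing test class, not shown in this excerpt.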
    Option encodingOption = new Option("e", "blockEncoding", true, "Data block encoding; Default: FAST_DIFF");
    encodingOption.setRequired(false);
    options.addOption(encodingOption);
    Option ratioOption = new Option("r", "selectionRatio", true, "Ratio of selected rows using essential column family");
    ratioOption.setRequired(false);
    options.addOption(ratioOption);
    Option widthOption = new Option("w", "valueWidth", true, "Width of value for non-essential column family");
    widthOption.setRequired(false);
    options.addOption(widthOption);
    CommandLineParser parser = new GnuParser();
    CommandLine cmd = parser.parse(options, args);
    if (args.length < 1) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("TestJoinedScanners", options, true);
    }
    if (cmd.hasOption("e")) {
        blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e"));
    }
    if (cmd.hasOption("r")) {
        selectionRatio = Integer.parseInt(cmd.getOptionValue("r"));
    }
    if (cmd.hasOption("w")) {
        valueWidth = Integer.parseInt(cmd.getOptionValue("w"));
    }
    // run the test
    TestJoinedScanners test = new TestJoinedScanners();
    test.testJoinedScanners();
}
Also used: HelpFormatter (org.apache.commons.cli.HelpFormatter), CommandLine (org.apache.commons.cli.CommandLine), GnuParser (org.apache.commons.cli.GnuParser), Option (org.apache.commons.cli.Option), CommandLineParser (org.apache.commons.cli.CommandLineParser)
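One quirk in the test's main is that it prints help when no arguments are given but then falls through and runs with defaults anyway. The sketch below keeps the same option shape (short plus long names, defaults) but stops after printing help; the true flag asks HelpFormatter to generate a usage line as well. All names here are illustrative:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class HelpAndDefaultsDemo {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("e", "blockEncoding", true, "data block encoding, e.g. FAST_DIFF");
        options.addOption("w", "valueWidth", true, "value width in bytes");
        CommandLineParser parser = new GnuParser();
        CommandLine cmd = parser.parse(options, args);
        if (args.length < 1) {
            // The third argument asks HelpFormatter to generate a usage line too.
            new HelpFormatter().printHelp("HelpAndDefaultsDemo", options, true);
            return; // unlike the test above, stop rather than fall through with defaults
        }
        String encoding = cmd.getOptionValue("e", "FAST_DIFF");
        int width = Integer.parseInt(cmd.getOptionValue("w", "10"));
        System.out.println("encoding=" + encoding + ", width=" + width);
    }
}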

Aggregations

GnuParser (org.apache.commons.cli.GnuParser): 208
CommandLine (org.apache.commons.cli.CommandLine): 187
Options (org.apache.commons.cli.Options): 165
CommandLineParser (org.apache.commons.cli.CommandLineParser): 158
ParseException (org.apache.commons.cli.ParseException): 139
HelpFormatter (org.apache.commons.cli.HelpFormatter): 92
Path (org.apache.hadoop.fs.Path): 40
Option (org.apache.commons.cli.Option): 39
IOException (java.io.IOException): 32
Job (org.apache.hadoop.mapreduce.Job): 27
File (java.io.File): 24
Configuration (org.apache.hadoop.conf.Configuration): 19
FileInputStream (java.io.FileInputStream): 14
ArrayList (java.util.ArrayList): 14
Properties (java.util.Properties): 13
FileSystem (org.apache.hadoop.fs.FileSystem): 11
MissingArgumentException (org.apache.commons.cli.MissingArgumentException): 9
FileNotFoundException (java.io.FileNotFoundException): 7
URI (java.net.URI): 7
URISyntaxException (java.net.URISyntaxException): 6
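A closing note on the parser these 208 call sites share: GnuParser, together with BasicParser and PosixParser, has been deprecated since Commons CLI 1.3 in favor of DefaultParser, and the migration is mechanical. A minimal sketch, assuming Commons CLI 1.3+:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class DefaultParserMigration {
    public static void main(String[] args) throws ParseException {
        Options options = new Options();
        options.addOption("help", false, "print usage");
        // Before (Commons CLI <= 1.2): CommandLineParser parser = new GnuParser();
        // After  (Commons CLI >= 1.3): DefaultParser handles both GNU-style
        // long options and POSIX-style bundled short options.
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options, args);
        System.out.println("help requested: " + cmd.hasOption("help"));
    }
}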