Use of org.apache.commons.cli.Option in the Apache Hadoop project.
Example: the main method of the NodeInfo class.
/**
 * Validates Hadoop configuration files.
 *
 * <p>Files are taken from the {@code -conffile} option values (each value may
 * be a file or a directory of files); when no {@code -conffile} is given,
 * every file under {@code $HADOOP_CONF_DIR} is validated instead. Each file is
 * checked via {@code checkConf}; errors are printed to stderr and the process
 * terminates non-zero if any file is invalid.
 *
 * @param args command-line arguments; generic Hadoop options are stripped
 *             first, then the tool-specific options ({@code -conffile},
 *             {@code -h}/{@code --help}) are parsed from the remainder
 * @throws IOException if a configuration file cannot be opened or read
 */
@SuppressWarnings("static-access")
public static void main(String[] args) throws IOException {
  GenericOptionsParser genericParser = new GenericOptionsParser(args);
  String[] remainingArgs = genericParser.getRemainingArgs();
  Option conf = OptionBuilder.hasArg().create("conffile");
  Option help = OptionBuilder.withLongOpt("help").create('h');
  Options opts = new Options().addOption(conf).addOption(help);
  CommandLineParser specificParser = new GnuParser();
  CommandLine cmd = null;
  try {
    cmd = specificParser.parse(opts, remainingArgs);
  } catch (MissingArgumentException e) {
    terminate(1, "No argument specified for -conffile option");
  } catch (ParseException e) {
    terminate(1, USAGE);
  }
  if (cmd == null) {
    terminate(1, "Failed to parse options");
    // terminate is expected to exit; the return guards against an NPE below
    // in case it ever doesn't.
    return;
  }
  if (cmd.hasOption('h')) {
    terminate(0, USAGE);
  }
  List<File> files = new ArrayList<File>();
  if (cmd.hasOption("conffile")) {
    String[] values = cmd.getOptionValues("conffile");
    for (String value : values) {
      File confFile = new File(value);
      if (confFile.isFile()) {
        files.add(confFile);
      } else if (confFile.isDirectory()) {
        for (File file : listFiles(confFile)) {
          files.add(file);
        }
      } else {
        terminate(1, confFile.getAbsolutePath() + " is neither a file nor directory");
      }
    }
  } else {
    String confDirName = System.getenv(HADOOP_CONF_DIR);
    if (confDirName == null) {
      terminate(1, HADOOP_CONF_DIR + " is not defined");
    }
    File confDir = new File(confDirName);
    if (!confDir.isDirectory()) {
      terminate(1, HADOOP_CONF_DIR + " is not a directory");
    }
    files = Arrays.asList(listFiles(confDir));
  }
  if (files.isEmpty()) {
    terminate(1, "No input file to validate");
  }
  boolean ok = true;
  for (File file : files) {
    String path = file.getAbsolutePath();
    List<String> errors;
    // try-with-resources: the original leaked this FileInputStream, one
    // open descriptor per validated file.
    try (FileInputStream in = new FileInputStream(file)) {
      errors = checkConf(in);
    }
    if (errors.isEmpty()) {
      System.out.println(path + ": valid");
    } else {
      ok = false;
      System.err.println(path + ":");
      for (String error : errors) {
        System.err.println("\t" + error);
      }
    }
  }
  if (ok) {
    System.out.println("OK");
  } else {
    terminate(1, "Invalid file exists");
  }
}
Use of org.apache.commons.cli.Option in the Apache Hadoop project.
Example: the testCreateWithOptions method of the TestGenericOptionsParser class.
/**
 * Verifies that an Options instance passed to the GenericOptionsParser
 * constructor actually participates in argument parsing.
 */
@SuppressWarnings("static-access")
@Test
public void testCreateWithOptions() throws Exception {
  // Register a custom "newOpt" option that takes an int-valued argument.
  Options options = new Options();
  options.addOption(
      OptionBuilder.withArgName("int").hasArg().withDescription("A new option").create("newOpt"));
  // Parse arguments that exercise the custom option and confirm its value
  // is picked up rather than silently dropped.
  String[] cliArgs = { "--newOpt", "7" };
  GenericOptionsParser parser = new GenericOptionsParser(options, cliArgs);
  assertEquals("New option was ignored", "7",
      parser.getCommandLine().getOptionValues("newOpt")[0]);
}
Use of org.apache.commons.cli.Option in the Apache Hadoop project.
Example: the parseArgs method of the JMXGet class.
/**
 * Parses the JMXGet command-line arguments against the supplied option set.
 *
 * <p>Registers the five recognized options (-server, -help, -service, -port,
 * -localVM) on {@code opts}, then parses {@code args} leniently (unknown
 * trailing arguments are permitted).
 *
 * @param opts option set to populate and parse against
 * @param args raw command-line arguments
 * @return the parsed command line
 * @throws IllegalArgumentException if the arguments cannot be parsed; usage
 *         is printed before the exception is thrown
 */
private static CommandLine parseArgs(Options opts, String... args) throws IllegalArgumentException {
  // OptionBuilder is a stateful static builder: each fluent chain below
  // accumulates state that the terminating create() consumes and resets.
  Option serviceOpt = OptionBuilder.withArgName("NameNode|DataNode").hasArg()
      .withDescription("specify jmx service (NameNode by default)").create("service");
  Option serverOpt = OptionBuilder.withArgName("mbean server").hasArg()
      .withDescription("specify mbean server (localhost by default)").create("server");
  Option helpOpt = OptionBuilder.withDescription("print help").create("help");
  Option portOpt = OptionBuilder.withArgName("mbean server port").hasArg()
      .withDescription("specify mbean server port, "
          + "if missing - it will try to connect to MBean Server in the same VM")
      .create("port");
  Option localVMOpt = OptionBuilder.withArgName("VM's connector url").hasArg()
      .withDescription("connect to the VM on the same machine;"
          + "\n use:\n jstat -J-Djstat.showUnsupported=true -snap <vmpid> | "
          + "grep sun.management.JMXConnectorServer.address\n to find the url")
      .create("localVM");
  // Registration order matters for help output; keep it stable.
  opts.addOption(serverOpt);
  opts.addOption(helpOpt);
  opts.addOption(serviceOpt);
  opts.addOption(portOpt);
  opts.addOption(localVMOpt);
  CommandLineParser parser = new GnuParser();
  try {
    // true => stop at the first unrecognized token instead of failing.
    return parser.parse(opts, args, true);
  } catch (ParseException e) {
    printUsage(opts);
    throw new IllegalArgumentException("invalid args: " + e.getMessage());
  }
}
Use of org.apache.commons.cli.Option in the Apache Hadoop project.
Example: the addPlanCommands method of the DiskBalancerCLI class.
/**
 * Registers the options recognized by the "plan" sub-command.
 *
 * <p>Every option is added both to the plan-specific option set (via
 * {@code getPlanOptions()}) and to the supplied top-level option set.
 *
 * @param opt the top-level Options instance to extend
 */
private void addPlanCommands(Options opt) {
  // Note: OptionBuilder is a stateful static builder; the order of the
  // fluent calls before create() is irrelevant.
  Option planOpt = OptionBuilder.withLongOpt(PLAN)
      .hasArg()
      .withDescription("Hostname, IP address or UUID of datanode "
          + "for which a plan is created.")
      .create();
  getPlanOptions().addOption(planOpt);
  opt.addOption(planOpt);

  Option outFileOpt = OptionBuilder.withLongOpt(OUTFILE)
      .hasArg()
      .withDescription("Local path of file to write output to, if not specified "
          + "defaults will be used.")
      .create();
  getPlanOptions().addOption(outFileOpt);
  opt.addOption(outFileOpt);

  Option bandwidthOpt = OptionBuilder.withLongOpt(BANDWIDTH)
      .hasArg()
      .withDescription("Maximum disk bandwidth (MB/s) in integer to be consumed by "
          + "diskBalancer. e.g. 10 MB/s.")
      .create();
  getPlanOptions().addOption(bandwidthOpt);
  opt.addOption(bandwidthOpt);

  Option thresholdOpt = OptionBuilder.withLongOpt(THRESHOLD)
      .hasArg()
      .withDescription("Percentage of data skew that is tolerated before"
          + " disk balancer starts working. For example, if"
          + " total data on a 2 disk node is 100 GB then disk"
          + " balancer calculates the expected value on each disk,"
          + " which is 50 GB. If the tolerance is 10% then data"
          + " on a single disk needs to be more than 60 GB"
          + " (50 GB + 10% tolerance value) for Disk balancer to"
          + " balance the disks.")
      .create();
  getPlanOptions().addOption(thresholdOpt);
  opt.addOption(thresholdOpt);

  Option maxErrorOpt = OptionBuilder.withLongOpt(MAXERROR)
      .hasArg()
      .withDescription("Describes how many errors "
          + "can be tolerated while copying between a pair of disks.")
      .create();
  getPlanOptions().addOption(maxErrorOpt);
  opt.addOption(maxErrorOpt);

  Option verboseOpt = OptionBuilder.withLongOpt(VERBOSE)
      .withDescription("Print out the summary of the plan on console")
      .create();
  getPlanOptions().addOption(verboseOpt);
  opt.addOption(verboseOpt);
}
Use of org.apache.commons.cli.Option in the Apache Hadoop project.
Example: the getOptions method of the ArgumentParser class.
/**
 * Builds the complete option set used for command-line parsing.
 *
 * <p>Includes the fixed configuration options, one option per operation type
 * (whose shared description lists the supported distributions), and the
 * size/seed/queue/help options. Registration order is significant for help
 * output and is preserved.
 *
 * @return the option set to be used in command line parsing
 */
private Options getOptions() {
  Options options = new Options();
  // Leading fixed configuration options, in their documented order.
  Option[] leading = {
      ConfigOption.MAPS,
      ConfigOption.REDUCES,
      ConfigOption.PACKET_SIZE,
      ConfigOption.OPS,
      ConfigOption.DURATION,
      ConfigOption.EXIT_ON_ERROR,
      ConfigOption.SLEEP_TIME,
      ConfigOption.TRUNCATE_WAIT,
      ConfigOption.FILES,
      ConfigOption.DIR_SIZE,
      ConfigOption.BASE_DIR,
      ConfigOption.RESULT_FILE,
      ConfigOption.CLEANUP,
  };
  for (Option option : leading) {
    options.addOption(option);
  }
  {
    // Build one option per operation type; each shares a description that
    // enumerates the supported distributions.
    Distribution[] distValues = Distribution.values();
    String[] distStrs = new String[distValues.length];
    int pos = 0;
    for (Distribution dist : distValues) {
      distStrs[pos++] = dist.lowerName();
    }
    String opdesc = String.format(Constants.OP_DESCR, StringUtils.arrayToString(distStrs));
    for (OperationType type : OperationType.values()) {
      options.addOption(new Option(type.lowerName(), true, opdesc));
    }
  }
  // Trailing fixed options, in their documented order.
  Option[] trailing = {
      ConfigOption.REPLICATION_AM,
      ConfigOption.BLOCK_SIZE,
      ConfigOption.READ_SIZE,
      ConfigOption.WRITE_SIZE,
      ConfigOption.APPEND_SIZE,
      ConfigOption.TRUNCATE_SIZE,
      ConfigOption.RANDOM_SEED,
      ConfigOption.QUEUE_NAME,
      ConfigOption.HELP,
  };
  for (Option option : trailing) {
    options.addOption(option);
  }
  return options;
}
Aggregations