Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
Class RegistryCli, method rm.
@SuppressWarnings("unchecked")
public int rm(String[] args) {
  Option recursive = OptionBuilder.withArgName("recursive")
      .withDescription("delete recursively").create("r");
  Options rmOption = new Options();
  rmOption.addOption(recursive);
  boolean recursiveOpt = false;
  CommandLineParser parser = new GnuParser();
  try {
    CommandLine line = parser.parse(rmOption, args);
    // getArgList() holds the non-option tokens; index 0 is the subcommand
    // name ("rm") and index 1 is the registry path to delete.
    List<String> argsList = line.getArgList();
    if (argsList.size() != 2) {
      return usageError("RM requires exactly one path argument", RM_USAGE);
    }
    if (!validatePath(argsList.get(1))) {
      return -1;
    }
    try {
      if (line.hasOption("r")) {
        recursiveOpt = true;
      }
      registry.delete(argsList.get(1), recursiveOpt);
      return 0;
    } catch (Exception e) {
      syserr.println(analyzeException("rm", e, argsList));
    }
    return -1;
  } catch (ParseException exp) {
    return usageError("Invalid syntax " + exp.toString(), RM_USAGE);
  }
}
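Note that OptionBuilder and GnuParser are deprecated as of Commons CLI 1.3. Below is a minimal sketch of the same -r flag expressed with the replacement Option.builder and DefaultParser APIs; the class name RmOptionSketch is invented for illustration and is not part of RegistryCli.

import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class RmOptionSketch {
  public static void main(String[] args) throws ParseException {
    // Same flag as above, defined with the non-deprecated builder API.
    Option recursive = Option.builder("r")
        .argName("recursive")
        .desc("delete recursively")
        .build();
    Options rmOption = new Options();
    rmOption.addOption(recursive);
    CommandLineParser parser = new DefaultParser();
    CommandLine line = parser.parse(rmOption, args);
    System.out.println("recursive = " + line.hasOption("r"));
    List<String> rest = line.getArgList();
    System.out.println("remaining args = " + rest);
  }
}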
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
Class RegistryCli, method resolve.
@SuppressWarnings("unchecked")
public int resolve(String[] args) {
  Options resolveOption = new Options();
  CommandLineParser parser = new GnuParser();
  try {
    CommandLine line = parser.parse(resolveOption, args);
    List<String> argsList = line.getArgList();
    if (argsList.size() != 2) {
      return usageError("resolve requires exactly one path argument", RESOLVE_USAGE);
    }
    if (!validatePath(argsList.get(1))) {
      return -1;
    }
    try {
      ServiceRecord record = registry.resolve(argsList.get(1));
      for (Endpoint endpoint : record.external) {
        sysout.println(" Endpoint(ProtocolType=" + endpoint.protocolType
            + ", Api=" + endpoint.api + ");"
            + " Addresses(AddressType=" + endpoint.addressType + ") are: ");
        for (Map<String, String> address : endpoint.addresses) {
          sysout.println("[ ");
          for (Map.Entry<String, String> entry : address.entrySet()) {
            sysout.print("\t" + entry.getKey() + ":" + entry.getValue());
          }
          sysout.println("\n]");
        }
        sysout.println();
      }
      return 0;
    } catch (Exception e) {
      syserr.println(analyzeException("resolve", e, argsList));
    }
    return -1;
  } catch (ParseException exp) {
    return usageError("Invalid syntax " + exp, RESOLVE_USAGE);
  }
}
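Both rm and resolve rely on CommandLine.getArgList() keeping every non-option token, including the subcommand name itself, which is why they test argsList.size() != 2 and read the path from index 1. A small self-contained sketch of that behavior, using a placeholder path and the non-deprecated DefaultParser:

import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ArgListSketch {
  public static void main(String[] args) throws ParseException {
    // With no options defined, every token is left in getArgList().
    CommandLine line = new DefaultParser().parse(new Options(),
        new String[] { "resolve", "/users/example/service" });
    List<String> rest = line.getArgList();
    // Prints 2: the subcommand name plus the path, which is why the
    // methods above check for exactly two entries and use get(1).
    System.out.println(rest.size());
    System.out.println(rest.get(1));
  }
}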
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
Class SLSRunner, method main.
public static void main(String[] args) throws Exception {
  Options options = new Options();
  options.addOption("inputrumen", true, "input rumen files");
  options.addOption("inputsls", true, "input sls files");
  options.addOption("nodes", true, "input topology");
  options.addOption("output", true, "output directory");
  options.addOption("trackjobs", true, "jobs to be tracked during simulating");
  options.addOption("printsimulation", false, "print out simulation information");
  CommandLineParser parser = new GnuParser();
  CommandLine cmd = parser.parse(options, args);
  String inputRumen = cmd.getOptionValue("inputrumen");
  String inputSLS = cmd.getOptionValue("inputsls");
  String output = cmd.getOptionValue("output");
  if ((inputRumen == null && inputSLS == null) || output == null) {
    System.err.println();
    System.err.println("ERROR: Missing input or output file");
    System.err.println();
    System.err.println("Options: -inputrumen|-inputsls FILE,FILE... "
        + "-output FILE [-nodes FILE] [-trackjobs JobId,JobId...] "
        + "[-printsimulation]");
    System.err.println();
    System.exit(1);
  }
  File outputFile = new File(output);
  if (!outputFile.exists() && !outputFile.mkdirs()) {
    System.err.println("ERROR: Cannot create output directory "
        + outputFile.getAbsolutePath());
    System.exit(1);
  }
  Set<String> trackedJobSet = new HashSet<String>();
  if (cmd.hasOption("trackjobs")) {
    String trackjobs = cmd.getOptionValue("trackjobs");
    String[] jobIds = trackjobs.split(",");
    trackedJobSet.addAll(Arrays.asList(jobIds));
  }
  String nodeFile = cmd.hasOption("nodes") ? cmd.getOptionValue("nodes") : "";
  boolean isSLS = inputSLS != null;
  String[] inputFiles = isSLS ? inputSLS.split(",") : inputRumen.split(",");
  SLSRunner sls = new SLSRunner(isSLS, inputFiles, nodeFile, output,
      trackedJobSet, cmd.hasOption("printsimulation"));
  sls.start();
}
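The hasOption/getOptionValue ternary used above for -nodes can also be expressed with the two-argument getOptionValue overload, which returns a default when the option is absent. A minimal sketch of that variant; the option names match those registered above, the argument values are made up, and DefaultParser stands in for the deprecated GnuParser:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class DefaultValueSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("nodes", true, "input topology");
    options.addOption("output", true, "output directory");
    CommandLine cmd = new DefaultParser().parse(options,
        new String[] { "-output", "/tmp/sls-out" });
    // Falls back to "" when -nodes was not given, like the ternary above.
    String nodeFile = cmd.getOptionValue("nodes", "");
    System.out.println("nodes='" + nodeFile + "', output=" + cmd.getOptionValue("output"));
  }
}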
Use of org.apache.commons.cli.CommandLineParser in project hbase by apache.
Class WALPrettyPrinter, method run.
/**
* Pass one or more log file names and formatting options and it will dump out
* a text version of the contents on <code>stdout</code>.
*
* @param args
* Command line arguments
* @throws IOException
* Thrown upon file system errors etc.
*/
public static void run(String[] args) throws IOException {
  // create options
  Options options = new Options();
  options.addOption("h", "help", false, "Output help message");
  options.addOption("j", "json", false, "Output JSON");
  options.addOption("p", "printvals", false, "Print values");
  options.addOption("r", "region", true,
      "Region to filter by. Pass encoded region name; e.g. '9192caead6a5a20acb4454ffbc79fa14'");
  options.addOption("s", "sequence", true,
      "Sequence to filter by. Pass sequence number.");
  options.addOption("w", "row", true, "Row to filter by. Pass row name.");
  WALPrettyPrinter printer = new WALPrettyPrinter();
  CommandLineParser parser = new PosixParser();
  List<?> files = null;
  try {
    CommandLine cmd = parser.parse(options, args);
    files = cmd.getArgList();
    if (files.isEmpty() || cmd.hasOption("h")) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("WAL <filename...>", options, true);
      System.exit(-1);
    }
    // configure the pretty printer using command line options
    if (cmd.hasOption("p"))
      printer.enableValues();
    if (cmd.hasOption("j"))
      printer.enableJSON();
    if (cmd.hasOption("r"))
      printer.setRegionFilter(cmd.getOptionValue("r"));
    if (cmd.hasOption("s"))
      printer.setSequenceFilter(Long.parseLong(cmd.getOptionValue("s")));
    if (cmd.hasOption("w"))
      printer.setRowFilter(cmd.getOptionValue("w"));
  } catch (ParseException e) {
    e.printStackTrace();
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("HFile filename(s) ", options, true);
    System.exit(-1);
  }
  // get configuration, file system, and process the given files
  Configuration conf = HBaseConfiguration.create();
  FSUtils.setFsDefault(conf, FSUtils.getRootDir(conf));
  // begin output
  printer.beginPersistentOutput();
  for (Object f : files) {
    Path file = new Path((String) f);
    FileSystem fs = file.getFileSystem(conf);
    if (!fs.exists(file)) {
      System.err.println("ERROR, file doesnt exist: " + file);
      return;
    }
    printer.processFile(conf, file);
  }
  printer.endPersistentOutput();
}
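The parse-then-help flow here is the usual Commons CLI pattern: attempt to parse, and on failure (or when -h is present) print a usage message generated from the registered options and exit. A stripped-down, hypothetical sketch of that pattern with the same long/short option style; the tool name walprinter-sketch and the reduced option set are illustrative only:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class HelpPatternSketch {
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("h", "help", false, "Output help message");
    options.addOption("j", "json", false, "Output JSON");
    CommandLineParser parser = new DefaultParser();
    try {
      CommandLine cmd = parser.parse(options, args);
      if (cmd.getArgList().isEmpty() || cmd.hasOption("h")) {
        // The final 'true' argument asks HelpFormatter to generate a usage
        // line from the registered options.
        new HelpFormatter().printHelp("walprinter-sketch <filename...>", options, true);
        return;
      }
      System.out.println("files: " + cmd.getArgList());
    } catch (ParseException e) {
      new HelpFormatter().printHelp("walprinter-sketch <filename...>", options, true);
    }
  }
}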
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
Class TimelineSchemaCreator, method parseArgs.
/**
* Parse command-line arguments.
*
* @param args
* command line arguments passed to program.
* @return parsed command line.
* @throws ParseException
*/
private static CommandLine parseArgs(String[] args) throws ParseException {
  Options options = new Options();
  // Input
  Option o = new Option(ENTITY_TABLE_NAME_SHORT, "entityTableName", true,
      "entity table name");
  o.setArgName("entityTableName");
  o.setRequired(false);
  options.addOption(o);
  o = new Option(TTL_OPTION_SHORT, "metricsTTL", true,
      "TTL for metrics column family");
  o.setArgName("metricsTTL");
  o.setRequired(false);
  options.addOption(o);
  o = new Option(APP_TO_FLOW_TABLE_NAME_SHORT, "appToflowTableName", true,
      "app to flow table name");
  o.setArgName("appToflowTableName");
  o.setRequired(false);
  options.addOption(o);
  o = new Option(APP_TABLE_NAME_SHORT, "applicationTableName", true,
      "application table name");
  o.setArgName("applicationTableName");
  o.setRequired(false);
  options.addOption(o);
  // Options without an argument
  // No need to set arg name since we do not need an argument here
  o = new Option(SKIP_EXISTING_TABLE_OPTION_SHORT, "skipExistingTable", false,
      "skip existing Hbase tables and continue to create new tables");
  o.setRequired(false);
  options.addOption(o);
  CommandLineParser parser = new PosixParser();
  CommandLine commandLine = null;
  try {
    commandLine = parser.parse(options, args);
  } catch (Exception e) {
    LOG.error("ERROR: " + e.getMessage() + "\n");
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(NAME + " ", options, true);
    System.exit(-1);
  }
  return commandLine;
}
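Each Option above pairs the (shortOpt, longOpt, hasArg, description) constructor with setArgName, which controls how the argument is displayed in generated usage text, and setRequired(false), so parsing succeeds when the option is omitted. A self-contained sketch of that setup, assuming a placeholder short name "e" and a made-up table value, since the real ENTITY_TABLE_NAME_SHORT constant is not shown in this snippet:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class OptionSetupSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    // "e" is a placeholder short name; the actual ENTITY_TABLE_NAME_SHORT
    // constant is defined elsewhere in TimelineSchemaCreator.
    Option o = new Option("e", "entityTableName", true, "entity table name");
    o.setArgName("entityTableName");   // shown as <entityTableName> in the usage line
    o.setRequired(false);              // parsing succeeds even if the option is absent
    options.addOption(o);
    CommandLine cmd = new DefaultParser().parse(options,
        new String[] { "-e", "example.entity.table" });
    System.out.println("entity table: " + cmd.getOptionValue("e"));
    new HelpFormatter().printHelp("TimelineSchemaCreator", options, true);
  }
}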