Example use of org.apache.commons.cli.ParseException in the Apache Hadoop project: class HadoopArchiveLogs, method handleOpts.
/**
 * Parses the command-line arguments and fills in the corresponding
 * fields (maxEligible, minNumLogFiles, maxTotalLogsSize, memory,
 * verbose, force, proxy). Prints usage and exits the JVM for the help
 * option, or after logging a note when the max-eligible value is 0.
 *
 * @param args raw command-line arguments
 * @throws ParseException if the arguments cannot be parsed; usage is
 *     printed before the exception is rethrown
 */
private void handleOpts(String[] args) throws ParseException {
    Options options = new Options();
    Option help = new Option(HELP_OPTION, false, "Prints this message");
    Option maxApps = new Option(MAX_ELIGIBLE_APPS_OPTION, true,
        "The maximum number of eligible apps to process (default: "
            + DEFAULT_MAX_ELIGIBLE + " (all))");
    maxApps.setArgName("n");
    Option minFiles = new Option(MIN_NUM_LOG_FILES_OPTION, true,
        "The minimum number of log files required to be eligible (default: "
            + DEFAULT_MIN_NUM_LOG_FILES + ")");
    minFiles.setArgName("n");
    Option maxSize = new Option(MAX_TOTAL_LOGS_SIZE_OPTION, true,
        "The maximum total logs size (in megabytes) required to be eligible"
            + " (default: " + DEFAULT_MAX_TOTAL_LOGS_SIZE + ")");
    maxSize.setArgName("megabytes");
    Option mem = new Option(MEMORY_OPTION, true,
        "The amount of memory (in megabytes) for each container (default: "
            + DEFAULT_MEMORY + ")");
    mem.setArgName("megabytes");
    Option verboseFlag = new Option(VERBOSE_OPTION, false,
        "Print more details.");
    Option forceFlag = new Option(FORCE_OPTION, false,
        "Force recreating the working directory if an existing one is found. "
            + "This should only be used if you know that another instance is "
            + "not currently running");
    Option noProxyFlag = new Option(NO_PROXY_OPTION, false,
        "When specified, all processing will be done as the user running this"
            + " command (or the Yarn user if DefaultContainerExecutor is in "
            + "use). When not specified, all processing will be done as the "
            + "user who owns that application; if the user running this command"
            + " is not allowed to impersonate that user, it will fail");
    // Register every option in the declaration order above.
    for (Option o : new Option[] {help, maxApps, minFiles, maxSize, mem,
        verboseFlag, forceFlag, noProxyFlag}) {
      options.addOption(o);
    }
    try {
      CommandLine cmd = new GnuParser().parse(options, args);
      if (cmd.hasOption(HELP_OPTION)) {
        new HelpFormatter().printHelp("mapred archive-logs", options);
        System.exit(0);
      }
      if (cmd.hasOption(MAX_ELIGIBLE_APPS_OPTION)) {
        maxEligible =
            Integer.parseInt(cmd.getOptionValue(MAX_ELIGIBLE_APPS_OPTION));
        if (maxEligible == 0) {
          // A limit of zero would select no applications at all.
          LOG.info("Setting " + MAX_ELIGIBLE_APPS_OPTION + " to 0 accomplishes "
              + "nothing. Please either set it to a negative value "
              + "(default, all) or a more reasonable value.");
          System.exit(0);
        }
      }
      if (cmd.hasOption(MIN_NUM_LOG_FILES_OPTION)) {
        minNumLogFiles =
            Integer.parseInt(cmd.getOptionValue(MIN_NUM_LOG_FILES_OPTION));
      }
      if (cmd.hasOption(MAX_TOTAL_LOGS_SIZE_OPTION)) {
        // The option value is given in megabytes; the field holds bytes.
        maxTotalLogsSize =
            Long.parseLong(cmd.getOptionValue(MAX_TOTAL_LOGS_SIZE_OPTION))
                * 1024L * 1024L;
      }
      if (cmd.hasOption(MEMORY_OPTION)) {
        memory = Long.parseLong(cmd.getOptionValue(MEMORY_OPTION));
      }
      if (cmd.hasOption(VERBOSE_OPTION)) {
        verbose = true;
      }
      if (cmd.hasOption(FORCE_OPTION)) {
        force = true;
      }
      if (cmd.hasOption(NO_PROXY_OPTION)) {
        proxy = false;
      }
    } catch (ParseException pe) {
      // Show usage before propagating the parse failure to the caller.
      new HelpFormatter().printHelp("mapred archive-logs", options);
      throw pe;
    }
}
Example use of org.apache.commons.cli.ParseException in the Apache Hadoop project: class OptionsParser, method parse.
/**
 * Parses the command-line options and creates a corresponding
 * {@link DistCpOptions} object.
 *
 * @param args Command-line arguments (excluding the options consumed
 * by the GenericOptionsParser).
 * @return The DistCpOptions object, corresponding to the specified
 * command-line.
 * @throws IllegalArgumentException Thrown if the parse fails.
 */
public static DistCpOptions parse(String[] args) throws IllegalArgumentException {
  CommandLineParser parser = new CustomParser();
  CommandLine cmd;
  try {
    cmd = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    // Surface parse failures as IllegalArgumentException, keeping the cause.
    throw new IllegalArgumentException("Unable to parse arguments. " + Arrays.toString(args), e);
  }
  DistCpOptions opts = parseSourceAndTargetPaths(cmd);
  // Plain boolean switches translate directly into setter calls.
  opts.setIgnoreFailures(cmd.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()));
  opts.setAtomicCommit(cmd.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()));
  opts.setSyncFolder(cmd.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()));
  opts.setOverwrite(cmd.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()));
  opts.setAppend(cmd.hasOption(DistCpOptionSwitch.APPEND.getSwitch()));
  opts.setDeleteMissing(cmd.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()));
  opts.setSkipCRC(cmd.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()));
  // A work path is only legal together with an atomic commit.
  String workPathSwitch = DistCpOptionSwitch.WORK_PATH.getSwitch();
  if (cmd.hasOption(workPathSwitch)) {
    if (!opts.shouldAtomicCommit()) {
      throw new IllegalArgumentException("-tmp work-path can only be specified along with -atomic");
    }
    String workPath = getVal(cmd, workPathSwitch);
    if (workPath != null && !workPath.isEmpty()) {
      opts.setAtomicWorkPath(new Path(workPath));
    }
  }
  if (cmd.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    opts.setLogPath(new Path(getVal(cmd, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }
  // Presence of the BLOCKING switch turns blocking off.
  if (cmd.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch())) {
    opts.setBlocking(false);
  }
  parseBandwidth(cmd, opts);
  parseNumListStatusThreads(cmd, opts);
  parseMaxMaps(cmd, opts);
  if (cmd.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    opts.setCopyStrategy(getVal(cmd, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }
  parsePreserveStatus(cmd, opts);
  // Snapshot diff options carry a pair of snapshot names.
  if (cmd.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(cmd, DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    opts.setUseDiff(snapshots[0], snapshots[1]);
  }
  if (cmd.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(cmd, DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    opts.setUseRdiff(snapshots[0], snapshots[1]);
  }
  parseFileLimit(cmd);
  parseSizeLimit(cmd);
  if (cmd.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    opts.setFiltersFile(getVal(cmd, DistCpOptionSwitch.FILTERS.getSwitch()));
  }
  // Cross-field validation happens last, once everything is populated.
  opts.validate();
  return opts;
}
Example use of org.apache.commons.cli.ParseException in the Apache Hadoop project: class RegistryCli, method rm.
/**
 * Deletes a registry path, optionally recursively via the -r flag.
 * Returns 0 on success, -1 on validation or delete failure, or the
 * usageError result for bad syntax.
 */
@SuppressWarnings("unchecked")
public int rm(String[] args) {
  Options rmOption = new Options();
  rmOption.addOption(OptionBuilder.withArgName("recursive").withDescription("delete recursively").create("r"));
  CommandLineParser parser = new GnuParser();
  try {
    CommandLine cmd = parser.parse(rmOption, args);
    List<String> arguments = cmd.getArgList();
    // arguments[0] is the command name itself, so exactly one path means size 2.
    if (arguments.size() != 2) {
      return usageError("RM requires exactly one path argument", RM_USAGE);
    }
    String path = arguments.get(1);
    if (!validatePath(path)) {
      return -1;
    }
    try {
      registry.delete(path, cmd.hasOption("r"));
      return 0;
    } catch (Exception e) {
      syserr.println(analyzeException("rm", e, arguments));
      return -1;
    }
  } catch (ParseException exp) {
    return usageError("Invalid syntax " + exp.toString(), RM_USAGE);
  }
}
Example use of org.apache.commons.cli.ParseException in the Apache Hadoop project: class RegistryCli, method resolve.
/**
 * Resolves a registry path to a ServiceRecord and prints its external
 * endpoints and their addresses to sysout. Returns 0 on success, -1 on
 * validation or resolve failure, or the usageError result for bad syntax.
 */
@SuppressWarnings("unchecked")
public int resolve(String[] args) {
  Options resolveOption = new Options();
  CommandLineParser parser = new GnuParser();
  try {
    CommandLine cmd = parser.parse(resolveOption, args);
    List<String> arguments = cmd.getArgList();
    // arguments[0] is the command name itself, so exactly one path means size 2.
    if (arguments.size() != 2) {
      return usageError("resolve requires exactly one path argument", RESOLVE_USAGE);
    }
    String path = arguments.get(1);
    if (!validatePath(path)) {
      return -1;
    }
    try {
      ServiceRecord record = registry.resolve(path);
      // Dump every external endpoint, one address block per line group.
      for (Endpoint endpoint : record.external) {
        sysout.println(" Endpoint(ProtocolType=" + endpoint.protocolType
            + ", Api=" + endpoint.api + "); Addresses(AddressType="
            + endpoint.addressType + ") are: ");
        for (Map<String, String> address : endpoint.addresses) {
          sysout.println("[ ");
          for (Map.Entry<String, String> entry : address.entrySet()) {
            sysout.print("\t" + entry.getKey() + ":" + entry.getValue());
          }
          sysout.println("\n]");
        }
        sysout.println();
      }
      return 0;
    } catch (Exception e) {
      syserr.println(analyzeException("resolve", e, arguments));
      return -1;
    }
  } catch (ParseException exp) {
    return usageError("Invalid syntax " + exp, RESOLVE_USAGE);
  }
}
Example use of org.apache.commons.cli.ParseException in the Apache Hadoop project: class Submitter, method run.
/**
 * Entry point for submitting a pipes job from the command line. Registers
 * the pipes-specific options, translates them into a JobConf, optionally
 * puts the job jar on the classpath, and submits the job.
 *
 * Fixes over the previous version: -jobconf entries are split on the FIRST
 * '=' only (so property values containing '=' are preserved), malformed
 * entries are skipped with a warning instead of throwing
 * ArrayIndexOutOfBoundsException, and the deprecated File.toURL() (which
 * does not escape special characters) is replaced by toURI().toURL().
 *
 * @param args generic Hadoop options followed by the pipes options below
 * @return 0 on successful submission, 1 on usage or parse errors
 * @throws Exception if job submission itself fails
 */
@Override
public int run(String[] args) throws Exception {
  CommandLineParser cli = new CommandLineParser();
  if (args.length == 0) {
    cli.printUsage();
    return 1;
  }
  cli.addOption("input", false, "input path to the maps", "path");
  cli.addOption("output", false, "output path from the reduces", "path");
  cli.addOption("jar", false, "job jar file", "path");
  cli.addOption("inputformat", false, "java classname of InputFormat", "class");
  //cli.addArgument("javareader", false, "is the RecordReader in Java");
  cli.addOption("map", false, "java classname of Mapper", "class");
  cli.addOption("partitioner", false, "java classname of Partitioner", "class");
  cli.addOption("reduce", false, "java classname of Reducer", "class");
  cli.addOption("writer", false, "java classname of OutputFormat", "class");
  cli.addOption("program", false, "URI to application executable", "class");
  cli.addOption("reduces", false, "number of reduces", "num");
  cli.addOption("jobconf", false, "\"n1=v1,n2=v2,..\" (Deprecated) Optional. Add or override a JobConf property.", "key=val");
  cli.addOption("lazyOutput", false, "Optional. Create output lazily", "boolean");
  Parser parser = cli.createParser();
  try {
    // Let the generic parser strip -D/-fs/-jt etc. before pipes parsing.
    GenericOptionsParser genericParser = new GenericOptionsParser(getConf(), args);
    CommandLine results = parser.parse(cli.options, genericParser.getRemainingArgs());
    JobConf job = new JobConf(getConf());
    if (results.hasOption("input")) {
      FileInputFormat.setInputPaths(job, results.getOptionValue("input"));
    }
    if (results.hasOption("output")) {
      FileOutputFormat.setOutputPath(job, new Path(results.getOptionValue("output")));
    }
    if (results.hasOption("jar")) {
      job.setJar(results.getOptionValue("jar"));
    }
    // Supplying a Java class for any of these stages marks that stage as
    // Java-side rather than pipes-side.
    if (results.hasOption("inputformat")) {
      setIsJavaRecordReader(job, true);
      job.setInputFormat(getClass(results, "inputformat", job, InputFormat.class));
    }
    if (results.hasOption("javareader")) {
      setIsJavaRecordReader(job, true);
    }
    if (results.hasOption("map")) {
      setIsJavaMapper(job, true);
      job.setMapperClass(getClass(results, "map", job, Mapper.class));
    }
    if (results.hasOption("partitioner")) {
      job.setPartitionerClass(getClass(results, "partitioner", job, Partitioner.class));
    }
    if (results.hasOption("reduce")) {
      setIsJavaReducer(job, true);
      job.setReducerClass(getClass(results, "reduce", job, Reducer.class));
    }
    if (results.hasOption("reduces")) {
      job.setNumReduceTasks(Integer.parseInt(results.getOptionValue("reduces")));
    }
    if (results.hasOption("writer")) {
      setIsJavaRecordWriter(job, true);
      job.setOutputFormat(getClass(results, "writer", job, OutputFormat.class));
    }
    if (results.hasOption("lazyOutput")) {
      if (Boolean.parseBoolean(results.getOptionValue("lazyOutput"))) {
        LazyOutputFormat.setOutputFormatClass(job, job.getOutputFormat().getClass());
      }
    }
    if (results.hasOption("program")) {
      setExecutable(job, results.getOptionValue("program"));
    }
    if (results.hasOption("jobconf")) {
      LOG.warn("-jobconf option is deprecated, please use -D instead.");
      String options = results.getOptionValue("jobconf");
      StringTokenizer tokenizer = new StringTokenizer(options, ",");
      while (tokenizer.hasMoreTokens()) {
        String keyVal = tokenizer.nextToken().trim();
        // Split on the first '=' only, so property values that themselves
        // contain '=' are kept intact; split("=") would silently drop
        // everything after the second '='.
        String[] keyValSplit = keyVal.split("=", 2);
        if (keyValSplit.length != 2) {
          // An entry with no '=' previously crashed with
          // ArrayIndexOutOfBoundsException; warn and skip instead.
          LOG.warn("Skipping malformed -jobconf entry: " + keyVal);
          continue;
        }
        job.set(keyValSplit[0], keyValSplit[1]);
      }
    }
    // if they gave us a jar file, include it into the class path
    String jarFile = job.getJar();
    if (jarFile != null) {
      // toURI().toURL() replaces the deprecated File.toURL(), which does
      // not escape characters that are illegal in URLs.
      final URL[] urls = new URL[] { FileSystem.getLocal(job).pathToFile(new Path(jarFile)).toURI().toURL() };
      //FindBugs complains that creating a URLClassLoader should be
      //in a doPrivileged() block.
      ClassLoader loader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
        public ClassLoader run() {
          return new URLClassLoader(urls);
        }
      });
      job.setClassLoader(loader);
    }
    runJob(job);
    return 0;
  } catch (ParseException pe) {
    LOG.info("Error : " + pe);
    cli.printUsage();
    return 1;
  }
}
Aggregations