Use of org.apache.commons.cli.Option in project hadoop by apache.
From the class HadoopArchiveLogs, the method handleOpts:
private void handleOpts(String[] args) throws ParseException {
  Options opts = new Options();
  Option helpOpt = new Option(HELP_OPTION, false, "Prints this message");
  Option maxEligibleOpt = new Option(MAX_ELIGIBLE_APPS_OPTION, true,
      "The maximum number of eligible apps to process (default: "
          + DEFAULT_MAX_ELIGIBLE + " (all))");
  maxEligibleOpt.setArgName("n");
  Option minNumLogFilesOpt = new Option(MIN_NUM_LOG_FILES_OPTION, true,
      "The minimum number of log files required to be eligible (default: "
          + DEFAULT_MIN_NUM_LOG_FILES + ")");
  minNumLogFilesOpt.setArgName("n");
  Option maxTotalLogsSizeOpt = new Option(MAX_TOTAL_LOGS_SIZE_OPTION, true,
      "The maximum total logs size (in megabytes) required to be eligible"
          + " (default: " + DEFAULT_MAX_TOTAL_LOGS_SIZE + ")");
  maxTotalLogsSizeOpt.setArgName("megabytes");
  Option memoryOpt = new Option(MEMORY_OPTION, true,
      "The amount of memory (in megabytes) for each container (default: "
          + DEFAULT_MEMORY + ")");
  memoryOpt.setArgName("megabytes");
  Option verboseOpt = new Option(VERBOSE_OPTION, false, "Print more details.");
  Option forceOpt = new Option(FORCE_OPTION, false,
      "Force recreating the working directory if an existing one is found. "
          + "This should only be used if you know that another instance is "
          + "not currently running");
  Option noProxyOpt = new Option(NO_PROXY_OPTION, false,
      "When specified, all processing will be done as the user running this"
          + " command (or the Yarn user if DefaultContainerExecutor is in "
          + "use). When not specified, all processing will be done as the "
          + "user who owns that application; if the user running this command"
          + " is not allowed to impersonate that user, it will fail");
  opts.addOption(helpOpt);
  opts.addOption(maxEligibleOpt);
  opts.addOption(minNumLogFilesOpt);
  opts.addOption(maxTotalLogsSizeOpt);
  opts.addOption(memoryOpt);
  opts.addOption(verboseOpt);
  opts.addOption(forceOpt);
  opts.addOption(noProxyOpt);
  try {
    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = parser.parse(opts, args);
    if (commandLine.hasOption(HELP_OPTION)) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("mapred archive-logs", opts);
      System.exit(0);
    }
    if (commandLine.hasOption(MAX_ELIGIBLE_APPS_OPTION)) {
      maxEligible = Integer.parseInt(
          commandLine.getOptionValue(MAX_ELIGIBLE_APPS_OPTION));
      if (maxEligible == 0) {
        LOG.info("Setting " + MAX_ELIGIBLE_APPS_OPTION + " to 0 accomplishes "
            + "nothing. Please either set it to a negative value "
            + "(default, all) or a more reasonable value.");
        System.exit(0);
      }
    }
    if (commandLine.hasOption(MIN_NUM_LOG_FILES_OPTION)) {
      minNumLogFiles = Integer.parseInt(
          commandLine.getOptionValue(MIN_NUM_LOG_FILES_OPTION));
    }
    if (commandLine.hasOption(MAX_TOTAL_LOGS_SIZE_OPTION)) {
      maxTotalLogsSize = Long.parseLong(
          commandLine.getOptionValue(MAX_TOTAL_LOGS_SIZE_OPTION));
      maxTotalLogsSize *= 1024L * 1024L;
    }
    if (commandLine.hasOption(MEMORY_OPTION)) {
      memory = Long.parseLong(commandLine.getOptionValue(MEMORY_OPTION));
    }
    if (commandLine.hasOption(VERBOSE_OPTION)) {
      verbose = true;
    }
    if (commandLine.hasOption(FORCE_OPTION)) {
      force = true;
    }
    if (commandLine.hasOption(NO_PROXY_OPTION)) {
      proxy = false;
    }
  } catch (ParseException pe) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("mapred archive-logs", opts);
    throw pe;
  }
}
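Taken on its own, the pattern above — build an Options set, parse with GnuParser, then branch on hasOption — reduces to a small standalone sketch. The class and option names below are illustrative, not part of Hadoop:

import org.apache.commons.cli.*;

public class OptionDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    opts.addOption(new Option("help", false, "Prints this message"));
    Option maxOpt = new Option("maxEligibleApps", true,
        "The maximum number of apps to process");
    maxOpt.setArgName("n");
    opts.addOption(maxOpt);
    CommandLine cl = new GnuParser().parse(opts, args);
    if (cl.hasOption("help")) {
      new HelpFormatter().printHelp("demo", opts);
      return;
    }
    if (cl.hasOption("maxEligibleApps")) {
      // Option values arrive as strings; numeric parsing is the caller's job.
      int maxEligible = Integer.parseInt(cl.getOptionValue("maxEligibleApps"));
      System.out.println("maxEligibleApps = " + maxEligible);
    }
  }
}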
Use of org.apache.commons.cli.Option in project hadoop by apache.
From the class HadoopArchiveLogsRunner, the method handleOpts:
private void handleOpts(String[] args) throws ParseException {
  Options opts = new Options();
  Option appIdOpt = new Option(APP_ID_OPTION, true, "Application ID");
  appIdOpt.setRequired(true);
  Option userOpt = new Option(USER_OPTION, true, "User");
  userOpt.setRequired(true);
  Option workingDirOpt = new Option(WORKING_DIR_OPTION, true,
      "Working Directory");
  workingDirOpt.setRequired(true);
  Option remoteLogDirOpt = new Option(REMOTE_ROOT_LOG_DIR_OPTION, true,
      "Remote Root Log Directory");
  remoteLogDirOpt.setRequired(true);
  Option suffixOpt = new Option(SUFFIX_OPTION, true, "Suffix");
  suffixOpt.setRequired(true);
  Option useProxyOpt = new Option(NO_PROXY_OPTION, false, "Use Proxy");
  opts.addOption(appIdOpt);
  opts.addOption(userOpt);
  opts.addOption(workingDirOpt);
  opts.addOption(remoteLogDirOpt);
  opts.addOption(suffixOpt);
  opts.addOption(useProxyOpt);
  CommandLineParser parser = new GnuParser();
  CommandLine commandLine = parser.parse(opts, args);
  appId = commandLine.getOptionValue(APP_ID_OPTION);
  user = commandLine.getOptionValue(USER_OPTION);
  workingDir = commandLine.getOptionValue(WORKING_DIR_OPTION);
  remoteLogDir = commandLine.getOptionValue(REMOTE_ROOT_LOG_DIR_OPTION);
  suffix = commandLine.getOptionValue(SUFFIX_OPTION);
  proxy = true;
  if (commandLine.hasOption(NO_PROXY_OPTION)) {
    proxy = false;
  }
}
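The difference from the previous snippet is setRequired(true): parse() itself rejects a command line that omits a required option by throwing MissingOptionException (a ParseException subclass), so no per-option presence checks are needed. A minimal sketch of that behavior, using an illustrative option name:

import org.apache.commons.cli.*;

public class RequiredOptionDemo {
  public static void main(String[] args) {
    Options opts = new Options();
    Option appIdOpt = new Option("appId", true, "Application ID");
    appIdOpt.setRequired(true); // parse() fails if -appId is absent
    opts.addOption(appIdOpt);
    try {
      CommandLine cl = new GnuParser().parse(opts, args);
      System.out.println("appId = " + cl.getOptionValue("appId"));
    } catch (ParseException pe) {
      // A missing required option surfaces here as a MissingOptionException.
      System.err.println(pe.getMessage());
    }
  }
}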
Use of org.apache.commons.cli.Option in project hadoop by apache.
From the class RegistryCli, the method rm:
@SuppressWarnings("unchecked")
public int rm(String[] args) {
  Option recursive = OptionBuilder.withArgName("recursive")
      .withDescription("delete recursively").create("r");
  Options rmOption = new Options();
  rmOption.addOption(recursive);
  boolean recursiveOpt = false;
  CommandLineParser parser = new GnuParser();
  try {
    CommandLine line = parser.parse(rmOption, args);
    List<String> argsList = line.getArgList();
    if (argsList.size() != 2) {
      return usageError("RM requires exactly one path argument", RM_USAGE);
    }
    if (!validatePath(argsList.get(1))) {
      return -1;
    }
    try {
      if (line.hasOption("r")) {
        recursiveOpt = true;
      }
      registry.delete(argsList.get(1), recursiveOpt);
      return 0;
    } catch (Exception e) {
      syserr.println(analyzeException("rm", e, argsList));
    }
    return -1;
  } catch (ParseException exp) {
    return usageError("Invalid syntax " + exp.toString(), RM_USAGE);
  }
}
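This snippet uses the legacy OptionBuilder, a static, stateful builder whose calls must be chained in a single expression ending in create(); commons-cli 1.3 deprecated it in favor of Option.builder(). Note also that positional operands (here, the path) are not consumed by the parser and land in getArgList(), which returns a raw List in this API generation — hence the @SuppressWarnings("unchecked") above. A standalone sketch of the same pattern, with illustrative names:

import java.util.List;
import org.apache.commons.cli.*;

public class RmDemo {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws ParseException {
    // OptionBuilder state is consumed by create(), so the chain must be
    // written as one expression.
    Option recursive = OptionBuilder.withArgName("recursive")
        .withDescription("delete recursively").create("r");
    Options opts = new Options();
    opts.addOption(recursive);
    CommandLine line = new GnuParser().parse(opts, args);
    // getArgList() returns the raw, unparsed operands (e.g. a path).
    List<String> operands = line.getArgList();
    System.out.println("recursive=" + line.hasOption("r")
        + " operands=" + operands);
  }
}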
Use of org.apache.commons.cli.Option in project hadoop by apache.
From the class NodeCLI, the method run:
@Override
public int run(String[] args) throws Exception {
  Options opts = new Options();
  opts.addOption(HELP_CMD, false, "Displays help for all commands.");
  opts.addOption(STATUS_CMD, true, "Prints the status report of the node.");
  opts.addOption(LIST_CMD, false, "List all running nodes. "
      + "Supports optional use of -states to filter nodes "
      + "based on node state, and -all to list all nodes, "
      + "-showDetails to display more details about each node.");
  Option nodeStateOpt = new Option(NODE_STATE_CMD, true,
      "Works with -list to filter nodes based on input comma-separated "
          + "list of node states. " + getAllValidNodeStates());
  nodeStateOpt.setValueSeparator(',');
  nodeStateOpt.setArgs(Option.UNLIMITED_VALUES);
  nodeStateOpt.setArgName("States");
  opts.addOption(nodeStateOpt);
  Option allOpt = new Option(NODE_ALL, false,
      "Works with -list to list all nodes.");
  opts.addOption(allOpt);
  Option showDetailsOpt = new Option(NODE_SHOW_DETAILS, false,
      "Works with -list to show more details about each node.");
  opts.addOption(showDetailsOpt);
  opts.getOption(STATUS_CMD).setArgName("NodeId");
  if (args != null && args.length > 0) {
    // Normalize any case variant of "-all" (e.g. "-ALL") to the canonical
    // spelling so the parser recognizes it.
    for (int i = args.length - 1; i >= 0; i--) {
      if (args[i].equalsIgnoreCase("-" + NODE_ALL)) {
        args[i] = "-" + NODE_ALL;
      }
    }
  }
  int exitCode = -1;
  CommandLine cliParser = null;
  try {
    cliParser = new GnuParser().parse(opts, args);
  } catch (MissingArgumentException ex) {
    sysout.println("Missing argument for options");
    printUsage(opts);
    return exitCode;
  }
  if (cliParser.hasOption("status")) {
    if (args.length != 2) {
      printUsage(opts);
      return exitCode;
    }
    printNodeStatus(cliParser.getOptionValue("status"));
  } else if (cliParser.hasOption("list")) {
    Set<NodeState> nodeStates = new HashSet<NodeState>();
    if (cliParser.hasOption(NODE_ALL)) {
      for (NodeState state : NodeState.values()) {
        nodeStates.add(state);
      }
    } else if (cliParser.hasOption(NODE_STATE_CMD)) {
      String[] types = cliParser.getOptionValues(NODE_STATE_CMD);
      if (types != null) {
        for (String type : types) {
          if (!type.trim().isEmpty()) {
            try {
              nodeStates.add(NodeState.valueOf(
                  org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
            } catch (IllegalArgumentException ex) {
              sysout.println("The node state " + type + " is invalid.");
              sysout.println(getAllValidNodeStates());
              return exitCode;
            }
          }
        }
      }
    } else {
      nodeStates.add(NodeState.RUNNING);
    }
    // List all node details with more information.
    if (cliParser.hasOption(NODE_SHOW_DETAILS)) {
      listDetailedClusterNodes(nodeStates);
    } else {
      listClusterNodes(nodeStates);
    }
  } else if (cliParser.hasOption(HELP_CMD)) {
    printUsage(opts);
    return 0;
  } else {
    syserr.println("Invalid Command Usage : ");
    printUsage(opts);
  }
  return 0;
}
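The -states option above demonstrates a multi-valued option: setArgs(Option.UNLIMITED_VALUES) accepts any number of values, and setValueSeparator(',') splits each token on commas, so getOptionValues returns the individual states. A minimal sketch with an illustrative option name:

import java.util.Arrays;
import org.apache.commons.cli.*;

public class MultiValueDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    Option states = new Option("states", true, "Comma-separated node states");
    states.setValueSeparator(',');           // split each token on commas
    states.setArgs(Option.UNLIMITED_VALUES); // accept any number of values
    states.setArgName("States");
    opts.addOption(states);
    CommandLine cl = new GnuParser().parse(opts, args);
    if (cl.hasOption("states")) {
      // "-states RUNNING,LOST" prints [RUNNING, LOST]
      System.out.println(Arrays.toString(cl.getOptionValues("states")));
    }
  }
}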
Use of org.apache.commons.cli.Option in project hadoop by apache.
From the class RMAdminCLI, the method handleRefreshNodes:
// A helper method to reduce the number of lines of run()
private int handleRefreshNodes(String[] args, String cmd, boolean isHAEnabled)
    throws IOException, YarnException, ParseException {
  Options opts = new Options();
  opts.addOption("refreshNodes", false,
      "Refresh the hosts information at the ResourceManager.");
  Option gracefulOpt = new Option("g", "graceful", true,
      "Wait for timeout before marking the NodeManager as decommissioned.");
  gracefulOpt.setOptionalArg(true);
  opts.addOption(gracefulOpt);
  opts.addOption("client", false,
      "Indicates the timeout tracking should be handled by the client.");
  opts.addOption("server", false,
      "Indicates the timeout tracking should be handled by the RM.");
  int exitCode = -1;
  CommandLine cliParser = null;
  try {
    cliParser = new GnuParser().parse(opts, args);
  } catch (MissingArgumentException ex) {
    System.out.println("Missing argument for options");
    printUsage(args[0], isHAEnabled);
    return exitCode;
  }
  int timeout = -1;
  if (cliParser.hasOption("g")) {
    String strTimeout = cliParser.getOptionValue("g");
    if (strTimeout != null) {
      timeout = validateTimeout(strTimeout);
    }
    String trackingMode = null;
    if (cliParser.hasOption("client")) {
      trackingMode = "client";
    } else if (cliParser.hasOption("server")) {
      trackingMode = "server";
    } else {
      printUsage(cmd, isHAEnabled);
      return -1;
    }
    return refreshNodes(timeout, trackingMode);
  } else {
    return refreshNodes();
  }
}
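Here setOptionalArg(true) makes the value of -g optional: both "-g" and "-g 30" parse successfully, and getOptionValue returns null when the value was omitted, which is why the code above checks strTimeout for null. A standalone sketch under those assumptions:

import org.apache.commons.cli.*;

public class OptionalArgDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    Option gracefulOpt = new Option("g", "graceful", true,
        "Optional decommission timeout in seconds");
    gracefulOpt.setOptionalArg(true); // "-g" alone and "-g 30" are both legal
    opts.addOption(gracefulOpt);
    CommandLine cl = new GnuParser().parse(opts, args);
    if (cl.hasOption("g")) {
      String timeout = cl.getOptionValue("g"); // null if no value was given
      System.out.println(timeout == null
          ? "graceful, no timeout" : "graceful, timeout=" + timeout);
    }
  }
}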