Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
The class OfflineEditsViewer, method run().
/**
 * Main entry point for ToolRunner (see ToolRunner docs).
 *
 * @param argv The parameters passed to this program.
 * @return 0 on success, non-zero on error.
 */
@Override
public int run(String[] argv) throws Exception {
  Options options = buildOptions();
  if (argv.length == 0) {
    printHelp();
    return 0;
  }
  // print help and exit with a zero exit code
  if (argv.length == 1 && isHelpOption(argv[0])) {
    printHelp();
    return 0;
  }
  CommandLineParser parser = new PosixParser();
  CommandLine cmd;
  try {
    cmd = parser.parse(options, argv);
  } catch (ParseException e) {
    System.out.println(
        "Error parsing command-line options: " + e.getMessage());
    printHelp();
    return -1;
  }
  if (cmd.hasOption("h")) {
    // print help and exit with a non-zero exit code, since combining
    // help with other options is not expected
    printHelp();
    return -1;
  }
  String inputFileName = cmd.getOptionValue("i");
  String outputFileName = cmd.getOptionValue("o");
  String processor = cmd.getOptionValue("p");
  if (processor == null) {
    processor = defaultProcessor;
  }
  Flags flags = new Flags();
  if (cmd.hasOption("r")) {
    flags.setRecoveryMode();
  }
  if (cmd.hasOption("f")) {
    flags.setFixTxIds();
  }
  if (cmd.hasOption("v")) {
    flags.setPrintToScreen();
  }
  return go(inputFileName, outputFileName, processor, flags, null);
}
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
The class HadoopArchiveLogs, method handleOpts().
private void handleOpts(String[] args) throws ParseException {
  Options opts = new Options();
  Option helpOpt = new Option(HELP_OPTION, false, "Prints this message");
  Option maxEligibleOpt = new Option(MAX_ELIGIBLE_APPS_OPTION, true,
      "The maximum number of eligible apps to process (default: "
          + DEFAULT_MAX_ELIGIBLE + " (all))");
  maxEligibleOpt.setArgName("n");
  Option minNumLogFilesOpt = new Option(MIN_NUM_LOG_FILES_OPTION, true,
      "The minimum number of log files required to be eligible (default: "
          + DEFAULT_MIN_NUM_LOG_FILES + ")");
  minNumLogFilesOpt.setArgName("n");
  Option maxTotalLogsSizeOpt = new Option(MAX_TOTAL_LOGS_SIZE_OPTION, true,
      "The maximum total logs size (in megabytes) required to be eligible"
          + " (default: " + DEFAULT_MAX_TOTAL_LOGS_SIZE + ")");
  maxTotalLogsSizeOpt.setArgName("megabytes");
  Option memoryOpt = new Option(MEMORY_OPTION, true,
      "The amount of memory (in megabytes) for each container (default: "
          + DEFAULT_MEMORY + ")");
  memoryOpt.setArgName("megabytes");
  Option verboseOpt = new Option(VERBOSE_OPTION, false, "Print more details.");
  Option forceOpt = new Option(FORCE_OPTION, false,
      "Force recreating the working directory if an existing one is found. "
          + "This should only be used if you know that another instance is "
          + "not currently running");
  Option noProxyOpt = new Option(NO_PROXY_OPTION, false,
      "When specified, all processing will be done as the user running this"
          + " command (or the Yarn user if DefaultContainerExecutor is in "
          + "use). When not specified, all processing will be done as the "
          + "user who owns that application; if the user running this command"
          + " is not allowed to impersonate that user, it will fail");
  opts.addOption(helpOpt);
  opts.addOption(maxEligibleOpt);
  opts.addOption(minNumLogFilesOpt);
  opts.addOption(maxTotalLogsSizeOpt);
  opts.addOption(memoryOpt);
  opts.addOption(verboseOpt);
  opts.addOption(forceOpt);
  opts.addOption(noProxyOpt);
  try {
    CommandLineParser parser = new GnuParser();
    CommandLine commandLine = parser.parse(opts, args);
    if (commandLine.hasOption(HELP_OPTION)) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("mapred archive-logs", opts);
      System.exit(0);
    }
    if (commandLine.hasOption(MAX_ELIGIBLE_APPS_OPTION)) {
      maxEligible = Integer.parseInt(
          commandLine.getOptionValue(MAX_ELIGIBLE_APPS_OPTION));
      if (maxEligible == 0) {
        LOG.info("Setting " + MAX_ELIGIBLE_APPS_OPTION + " to 0 accomplishes "
            + "nothing. Please either set it to a negative value "
            + "(default, all) or a more reasonable value.");
        System.exit(0);
      }
    }
    if (commandLine.hasOption(MIN_NUM_LOG_FILES_OPTION)) {
      minNumLogFiles = Integer.parseInt(
          commandLine.getOptionValue(MIN_NUM_LOG_FILES_OPTION));
    }
    if (commandLine.hasOption(MAX_TOTAL_LOGS_SIZE_OPTION)) {
      maxTotalLogsSize = Long.parseLong(
          commandLine.getOptionValue(MAX_TOTAL_LOGS_SIZE_OPTION));
      maxTotalLogsSize *= 1024L * 1024L;  // convert megabytes to bytes
    }
    if (commandLine.hasOption(MEMORY_OPTION)) {
      memory = Long.parseLong(commandLine.getOptionValue(MEMORY_OPTION));
    }
    if (commandLine.hasOption(VERBOSE_OPTION)) {
      verbose = true;
    }
    if (commandLine.hasOption(FORCE_OPTION)) {
      force = true;
    }
    if (commandLine.hasOption(NO_PROXY_OPTION)) {
      proxy = false;
    }
  } catch (ParseException pe) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("mapred archive-logs", opts);
    throw pe;
  }
}
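One caveat with the snippet above: Integer.parseInt and Long.parseLong throw NumberFormatException, which is a RuntimeException rather than a ParseException, so a non-numeric value passed to one of the numeric options escapes the catch block and never reaches the help output. Below is a hedged sketch of one way to convert such failures into parse errors; the parseIntOption helper is a hypothetical addition, not part of HadoopArchiveLogs.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.ParseException;

// Hypothetical helper: read a numeric option value, turning a bad number
// into a ParseException so an existing catch (ParseException) handles it.
static int parseIntOption(CommandLine cmd, String opt, int defaultValue)
    throws ParseException {
  if (!cmd.hasOption(opt)) {
    return defaultValue;
  }
  String raw = cmd.getOptionValue(opt);
  try {
    return Integer.parseInt(raw);
  } catch (NumberFormatException nfe) {
    throw new ParseException("Option -" + opt + " expects a number, got: " + raw);
  }
}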
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
The class HadoopArchiveLogsRunner, method handleOpts().
private void handleOpts(String[] args) throws ParseException {
  Options opts = new Options();
  Option appIdOpt = new Option(APP_ID_OPTION, true, "Application ID");
  appIdOpt.setRequired(true);
  Option userOpt = new Option(USER_OPTION, true, "User");
  userOpt.setRequired(true);
  Option workingDirOpt = new Option(WORKING_DIR_OPTION, true,
      "Working Directory");
  workingDirOpt.setRequired(true);
  Option remoteLogDirOpt = new Option(REMOTE_ROOT_LOG_DIR_OPTION, true,
      "Remote Root Log Directory");
  remoteLogDirOpt.setRequired(true);
  Option suffixOpt = new Option(SUFFIX_OPTION, true, "Suffix");
  suffixOpt.setRequired(true);
  // despite the "Use Proxy" description, this flag disables proxying below
  Option useProxyOpt = new Option(NO_PROXY_OPTION, false, "Use Proxy");
  opts.addOption(appIdOpt);
  opts.addOption(userOpt);
  opts.addOption(workingDirOpt);
  opts.addOption(remoteLogDirOpt);
  opts.addOption(suffixOpt);
  opts.addOption(useProxyOpt);
  CommandLineParser parser = new GnuParser();
  CommandLine commandLine = parser.parse(opts, args);
  appId = commandLine.getOptionValue(APP_ID_OPTION);
  user = commandLine.getOptionValue(USER_OPTION);
  workingDir = commandLine.getOptionValue(WORKING_DIR_OPTION);
  remoteLogDir = commandLine.getOptionValue(REMOTE_ROOT_LOG_DIR_OPTION);
  suffix = commandLine.getOptionValue(SUFFIX_OPTION);
  proxy = true;
  if (commandLine.hasOption(NO_PROXY_OPTION)) {
    proxy = false;
  }
}
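Because the first five options are marked setRequired(true), parse throws a MissingOptionException (a ParseException subclass) when any of them is absent, and handleOpts simply lets it propagate. Here is a small standalone demonstration of that behavior, using an illustrative option name rather than Hadoop's constants.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.MissingOptionException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class RequiredOptionDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    Option appId = new Option("appId", true, "Application ID");
    appId.setRequired(true);  // parsing fails if -appId is missing
    opts.addOption(appId);
    CommandLineParser parser = new GnuParser(); // deprecated in 1.3+, as in the snippet
    try {
      CommandLine cmd = parser.parse(opts, new String[] {});
    } catch (MissingOptionException moe) {
      // Reached, e.g. "Missing required option: appId"
      System.err.println(moe.getMessage());
    }
  }
}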
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
The class OptionsParser, method parse().
/**
 * The parse method parses the command-line options, and creates
 * a corresponding DistCpOptions object.
 * @param args Command-line arguments (excluding the options consumed
 *             by the GenericOptionsParser).
 * @return The DistCpOptions object corresponding to the specified
 *         command-line.
 * @throws IllegalArgumentException Thrown if the parse fails.
 */
public static DistCpOptions parse(String[] args)
    throws IllegalArgumentException {
  CommandLineParser parser = new CustomParser();
  CommandLine command;
  try {
    command = parser.parse(cliOptions, args, true);
  } catch (ParseException e) {
    throw new IllegalArgumentException(
        "Unable to parse arguments. " + Arrays.toString(args), e);
  }
  DistCpOptions option = parseSourceAndTargetPaths(command);
  option.setIgnoreFailures(
      command.hasOption(DistCpOptionSwitch.IGNORE_FAILURES.getSwitch()));
  option.setAtomicCommit(
      command.hasOption(DistCpOptionSwitch.ATOMIC_COMMIT.getSwitch()));
  option.setSyncFolder(
      command.hasOption(DistCpOptionSwitch.SYNC_FOLDERS.getSwitch()));
  option.setOverwrite(
      command.hasOption(DistCpOptionSwitch.OVERWRITE.getSwitch()));
  option.setAppend(
      command.hasOption(DistCpOptionSwitch.APPEND.getSwitch()));
  option.setDeleteMissing(
      command.hasOption(DistCpOptionSwitch.DELETE_MISSING.getSwitch()));
  option.setSkipCRC(
      command.hasOption(DistCpOptionSwitch.SKIP_CRC.getSwitch()));
  if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())
      && option.shouldAtomicCommit()) {
    String workPath = getVal(command, DistCpOptionSwitch.WORK_PATH.getSwitch());
    if (workPath != null && !workPath.isEmpty()) {
      option.setAtomicWorkPath(new Path(workPath));
    }
  } else if (command.hasOption(DistCpOptionSwitch.WORK_PATH.getSwitch())) {
    throw new IllegalArgumentException(
        "-tmp work-path can only be specified along with -atomic");
  }
  if (command.hasOption(DistCpOptionSwitch.LOG_PATH.getSwitch())) {
    option.setLogPath(
        new Path(getVal(command, DistCpOptionSwitch.LOG_PATH.getSwitch())));
  }
  if (command.hasOption(DistCpOptionSwitch.BLOCKING.getSwitch())) {
    option.setBlocking(false);
  }
  parseBandwidth(command, option);
  parseNumListStatusThreads(command, option);
  parseMaxMaps(command, option);
  if (command.hasOption(DistCpOptionSwitch.COPY_STRATEGY.getSwitch())) {
    option.setCopyStrategy(
        getVal(command, DistCpOptionSwitch.COPY_STRATEGY.getSwitch()));
  }
  parsePreserveStatus(command, option);
  if (command.hasOption(DistCpOptionSwitch.DIFF.getSwitch())) {
    String[] snapshots = getVals(command, DistCpOptionSwitch.DIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    option.setUseDiff(snapshots[0], snapshots[1]);
  }
  if (command.hasOption(DistCpOptionSwitch.RDIFF.getSwitch())) {
    String[] snapshots = getVals(command, DistCpOptionSwitch.RDIFF.getSwitch());
    checkSnapshotsArgs(snapshots);
    option.setUseRdiff(snapshots[0], snapshots[1]);
  }
  parseFileLimit(command);
  parseSizeLimit(command);
  if (command.hasOption(DistCpOptionSwitch.FILTERS.getSwitch())) {
    option.setFiltersFile(
        getVal(command, DistCpOptionSwitch.FILTERS.getSwitch()));
  }
  option.validate();
  return option;
}
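The third argument in parse(cliOptions, args, true) is stopAtNonOption: tokens that are not recognized options are collected instead of rejected, and later surface through CommandLine.getArgs(), which is presumably how parseSourceAndTargetPaths picks up the source and target paths. A minimal sketch of the mechanism, with an illustrative option and made-up paths:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class StopAtNonOptionDemo {
  public static void main(String[] args) throws ParseException {
    Options opts = new Options();
    opts.addOption("overwrite", false, "overwrite target");
    // stopAtNonOption=true: bare path arguments are not parse errors
    CommandLine cmd = new GnuParser().parse(opts,
        new String[] {"-overwrite", "hdfs://nn1/src", "hdfs://nn2/dst"}, true);
    System.out.println(cmd.hasOption("overwrite"));  // true
    for (String leftover : cmd.getArgs()) {
      System.out.println(leftover);  // hdfs://nn1/src, then hdfs://nn2/dst
    }
  }
}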
Use of org.apache.commons.cli.CommandLineParser in project hadoop by apache.
The class RumenToSLSConverter, method main().
public static void main(String[] args) throws Exception {
  Options options = new Options();
  options.addOption("input", true, "input rumen json file");
  options.addOption("outputJobs", true, "output jobs file");
  options.addOption("outputNodes", true, "output nodes file");
  CommandLineParser parser = new GnuParser();
  CommandLine cmd = parser.parse(options, args);
  if (!cmd.hasOption("input") || !cmd.hasOption("outputJobs")
      || !cmd.hasOption("outputNodes")) {
    System.err.println();
    System.err.println("ERROR: Missing input or output file");
    System.err.println();
    System.err.println("LoadGenerator creates an SLS script "
        + "from a Hadoop Rumen output");
    System.err.println();
    System.err.println("Options: -input FILE -outputJobs FILE "
        + "-outputNodes FILE");
    System.err.println();
    System.exit(1);
  }
  String inputFile = cmd.getOptionValue("input");
  String outputJsonFile = cmd.getOptionValue("outputJobs");
  String outputNodeFile = cmd.getOptionValue("outputNodes");
  // check that the input exists and the output files do not
  if (!new File(inputFile).exists()) {
    System.err.println();
    System.err.println("ERROR: input does not exist");
    System.exit(1);
  }
  if (new File(outputJsonFile).exists()) {
    System.err.println();
    System.err.println("ERROR: output job file already exists");
    System.exit(1);
  }
  if (new File(outputNodeFile).exists()) {
    System.err.println();
    System.err.println("ERROR: output node file already exists");
    System.exit(1);
  }
  File jsonFile = new File(outputJsonFile);
  if (!jsonFile.getParentFile().exists()
      && !jsonFile.getParentFile().mkdirs()) {
    System.err.println("ERROR: Cannot create output directory in path: "
        + jsonFile.getParentFile().getAbsoluteFile());
    System.exit(1);
  }
  File nodeFile = new File(outputNodeFile);
  if (!nodeFile.getParentFile().exists()
      && !nodeFile.getParentFile().mkdirs()) {
    System.err.println("ERROR: Cannot create output directory in path: "
        + nodeFile.getParentFile().getAbsoluteFile());
    System.exit(1);
  }
  generateSLSLoadFile(inputFile, outputJsonFile);
  generateSLSNodeFile(outputNodeFile);
}
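Note that addOption("input", true, ...) registers a multi-character short option, which is why the usage message advertises single-dash forms like -input FILE. A quick illustration with placeholder file names (not the converter's real defaults):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class SingleDashDemo {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("input", true, "input rumen json file");
    options.addOption("outputJobs", true, "output jobs file");
    options.addOption("outputNodes", true, "output nodes file");
    // single-dash, multi-character option names parse as written above
    CommandLine cmd = new GnuParser().parse(options, new String[] {
        "-input", "rumen.json", "-outputJobs", "jobs.json",
        "-outputNodes", "nodes.json"});
    System.out.println(cmd.getOptionValue("input"));       // rumen.json
    System.out.println(cmd.getOptionValue("outputJobs"));  // jobs.json
    System.out.println(cmd.getOptionValue("outputNodes")); // nodes.json
  }
}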