use of org.apache.commons.cli.CommandLineParser in project hbase by apache.
the class AbstractHBaseTool method parseArgs.
protected CommandLine parseArgs(String[] args) throws ParseException {
  options.addOption(SHORT_HELP_OPTION, LONG_HELP_OPTION, false, "Show usage");
  addOptions();
  CommandLineParser parser = new BasicParser();
  return parser.parse(options, args);
}
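Note that BasicParser has been deprecated since Commons CLI 1.3 in favor of DefaultParser. A minimal, self-contained sketch of the same pattern using DefaultParser; the literal "h"/"help" names are an assumption standing in for SHORT_HELP_OPTION and LONG_HELP_OPTION, whose values are not shown in this snippet:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseArgsSketch {
  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    // "h" and "help" are assumed values for SHORT_HELP_OPTION / LONG_HELP_OPTION
    options.addOption("h", "help", false, "Show usage");
    // DefaultParser is the non-deprecated replacement for BasicParser (Commons CLI 1.3+)
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);
    if (cmd.hasOption("h")) {
      System.out.println("usage: tool [-h]");
    }
  }
}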
use of org.apache.commons.cli.CommandLineParser in project storm by apache.
the class StormSqlRunner method main.
public static void main(String[] args) throws Exception {
  Options options = buildOptions();
  CommandLineParser parser = new DefaultParser();
  CommandLine commandLine = parser.parse(options, args);
  if (!commandLine.hasOption(OPTION_SQL_FILE_LONG)) {
    printUsageAndExit(options, OPTION_SQL_FILE_LONG + " is required");
  }
  String filePath = commandLine.getOptionValue(OPTION_SQL_FILE_LONG);
  List<String> stmts = Files.readAllLines(Paths.get(filePath), StandardCharsets.UTF_8);
  StormSql sql = StormSql.construct();
  @SuppressWarnings("unchecked")
  Map<String, ?> conf = Utils.readStormConfig();
  if (commandLine.hasOption(OPTION_SQL_EXPLAIN_LONG)) {
    sql.explain(stmts);
  } else if (commandLine.hasOption(OPTION_SQL_TOPOLOGY_NAME_LONG)) {
    String topoName = commandLine.getOptionValue(OPTION_SQL_TOPOLOGY_NAME_LONG);
    SubmitOptions submitOptions = new SubmitOptions(TopologyInitialStatus.ACTIVE);
    sql.submit(topoName, stmts, conf, submitOptions, null, null);
  } else {
    printUsageAndExit(options, "Either " + OPTION_SQL_TOPOLOGY_NAME_LONG + " or "
        + OPTION_SQL_EXPLAIN_LONG + " must be present");
  }
}
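buildOptions() is not shown in this snippet. A plausible sketch of how it might be written with the Option builder API (available since Commons CLI 1.3, which the use of DefaultParser implies); the short names, long names, and descriptions here are assumptions, not the actual StormSqlRunner definitions:

// Hypothetical reconstruction of buildOptions(); the real option names and
// descriptions in StormSqlRunner may differ.
private static Options buildOptions() {
  Options options = new Options();
  options.addOption(Option.builder("f").longOpt("file").hasArg().argName("FILE")
      .desc("Path to a file containing SQL statements to run").build());
  options.addOption(Option.builder("e").longOpt("explain")
      .desc("Print the query plan instead of submitting the topology").build());
  options.addOption(Option.builder("t").longOpt("topology").hasArg().argName("NAME")
      .desc("Name under which to submit the topology").build());
  return options;
}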
use of org.apache.commons.cli.CommandLineParser in project hive by apache.
the class HiveMetaTool method main.
public static void main(String[] args) {
  HiveMetaTool metaTool = new HiveMetaTool();
  metaTool.init();
  CommandLineParser parser = new GnuParser();
  CommandLine line = null;
  try {
    try {
      line = parser.parse(metaTool.cmdLineOptions, args);
    } catch (ParseException e) {
      System.err.println("HiveMetaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
      printAndExit(metaTool);
    }
    if (line.hasOption("help")) {
      HelpFormatter formatter = new HelpFormatter();
      formatter.printHelp("metatool", metaTool.cmdLineOptions);
    } else if (line.hasOption("listFSRoot")) {
      if (line.hasOption("dryRun")) {
        System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
        printAndExit(metaTool);
      } else if (line.hasOption("serdePropKey")) {
        System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
        printAndExit(metaTool);
      } else if (line.hasOption("tablePropKey")) {
        System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
        printAndExit(metaTool);
      }
      metaTool.listFSRoot();
    } else if (line.hasOption("executeJDOQL")) {
      String query = line.getOptionValue("executeJDOQL");
      if (line.hasOption("dryRun")) {
        System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
        printAndExit(metaTool);
      } else if (line.hasOption("serdePropKey")) {
        System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
        printAndExit(metaTool);
      } else if (line.hasOption("tablePropKey")) {
        System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
        printAndExit(metaTool);
      }
      if (query.toLowerCase().trim().startsWith("select")) {
        metaTool.executeJDOQLSelect(query);
      } else if (query.toLowerCase().trim().startsWith("update")) {
        metaTool.executeJDOQLUpdate(query);
      } else {
        System.err.println("HiveMetaTool:Unsupported statement type");
        printAndExit(metaTool);
      }
    } else if (line.hasOption("updateLocation")) {
      String[] loc = line.getOptionValues("updateLocation");
      boolean isDryRun = false;
      String serdepropKey = null;
      String tablePropKey = null;
      if (loc.length != 2 && loc.length != 3) {
        System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 optional "
            + "arguments but was passed " + loc.length + " arguments");
        printAndExit(metaTool);
      }
      Path newPath = new Path(loc[0]);
      Path oldPath = new Path(loc[1]);
      URI oldURI = oldPath.toUri();
      URI newURI = newPath.toUri();
      if (line.hasOption("dryRun")) {
        isDryRun = true;
      }
      if (line.hasOption("serdePropKey")) {
        serdepropKey = line.getOptionValue("serdePropKey");
      }
      if (line.hasOption("tablePropKey")) {
        tablePropKey = line.getOptionValue("tablePropKey");
      }
      /*
       * Validate input - both the new and old URI should contain valid host names and valid
       * schemes. Port is optional in both URIs since an HDFS HA NN URI doesn't have a port.
       */
      if (oldURI.getHost() == null || newURI.getHost() == null) {
        System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
      } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
        System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
      } else {
        metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
      }
    } else {
      if (line.hasOption("dryRun")) {
        System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
      } else if (line.hasOption("serdePropKey")) {
        System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
      } else if (line.hasOption("tablePropKey")) {
        System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
        printAndExit(metaTool);
      } else {
        System.err.print("HiveMetaTool:Parsing failed. Reason: Invalid arguments: ");
        for (String s : line.getArgs()) {
          System.err.print(s + " ");
        }
        System.err.println();
      }
      printAndExit(metaTool);
    }
  } finally {
    metaTool.shutdownObjectStore();
  }
}
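printAndExit(metaTool) is a helper that is not part of this snippet. A sketch of what it plausibly does, assuming it prints usage via HelpFormatter and terminates with a non-zero status:

// Hypothetical sketch of the printAndExit helper; the real implementation in
// HiveMetaTool may format its output or choose its exit code differently.
private static void printAndExit(HiveMetaTool metaTool) {
  HelpFormatter formatter = new HelpFormatter();
  formatter.printHelp("metatool", metaTool.cmdLineOptions);
  System.exit(1);
}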
use of org.apache.commons.cli.CommandLineParser in project hbase by apache.
the class DataBlockEncodingTool method main.
/**
 * A command line interface to benchmarks. Parses command-line arguments and
 * runs the appropriate benchmarks.
 * @param args Should have length at least 1 and hold the file path to the HFile.
 * @throws IOException If you specified the wrong file.
 */
public static void main(final String[] args) throws IOException {
  // set up user arguments
  Options options = new Options();
  options.addOption(OPT_HFILE_NAME, true, "HFile to analyse (REQUIRED)");
  options.getOption(OPT_HFILE_NAME).setArgName("FILENAME");
  options.addOption(OPT_KV_LIMIT, true, "Maximum number of KeyValues to process. A benchmark "
      + "stops running after iterating over this many KV pairs.");
  options.getOption(OPT_KV_LIMIT).setArgName("NUMBER");
  options.addOption(OPT_MEASURE_THROUGHPUT, false, "Measure read throughput");
  options.addOption(OPT_OMIT_CORRECTNESS_TEST, false, "Omit correctness tests.");
  options.addOption(OPT_ENCODING_ALGORITHM, true, "What kind of compression algorithm to use for comparison.");
  options.addOption(OPT_BENCHMARK_N_TIMES, true, "Number of times to run each benchmark. Default value: "
      + DEFAULT_BENCHMARK_N_TIMES);
  options.addOption(OPT_BENCHMARK_N_OMIT, true, "Number of first runs of every benchmark to exclude from "
      + "statistics (" + DEFAULT_BENCHMARK_N_OMIT + " by default, so that only the last "
      + (DEFAULT_BENCHMARK_N_TIMES - DEFAULT_BENCHMARK_N_OMIT) + " times are included in statistics.)");
  // parse arguments
  CommandLineParser parser = new PosixParser();
  CommandLine cmd = null;
  try {
    cmd = parser.parse(options, args);
  } catch (ParseException e) {
    System.err.println("Could not parse arguments!");
    System.exit(-1);
    // avoid warning
    return;
  }
  int kvLimit = Integer.MAX_VALUE;
  if (cmd.hasOption(OPT_KV_LIMIT)) {
    kvLimit = Integer.parseInt(cmd.getOptionValue(OPT_KV_LIMIT));
  }
  // basic argument sanity checks
  if (!cmd.hasOption(OPT_HFILE_NAME)) {
    LOG.error("Please specify HFile name using the " + OPT_HFILE_NAME + " option");
    printUsage(options);
    System.exit(-1);
  }
  String pathName = cmd.getOptionValue(OPT_HFILE_NAME);
  String compressionName = DEFAULT_COMPRESSION.getName();
  if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) {
    compressionName = cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(Locale.ROOT);
  }
  boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT);
  boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST);
  if (cmd.hasOption(OPT_BENCHMARK_N_TIMES)) {
    benchmarkNTimes = Integer.valueOf(cmd.getOptionValue(OPT_BENCHMARK_N_TIMES));
  }
  if (cmd.hasOption(OPT_BENCHMARK_N_OMIT)) {
    benchmarkNOmit = Integer.valueOf(cmd.getOptionValue(OPT_BENCHMARK_N_OMIT));
  }
  if (benchmarkNTimes < benchmarkNOmit) {
    LOG.error("The number of times to run each benchmark (" + benchmarkNTimes
        + ") must be greater than the number of benchmark runs to exclude from statistics ("
        + benchmarkNOmit + ")");
    System.exit(1);
  }
  LOG.info("Running benchmark " + benchmarkNTimes + " times. Excluding the first "
      + benchmarkNOmit + " times from statistics.");
  final Configuration conf = HBaseConfiguration.create();
  try {
    testCodecs(conf, kvLimit, pathName, compressionName, doBenchmark, doVerify);
  } finally {
    (new CacheConfig(conf)).getBlockCache().shutdown();
  }
}
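printUsage(options) is likewise defined elsewhere in DataBlockEncodingTool. A minimal sketch of such a helper, assuming it delegates to HelpFormatter; the application name passed to printHelp is illustrative:

// Hypothetical sketch of printUsage; the actual helper may use a different
// application name or add a footer.
private static void printUsage(Options options) {
  HelpFormatter formatter = new HelpFormatter();
  // the third argument asks HelpFormatter to generate the usage line itself
  formatter.printHelp("DataBlockEncodingTool", options, true);
}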
use of org.apache.commons.cli.CommandLineParser in project hbase by apache.
the class TestJoinedScanners method main.
/**
 * Command line interface:
 * @param args command-line arguments
 * @throws Exception if there is a bug while reading from disk
 */
public static void main(final String[] args) throws Exception {
  Option encodingOption = new Option("e", "blockEncoding", true, "Data block encoding; Default: FAST_DIFF");
  encodingOption.setRequired(false);
  options.addOption(encodingOption);
  Option ratioOption = new Option("r", "selectionRatio", true, "Ratio of selected rows using essential column family");
  ratioOption.setRequired(false);
  options.addOption(ratioOption);
  Option widthOption = new Option("w", "valueWidth", true, "Width of value for non-essential column family");
  widthOption.setRequired(false);
  options.addOption(widthOption);
  CommandLineParser parser = new GnuParser();
  CommandLine cmd = parser.parse(options, args);
  if (args.length < 1) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("TestJoinedScanners", options, true);
  }
  if (cmd.hasOption("e")) {
    blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e"));
  }
  if (cmd.hasOption("r")) {
    selectionRatio = Integer.parseInt(cmd.getOptionValue("r"));
  }
  if (cmd.hasOption("w")) {
    valueWidth = Integer.parseInt(cmd.getOptionValue("w"));
  }
  // run the test
  TestJoinedScanners test = new TestJoinedScanners();
  test.testJoinedScanners();
}
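The three hasOption checks above can be condensed with the two-argument getOptionValue overload, which supplies a default when the option is absent. A sketch of that variant; the FAST_DIFF default comes from the option's own description, while the numeric defaults are illustrative assumptions:

// Condensed variant of the option handling above. "FAST_DIFF" matches the
// documented default; 30 and 128 are placeholder defaults, not the test's real ones.
blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e", "FAST_DIFF"));
selectionRatio = Integer.parseInt(cmd.getOptionValue("r", "30"));
valueWidth = Integer.parseInt(cmd.getOptionValue("w", "128"));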