Use of org.apache.hbase.thirdparty.org.apache.commons.cli.Options in project hbase by apache.
Example: class RSGroupMajorCompactionTTL, method getOptions().
protected Options getOptions() {
  // Start from the options shared by all compaction tools, then add the
  // mandatory rsgroup selector this tool needs.
  final Options options = getCommonOptions();
  final Option rsGroupOption = Option.builder("rsgroup")
    .required()
    .hasArg()
    .desc("Tables of rsgroup to be compacted")
    .build();
  options.addOption(rsGroupOption);
  return options;
}
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.Options in project hbase by apache.
Example: class DataBlockEncodingTool, method main().
/**
 * A command line interface to benchmarks. Parses command-line arguments and
 * runs the appropriate benchmarks.
 * @param args Should have length at least 1 and holds the file path to HFile.
 * @throws IOException If you specified the wrong file.
 */
public static void main(final String[] args) throws IOException {
  // set up user arguments
  Options options = new Options();
  options.addOption(OPT_HFILE_NAME, true, "HFile to analyse (REQUIRED)");
  options.getOption(OPT_HFILE_NAME).setArgName("FILENAME");
  options.addOption(OPT_KV_LIMIT, true,
    "Maximum number of KeyValues to process. A benchmark stops running "
      + "after iterating over this many KV pairs.");
  options.getOption(OPT_KV_LIMIT).setArgName("NUMBER");
  options.addOption(OPT_MEASURE_THROUGHPUT, false, "Measure read throughput");
  options.addOption(OPT_OMIT_CORRECTNESS_TEST, false, "Omit correctness tests.");
  options.addOption(OPT_COMPRESSION_ALGORITHM, true,
    "What kind of compression algorithm use for comparison.");
  options.addOption(OPT_BENCHMARK_N_TIMES, true,
    "Number of times to run each benchmark. Default value: " + DEFAULT_BENCHMARK_N_TIMES);
  options.addOption(OPT_BENCHMARK_N_OMIT, true,
    "Number of first runs of every benchmark to exclude from "
      + "statistics (" + DEFAULT_BENCHMARK_N_OMIT + " by default, so that "
      + "only the last " + (DEFAULT_BENCHMARK_N_TIMES - DEFAULT_BENCHMARK_N_OMIT)
      + " times are included in statistics.)");

  // parse arguments
  // DefaultParser replaces the deprecated PosixParser (and matches the parser
  // used by the other tools in this project, e.g. MajorCompactor).
  CommandLineParser parser = new DefaultParser();
  CommandLine cmd = null;
  try {
    cmd = parser.parse(options, args);
  } catch (ParseException e) {
    // Include the parser's explanation so the user can see what was wrong.
    System.err.println("Could not parse arguments! " + e.getMessage());
    System.exit(-1);
    // avoid warning
    return;
  }

  int kvLimit = Integer.MAX_VALUE;
  if (cmd.hasOption(OPT_KV_LIMIT)) {
    kvLimit = Integer.parseInt(cmd.getOptionValue(OPT_KV_LIMIT));
    if (kvLimit <= 0) {
      // A non-positive limit would make the benchmark process nothing;
      // previously this was only logged and execution continued with an
      // invalid limit. Treat it as a fatal argument error instead.
      LOG.error("KV_LIMIT should not be less than 1.");
      printUsage(options);
      System.exit(-1);
    }
  }

  // basic argument sanity checks
  if (!cmd.hasOption(OPT_HFILE_NAME)) {
    LOG.error("Please specify HFile name using the " + OPT_HFILE_NAME + " option");
    printUsage(options);
    System.exit(-1);
  }

  String pathName = cmd.getOptionValue(OPT_HFILE_NAME);
  String compressionName = DEFAULT_COMPRESSION.getName();
  if (cmd.hasOption(OPT_COMPRESSION_ALGORITHM)) {
    // Locale.ROOT keeps the lowercasing locale-independent.
    compressionName = cmd.getOptionValue(OPT_COMPRESSION_ALGORITHM).toLowerCase(Locale.ROOT);
  }
  boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT);
  boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST);

  // parseInt avoids the needless boxing of Integer.valueOf for an int field.
  if (cmd.hasOption(OPT_BENCHMARK_N_TIMES)) {
    benchmarkNTimes = Integer.parseInt(cmd.getOptionValue(OPT_BENCHMARK_N_TIMES));
  }
  if (cmd.hasOption(OPT_BENCHMARK_N_OMIT)) {
    benchmarkNOmit = Integer.parseInt(cmd.getOptionValue(OPT_BENCHMARK_N_OMIT));
  }
  if (benchmarkNTimes < benchmarkNOmit) {
    LOG.error("The number of times to run each benchmark (" + benchmarkNTimes
      + ") must be greater than the number of benchmark runs to exclude "
      + "from statistics (" + benchmarkNOmit + ")");
    System.exit(1);
  }
  LOG.info("Running benchmark " + benchmarkNTimes + " times. " + "Excluding the first "
    + benchmarkNOmit + " times from statistics.");

  final Configuration conf = HBaseConfiguration.create();
  testCodecs(conf, kvLimit, pathName, compressionName, doBenchmark, doVerify);
}
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.Options in project hbase by apache.
Example: class MajorCompactor, method run().
/**
 * Tool entry point: parses the command line, configures a {@link MajorCompactor}
 * for the requested table/column families, and triggers compactions.
 * @param args command-line arguments (see {@code getCommonOptions()} plus
 *   {@code -table} and {@code -cf})
 * @return number of errors encountered ({@code ERRORS.size()}), or -1 on a
 *   command-line parsing failure
 */
@Override
public int run(String[] args) throws Exception {
  Options options = getCommonOptions();
  options.addOption(Option.builder("table").required().desc("table name").hasArg().build());
  options.addOption(Option.builder("cf").optionalArg(true)
    .desc("column families: comma separated eg: a,b,c").hasArg().build());

  final CommandLineParser cmdLineParser = new DefaultParser();
  CommandLine commandLine;
  try {
    // DefaultParser.parse either returns a non-null CommandLine or throws
    // ParseException, so no null check of the result is needed afterwards.
    commandLine = cmdLineParser.parse(options, args);
  } catch (ParseException parseException) {
    System.out.println("ERROR: Unable to parse command-line arguments " + Arrays.toString(args)
      + " due to: " + parseException);
    printUsage(options);
    return -1;
  }

  String tableName = commandLine.getOptionValue("table");
  String cf = commandLine.getOptionValue("cf", null);
  Set<String> families = Sets.newHashSet();
  if (cf != null) {
    Iterables.addAll(families, Splitter.on(",").split(cf));
  }

  Configuration configuration = getConf();
  int concurrency = Integer.parseInt(commandLine.getOptionValue("servers"));
  // Default "minModTime" to now: only store files older than this are compacted.
  long minModTime = Long.parseLong(
    commandLine.getOptionValue("minModTime", String.valueOf(EnvironmentEdgeManager.currentTime())));
  String quorum = commandLine.getOptionValue("zk", configuration.get(HConstants.ZOOKEEPER_QUORUM));
  String rootDir = commandLine.getOptionValue("rootDir", configuration.get(HConstants.HBASE_DIR));
  long sleep = Long.parseLong(commandLine.getOptionValue("sleep", Long.toString(30000)));
  // -1 means "all servers" / "all regions per server".
  int numServers = Integer.parseInt(commandLine.getOptionValue("numservers", "-1"));
  int numRegions = Integer.parseInt(commandLine.getOptionValue("numregions", "-1"));

  configuration.set(HConstants.HBASE_DIR, rootDir);
  configuration.set(HConstants.ZOOKEEPER_QUORUM, quorum);

  MajorCompactor compactor = new MajorCompactor(configuration, TableName.valueOf(tableName),
    families, concurrency, minModTime, sleep);
  compactor.setNumServers(numServers);
  compactor.setNumRegions(numRegions);
  compactor.setSkipWait(commandLine.hasOption("skipWait"));

  compactor.initializeWorkQueues();
  // With -dryRun we only build the work queues (listing regions needing
  // compaction) and skip the actual compactions.
  if (!commandLine.hasOption("dryRun")) {
    compactor.compactAllRegions();
  }
  compactor.shutdown();
  return ERRORS.size();
}
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.Options in project hbase by apache.
Example: class MajorCompactor, method getCommonOptions().
/**
 * Builds the command-line options shared by the major-compaction tools.
 * Subclasses add their tool-specific options (e.g. -table) on top of these.
 */
protected Options getCommonOptions() {
  // Options.addOption returns the Options instance, so the whole set can be
  // declared as a single fluent chain.
  return new Options()
    .addOption(
      Option.builder("servers").required().hasArg().desc("Concurrent servers compacting").build())
    .addOption(Option.builder("minModTime").hasArg()
      .desc("Compact if store files have modification time < minModTime").build())
    .addOption(Option.builder("zk").optionalArg(true).hasArg().desc("zk quorum").build())
    .addOption(Option.builder("rootDir").optionalArg(true).hasArg().desc("hbase.rootDir").build())
    .addOption(Option.builder("sleep").hasArg()
      .desc("Time to sleepForMs (ms) for checking compaction status per region and available "
        + "work queues: default 30s")
      .build())
    .addOption(Option.builder("retries").hasArg()
      .desc("Max # of retries for a compaction request, defaults to 3").build())
    // Flag options (no argument): dryRun and skipWait.
    .addOption(Option.builder("dryRun").hasArg(false)
      .desc("Dry run, will just output a list of regions that require compaction based on "
        + "parameters passed")
      .build())
    .addOption(
      Option.builder("skipWait").hasArg(false).desc("Skip waiting after triggering compaction.")
        .build())
    .addOption(Option.builder("numservers").optionalArg(true).hasArg()
      .desc("Number of servers to compact in this run, defaults to all").build())
    .addOption(Option.builder("numregions").optionalArg(true).hasArg()
      .desc("Number of regions to compact per server, defaults to all").build());
}
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.Options in project hbase by apache.
Example: class MajorCompactorTTL, method getOptions().
private Options getOptions() {
  // Shared compaction options plus the mandatory table selector.
  final Options options = getCommonOptions();
  final Option tableOption = Option.builder("table")
    .required()
    .hasArg()
    .desc("table name")
    .build();
  options.addOption(tableOption);
  return options;
}
Aggregations