Use of org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine in project hbase by apache.
From the class RegionPlacementMaintainer, method main:
public static void main(String[] args) throws IOException {
  Options opt = new Options();
  opt.addOption("w", "write", false, "write the assignments to hbase:meta only");
  opt.addOption("u", "update", false, "update the assignments to hbase:meta and RegionServers together");
  opt.addOption("n", "dry-run", false, "do not write assignments to META");
  opt.addOption("v", "verify", false, "verify current assignments against META");
  opt.addOption("p", "print", false, "print the current assignment plan in META");
  opt.addOption("h", "help", false, "print usage");
  opt.addOption("d", "verification-details", false, "print the details of verification report");
  opt.addOption("zk", true, "to set the zookeeper quorum");
  opt.addOption("fs", true, "to set HDFS");
  opt.addOption("hbase_root", true, "to set hbase_root directory");
  opt.addOption("overwrite", false, "overwrite the favored nodes for a single region, "
    + "for example: -overwrite -r regionName -f server1:port,server2:port,server3:port");
  opt.addOption("r", true, "The region name that needs to be updated");
  opt.addOption("f", true, "The new favored nodes");
  opt.addOption("tables", true, "The list of table names split by ','; "
    + "for example: -tables t1,t2,...,tn");
  opt.addOption("l", "locality", true, "enforce the maximum locality");
  opt.addOption("m", "min-move", true, "enforce minimum assignment move");
  opt.addOption("diff", false, "calculate difference between assignment plans");
  opt.addOption("munkres", false, "use munkres to place secondaries and tertiaries");
  opt.addOption("ld", "locality-dispersion", false, "print locality and dispersion "
    + "information for current plan");
  try {
    CommandLine cmd = new GnuParser().parse(opt, args);
    Configuration conf = HBaseConfiguration.create();
    boolean enforceMinAssignmentMove = true;
    boolean enforceLocality = true;
    boolean verificationDetails = false;
    // Read all the options
    if ((cmd.hasOption("l") && cmd.getOptionValue("l").equalsIgnoreCase("false"))
      || (cmd.hasOption("locality") && cmd.getOptionValue("locality").equalsIgnoreCase("false"))) {
      enforceLocality = false;
    }
    if ((cmd.hasOption("m") && cmd.getOptionValue("m").equalsIgnoreCase("false"))
      || (cmd.hasOption("min-move") && cmd.getOptionValue("min-move").equalsIgnoreCase("false"))) {
      enforceMinAssignmentMove = false;
    }
    if (cmd.hasOption("zk")) {
      conf.set(HConstants.ZOOKEEPER_QUORUM, cmd.getOptionValue("zk"));
      LOG.info("Setting the zk quorum: " + conf.get(HConstants.ZOOKEEPER_QUORUM));
    }
    if (cmd.hasOption("fs")) {
      conf.set(FileSystem.FS_DEFAULT_NAME_KEY, cmd.getOptionValue("fs"));
      LOG.info("Setting the HDFS: " + conf.get(FileSystem.FS_DEFAULT_NAME_KEY));
    }
    if (cmd.hasOption("hbase_root")) {
      conf.set(HConstants.HBASE_DIR, cmd.getOptionValue("hbase_root"));
      LOG.info("Setting the hbase root directory: " + conf.get(HConstants.HBASE_DIR));
    }
    // Create the region placement object
    try (RegionPlacementMaintainer rp =
      new RegionPlacementMaintainer(conf, enforceLocality, enforceMinAssignmentMove)) {
      if (cmd.hasOption("d") || cmd.hasOption("verification-details")) {
        verificationDetails = true;
      }
      if (cmd.hasOption("tables")) {
        String tableNameListStr = cmd.getOptionValue("tables");
        String[] tableNames = StringUtils.split(tableNameListStr, ",");
        rp.setTargetTableName(tableNames);
      }
      if (cmd.hasOption("munkres")) {
        USE_MUNKRES_FOR_PLACING_SECONDARY_AND_TERTIARY = true;
      }
      // Read all the modes
      if (cmd.hasOption("v") || cmd.hasOption("verify")) {
        // Verify the region placement.
        rp.verifyRegionPlacement(verificationDetails);
      } else if (cmd.hasOption("n") || cmd.hasOption("dry-run")) {
        // Generate the assignment plan only, without updating hbase:meta or the RegionServers
        FavoredNodesPlan plan = rp.getNewAssignmentPlan();
        printAssignmentPlan(plan);
      } else if (cmd.hasOption("w") || cmd.hasOption("write")) {
        // Generate the new assignment plan
        FavoredNodesPlan plan = rp.getNewAssignmentPlan();
        // Print the new assignment plan
        printAssignmentPlan(plan);
        // Write the new assignment plan to META
        rp.updateAssignmentPlanToMeta(plan);
      } else if (cmd.hasOption("u") || cmd.hasOption("update")) {
        // Generate the new assignment plan
        FavoredNodesPlan plan = rp.getNewAssignmentPlan();
        // Print the new assignment plan
        printAssignmentPlan(plan);
        // Update the assignment in hbase:meta and on the RegionServers
        rp.updateAssignmentPlan(plan);
      } else if (cmd.hasOption("diff")) {
        FavoredNodesPlan newPlan = rp.getNewAssignmentPlan();
        Map<String, Map<String, Float>> locality =
          FSUtils.getRegionDegreeLocalityMappingFromFS(conf);
        Map<TableName, Integer> movesPerTable = rp.getRegionsMovement(newPlan);
        rp.checkDifferencesWithOldPlan(movesPerTable, locality, newPlan);
        System.out.println("Do you want to update the assignment plan? [y/n]");
        Scanner s = new Scanner(System.in);
        String input = s.nextLine().trim();
        if (input.equals("y")) {
          System.out.println("Updating assignment plan...");
          rp.updateAssignmentPlan(newPlan);
        }
        s.close();
      } else if (cmd.hasOption("ld")) {
        Map<String, Map<String, Float>> locality =
          FSUtils.getRegionDegreeLocalityMappingFromFS(conf);
        rp.printLocalityAndDispersionForCurrentPlan(locality);
      } else if (cmd.hasOption("p") || cmd.hasOption("print")) {
        FavoredNodesPlan plan = rp.getRegionAssignmentSnapshot().getExistingAssignmentPlan();
        printAssignmentPlan(plan);
} else if (cmd.hasOption("overwrite")) {
if (!cmd.hasOption("f") || !cmd.hasOption("r")) {
throw new IllegalArgumentException("Please specify: " + " -update -r regionName -f server1:port,server2:port,server3:port");
}
String regionName = cmd.getOptionValue("r");
String favoredNodesStr = cmd.getOptionValue("f");
LOG.info("Going to update the region " + regionName + " with the new favored nodes " + favoredNodesStr);
List<ServerName> favoredNodes = null;
RegionInfo regionInfo = rp.getRegionAssignmentSnapshot().getRegionNameToRegionInfoMap().get(regionName);
if (regionInfo == null) {
LOG.error("Cannot find the region " + regionName + " from the META");
} else {
try {
favoredNodes = getFavoredNodeList(favoredNodesStr);
} catch (IllegalArgumentException e) {
LOG.error("Cannot parse the invalid favored nodes because " + e);
}
FavoredNodesPlan newPlan = new FavoredNodesPlan();
newPlan.updateFavoredNodesMap(regionInfo, favoredNodes);
rp.updateAssignmentPlan(newPlan);
}
} else {
printHelp(opt);
}
    }
  } catch (ParseException e) {
    printHelp(opt);
  }
}
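For reference, a minimal stand-alone sketch of the same GnuParser pattern used above. The class and option names here are illustrative, not part of HBase, and note that GnuParser is deprecated in newer commons-cli releases in favor of DefaultParser:

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.GnuParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;

public class CliSketch {
  public static void main(String[] args) {
    Options opt = new Options();
    // A boolean flag (hasArg = false) and an option that takes a value (hasArg = true).
    opt.addOption("v", "verify", false, "run in verify mode");
    opt.addOption("zk", true, "zookeeper quorum");
    try {
      CommandLine cmd = new GnuParser().parse(opt, args);
      // hasOption resolves both the short and the long name of a flag.
      boolean verify = cmd.hasOption("v") || cmd.hasOption("verify");
      String quorum = cmd.hasOption("zk") ? cmd.getOptionValue("zk") : "localhost";
      System.out.println("verify=" + verify + ", quorum=" + quorum);
    } catch (ParseException e) {
      System.err.println("Usage error: " + e.getMessage());
    }
  }
}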
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine in project hbase by apache.
From the class RegionReplicationLagEvaluation, method run:
@Override
public int run(String[] args) throws Exception {
  TableName tableName;
  int rlen;
  int vlen;
  int rows;
  try {
    CommandLine cli = new DefaultParser().parse(OPTIONS, args);
    tableName = TableName.valueOf(cli.getOptionValue("t", TABLE_NAME));
    rlen = Integer.parseInt(cli.getOptionValue("rlen", String.valueOf(ROW_LENGTH)));
    vlen = Integer.parseInt(cli.getOptionValue("vlen", String.valueOf(VALUE_LENGTH)));
    rows = Integer.parseInt(cli.getOptionValue("r"));
  } catch (Exception e) {
    LOG.warn("Error parsing command line options", e);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(getClass().getName(), OPTIONS);
    return -1;
  }
  exec(tableName, rlen, vlen, rows);
  return 0;
}
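The getOptionValue(opt, default) overload is what lets the tool fall back to its constants when a flag is absent. A minimal sketch of that pattern, with made-up option names and defaults:

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;

public class LagCliSketch {
  // Made-up option set standing in for the tool's OPTIONS constant.
  private static final Options OPTIONS = new Options()
    .addOption("t", true, "table name")
    .addOption("r", true, "number of rows (required)");

  public static void main(String[] args) {
    try {
      CommandLine cli = new DefaultParser().parse(OPTIONS, args);
      // getOptionValue(opt, default) falls back to the default when the flag is absent.
      String table = cli.getOptionValue("t", "lag_test");
      int rows = Integer.parseInt(cli.getOptionValue("r")); // fails fast if -r is missing
      System.out.println("table=" + table + ", rows=" + rows);
    } catch (Exception e) {
      new HelpFormatter().printHelp(LagCliSketch.class.getName(), OPTIONS);
    }
  }
}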
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine in project hbase by apache.
From the class RestoreDriver, method run:
@Override
public int run(String[] args) {
  Objects.requireNonNull(conf, "Tool configuration is not initialized");
  CommandLine cmd;
  try {
    // parse the command line arguments
    cmd = parseArgs(args);
    cmdLineArgs = args;
  } catch (Exception e) {
    System.err.println("Error when parsing command-line arguments: " + e.getMessage());
    printToolUsage();
    return EXIT_FAILURE;
  }
  if (cmd.hasOption(SHORT_HELP_OPTION) || cmd.hasOption(LONG_HELP_OPTION)) {
    printToolUsage();
    return EXIT_FAILURE;
  }
  processOptions(cmd);
  int ret = EXIT_FAILURE;
  try {
    ret = doWork();
  } catch (Exception e) {
    LOG.error("Error running command-line tool", e);
    return EXIT_FAILURE;
  }
  return ret;
}
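Both this driver and BackupDriver below follow the same skeleton: parse first, honor the help flags, then delegate to doWork(). A hypothetical condensed version of that skeleton (the real tools inherit parseArgs and their option set from AbstractHBaseTool; the names here are illustrative):

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.DefaultParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;

public abstract class DriverSketch {
  protected static final int EXIT_FAILURE = 1;
  private final Options options = new Options().addOption("h", "help", false, "show usage");

  public int run(String[] args) {
    CommandLine cmd;
    try {
      cmd = new DefaultParser().parse(options, args);
    } catch (Exception e) {
      System.err.println("Error when parsing command-line arguments: " + e.getMessage());
      return EXIT_FAILURE;
    }
    if (cmd.hasOption("h")) {
      // Help requested: a real driver would print its usage text here.
      return EXIT_FAILURE;
    }
    try {
      return doWork(cmd);
    } catch (Exception e) {
      e.printStackTrace();
      return EXIT_FAILURE;
    }
  }

  protected abstract int doWork(CommandLine cmd) throws Exception;
}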
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine in project hbase by apache.
From the class BackupDriver, method run:
@Override
public int run(String[] args) throws IOException {
  Objects.requireNonNull(conf, "Tool configuration is not initialized");
  CommandLine cmd;
  try {
    // parse the command line arguments
    cmd = parseArgs(args);
    cmdLineArgs = args;
  } catch (Exception e) {
    System.err.println("Error when parsing command-line arguments: " + e.getMessage());
    printToolUsage();
    return EXIT_FAILURE;
  }
  processOptions(cmd);
  int ret = EXIT_FAILURE;
  try {
    ret = doWork();
  } catch (Exception e) {
    LOG.error("Error running command-line tool", e);
    return EXIT_FAILURE;
  }
  return ret;
}
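A driver like this is normally launched through Hadoop's ToolRunner, which consumes the generic options (-D, -conf, ...) before run(String[]) sees the arguments. A sketch of such a launcher, assuming BackupDriver implements org.apache.hadoop.util.Tool (as AbstractHBaseTool subclasses do); the tool's real main method may differ:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.backup.BackupDriver;
import org.apache.hadoop.util.ToolRunner;

public class LaunchSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // ToolRunner strips the generic Hadoop options, then calls BackupDriver.run(remainingArgs).
    int exitCode = ToolRunner.run(conf, new BackupDriver(), args);
    System.exit(exitCode);
  }
}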
Use of org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine in project hbase by apache.
From the class CreateRandomStoreFile, method run:
/**
 * Runs the tool.
 * @param args command-line arguments
 * @return true in case of success
 * @throws IOException if writing the store file fails
 */
public boolean run(String[] args) throws IOException {
  options.addOption(OUTPUT_DIR_OPTION, "output_dir", true, "Output directory");
  options.addOption(NUM_KV_OPTION, "num_kv", true, "Number of key/value pairs");
  options.addOption(KEY_SIZE_OPTION, "key_size", true, "Average key size");
  options.addOption(VALUE_SIZE_OPTION, "value_size", true, "Average value size");
  options.addOption(HFILE_VERSION_OPTION, "hfile_version", true, "HFile version to create");
  options.addOption(COMPRESSION_OPTION, "compression", true,
    "Compression type, one of " + Arrays.toString(Compression.Algorithm.values()));
  options.addOption(BLOOM_FILTER_OPTION, "bloom_filter", true,
    "Bloom filter type, one of " + Arrays.toString(BloomType.values()));
  options.addOption(BLOOM_FILTER_PARAM_OPTION, "bloom_param", true,
    "The parameter of the Bloom filter");
  options.addOption(BLOCK_SIZE_OPTION, "block_size", true, "HFile block size");
  options.addOption(BLOOM_BLOCK_SIZE_OPTION, "bloom_block_size", true,
    "Compound Bloom filter block size");
  options.addOption(INDEX_BLOCK_SIZE_OPTION, "index_block_size", true, "Index block size");
  if (args.length == 0) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(CreateRandomStoreFile.class.getSimpleName(), options, true);
    return false;
  }
  CommandLineParser parser = new PosixParser();
  CommandLine cmdLine;
  try {
    cmdLine = parser.parse(options, args);
  } catch (ParseException ex) {
    LOG.error(ex.toString(), ex);
    return false;
  }
  if (!cmdLine.hasOption(OUTPUT_DIR_OPTION)) {
    LOG.error("Output directory is not specified");
    return false;
  }
  if (!cmdLine.hasOption(NUM_KV_OPTION)) {
    LOG.error("The number of key/value pairs is not specified");
    return false;
  }
  if (!cmdLine.hasOption(KEY_SIZE_OPTION)) {
    LOG.error("Key size is not specified");
    return false;
  }
  if (!cmdLine.hasOption(VALUE_SIZE_OPTION)) {
    LOG.error("Value size is not specified");
    return false;
  }
  Configuration conf = HBaseConfiguration.create();
  Path outputDir = new Path(cmdLine.getOptionValue(OUTPUT_DIR_OPTION));
  long numKV = Long.parseLong(cmdLine.getOptionValue(NUM_KV_OPTION));
  configureKeyValue(numKV, Integer.parseInt(cmdLine.getOptionValue(KEY_SIZE_OPTION)),
    Integer.parseInt(cmdLine.getOptionValue(VALUE_SIZE_OPTION)));
  FileSystem fs = FileSystem.get(conf);
  Compression.Algorithm compr = Compression.Algorithm.NONE;
  if (cmdLine.hasOption(COMPRESSION_OPTION)) {
    compr = Compression.Algorithm.valueOf(cmdLine.getOptionValue(COMPRESSION_OPTION));
  }
  BloomType bloomType = BloomType.NONE;
  if (cmdLine.hasOption(BLOOM_FILTER_OPTION)) {
    bloomType = BloomType.valueOf(cmdLine.getOptionValue(BLOOM_FILTER_OPTION));
  }
  if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
    if (!cmdLine.hasOption(BLOOM_FILTER_PARAM_OPTION)) {
      LOG.error("The Bloom filter parameter is not specified");
      return false;
    } else {
      conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY,
        cmdLine.getOptionValue(BLOOM_FILTER_PARAM_OPTION));
    }
  }
  int blockSize = HConstants.DEFAULT_BLOCKSIZE;
  if (cmdLine.hasOption(BLOCK_SIZE_OPTION)) {
    blockSize = Integer.valueOf(cmdLine.getOptionValue(BLOCK_SIZE_OPTION));
  }
  if (cmdLine.hasOption(BLOOM_BLOCK_SIZE_OPTION)) {
    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
      Integer.valueOf(cmdLine.getOptionValue(BLOOM_BLOCK_SIZE_OPTION)));
  }
  if (cmdLine.hasOption(INDEX_BLOCK_SIZE_OPTION)) {
    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY,
      Integer.valueOf(cmdLine.getOptionValue(INDEX_BLOCK_SIZE_OPTION)));
  }
  HFileContext meta =
    new HFileContextBuilder().withCompression(compr).withBlockSize(blockSize).build();
  StoreFileWriter sfw = new StoreFileWriter.Builder(conf, new CacheConfig(conf), fs)
    .withOutputDir(outputDir).withBloomType(bloomType).withMaxKeyCount(numKV)
    .withFileContext(meta).build();
  rand = new Random();
  LOG.info("Writing " + numKV + " key/value pairs");
  for (long i = 0; i < numKV; ++i) {
    sfw.append(generateKeyValue(i));
  }
  int numMetaBlocks = rand.nextInt(10) + 1;
  LOG.info("Writing " + numMetaBlocks + " meta blocks");
  for (int metaI = 0; metaI < numMetaBlocks; ++metaI) {
    sfw.getHFileWriter().appendMetaBlock(generateString(), new BytesWritable(generateValue()));
  }
  sfw.close();
  Path storeFilePath = sfw.getPath();
  long fileSize = fs.getFileStatus(storeFilePath).getLen();
  LOG.info("Created {}, {} bytes, compression={}", storeFilePath, fileSize, compr.toString());
  return true;
}
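This tool validates its required options by hand after parsing. A condensed, self-contained sketch of that pattern with illustrative option names (newer commons-cli releases could instead mark the options required at definition time, e.g. via Options.addRequiredOption):

import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLineParser;
import org.apache.hbase.thirdparty.org.apache.commons.cli.HelpFormatter;
import org.apache.hbase.thirdparty.org.apache.commons.cli.Options;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
import org.apache.hbase.thirdparty.org.apache.commons.cli.PosixParser;

public class RequiredOptsSketch {
  public static void main(String[] args) {
    Options options = new Options();
    options.addOption("o", "output_dir", true, "Output directory");
    options.addOption("n", "num_kv", true, "Number of key/value pairs");
    if (args.length == 0) {
      new HelpFormatter().printHelp(RequiredOptsSketch.class.getSimpleName(), options, true);
      return;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmdLine;
    try {
      cmdLine = parser.parse(options, args);
    } catch (ParseException ex) {
      System.err.println(ex.toString());
      return;
    }
    // Manual required-option checks, as in the tool above.
    for (String required : new String[] { "o", "n" }) {
      if (!cmdLine.hasOption(required)) {
        System.err.println("Missing required option: -" + required);
        return;
      }
    }
    long numKV = Long.parseLong(cmdLine.getOptionValue("n"));
    System.out.println("Writing " + numKV + " key/value pairs to " + cmdLine.getOptionValue("o"));
  }
}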