Use of org.apache.commons.cli.CommandLine in the Apache Phoenix project:
class AbstractBulkLoadTool, method run().
/**
 * Tool entry point: parses the command-line options and hands off to the
 * bulk-load implementation.
 *
 * @param args raw command-line arguments
 * @return the exit code produced by loadData
 * @throws Exception if loading fails
 */
@Override
public int run(String[] args) throws Exception {
    // Layer HBase defaults on top of whatever configuration this tool was given.
    final Configuration conf = HBaseConfiguration.create(getConf());
    CommandLine commandLine = null;
    try {
        commandLine = parseOptions(args);
    } catch (IllegalStateException e) {
        // Bad arguments: show usage. NOTE(review): presumably printHelpAndExit
        // terminates the JVM; if it ever returns, commandLine is still null
        // here and loadData would receive it — confirm against its definition.
        printHelpAndExit(e.getMessage(), getOptions());
    }
    return loadData(conf, commandLine);
}
Use of org.apache.commons.cli.CommandLine in the Apache Lucene/Solr project:
class SolrSnapshotsTool, method main().
/**
 * Command-line entry point for the snapshot management tool. Supports the
 * create/delete/list/describe/prepare-for-export/export commands plus help.
 * Prints usage and exits the JVM with status 1 on a parse error or (via
 * requiredArg) on a missing mandatory argument.
 *
 * @param args raw command-line arguments
 * @throws IOException if a snapshot operation fails with an I/O error
 */
public static void main(String[] args) throws IOException {
    Options options = buildOptions();
    CommandLine cmd = parseCommandLine(options, args);
    if (cmd.hasOption(CREATE) || cmd.hasOption(DELETE) || cmd.hasOption(LIST)
            || cmd.hasOption(DESCRIBE) || cmd.hasOption(PREPARE_FOR_EXPORT)
            || cmd.hasOption(EXPORT_SNAPSHOT)) {
        // All snapshot commands need a ZK ensemble; the tool is Closeable, so
        // use try-with-resources to release its SolrClient/ZK connections.
        try (SolrSnapshotsTool tool =
                new SolrSnapshotsTool(requiredArg(options, cmd, SOLR_ZK_ENSEMBLE))) {
            runSnapshotCommand(tool, options, cmd);
        }
    } else if (cmd.hasOption(HELP)) {
        printHelp(options);
    } else {
        System.out.println("Unknown command specified.");
        printHelp(options);
    }
}

/** Builds the full set of command-line options understood by this tool. */
private static Options buildOptions() {
    Options options = new Options();
    options.addOption(null, CREATE, true, "This command will create a snapshot with the specified name");
    options.addOption(null, DELETE, true, "This command will delete a snapshot with the specified name");
    options.addOption(null, LIST, false, "This command will list all the named snapshots for the specified collection.");
    options.addOption(null, DESCRIBE, true, "This command will print details for a named snapshot for the specified collection.");
    options.addOption(null, PREPARE_FOR_EXPORT, true, "This command will prepare copylistings for the specified snapshot." + " This command should only be used only if Solr is deployed with Hadoop and collection index files are stored on a shared" + " file-system e.g. HDFS");
    options.addOption(null, EXPORT_SNAPSHOT, true, "This command will create a backup for the specified snapshot.");
    options.addOption(null, HELP, false, "This command will print the help message for the snapshots related commands.");
    options.addOption(TEMP_DIR, true, "This parameter specifies the path of a temporary directory on local filesystem" + " during prepare-snapshot-export command.");
    options.addOption(DEST_DIR, true, "This parameter specifies the path on shared file-system (e.g. HDFS) where the snapshot related" + " information should be stored.");
    options.addOption(COLLECTION, true, "This parameter specifies the name of the collection to be used during snapshot operation");
    options.addOption(SOLR_ZK_ENSEMBLE, true, "This parameter specifies the Solr Zookeeper ensemble address");
    options.addOption(HDFS_PATH_PREFIX, true, "This parameter specifies the HDFS URI prefix to be used" + " during snapshot export preparation. This is applicable only if the Solr collection index files are stored on HDFS.");
    options.addOption(BACKUP_REPO_NAME, true, "This parameter specifies the name of the backup repository to be used" + " during snapshot export preparation");
    options.addOption(ASYNC_REQ_ID, true, "This parameter specifies the async request identifier to be used" + " during snapshot export preparation");
    return options;
}

/**
 * Parses {@code args} against {@code options}, printing help and exiting the
 * JVM with status 1 on failure.
 */
private static CommandLine parseCommandLine(Options options, String[] args) {
    try {
        return new PosixParser().parse(options, args);
    } catch (ParseException e) {
        System.out.println(e.getLocalizedMessage());
        printHelp(options);
        System.exit(1);
        return null; // unreachable; System.exit does not return
    }
}

/** Dispatches the single snapshot command present on {@code cmd} to {@code tool}. */
private static void runSnapshotCommand(SolrSnapshotsTool tool, Options options, CommandLine cmd)
        throws IOException {
    if (cmd.hasOption(CREATE)) {
        String snapshotName = cmd.getOptionValue(CREATE);
        String collectionName = requiredArg(options, cmd, COLLECTION);
        tool.createSnapshot(collectionName, snapshotName);
    } else if (cmd.hasOption(DELETE)) {
        String snapshotName = cmd.getOptionValue(DELETE);
        String collectionName = requiredArg(options, cmd, COLLECTION);
        tool.deleteSnapshot(collectionName, snapshotName);
    } else if (cmd.hasOption(LIST)) {
        String collectionName = requiredArg(options, cmd, COLLECTION);
        tool.listSnapshots(collectionName);
    } else if (cmd.hasOption(DESCRIBE)) {
        String snapshotName = cmd.getOptionValue(DESCRIBE);
        String collectionName = requiredArg(options, cmd, COLLECTION);
        tool.describeSnapshot(collectionName, snapshotName);
    } else if (cmd.hasOption(PREPARE_FOR_EXPORT)) {
        String snapshotName = cmd.getOptionValue(PREPARE_FOR_EXPORT);
        String collectionName = requiredArg(options, cmd, COLLECTION);
        String localFsDir = requiredArg(options, cmd, TEMP_DIR);
        String hdfsOpDir = requiredArg(options, cmd, DEST_DIR);
        Optional<String> pathPrefix = Optional.ofNullable(cmd.getOptionValue(HDFS_PATH_PREFIX));
        if (pathPrefix.isPresent()) {
            // Validate the prefix up front so we fail fast on a malformed URI.
            try {
                new URI(pathPrefix.get());
            } catch (URISyntaxException e) {
                System.out.println("The specified File system path prefix " + pathPrefix.get()
                        + " is invalid. The error is " + e.getLocalizedMessage());
                System.exit(1);
            }
        }
        tool.prepareForExport(collectionName, snapshotName, localFsDir, pathPrefix, hdfsOpDir);
    } else if (cmd.hasOption(EXPORT_SNAPSHOT)) {
        String snapshotName = cmd.getOptionValue(EXPORT_SNAPSHOT);
        String collectionName = requiredArg(options, cmd, COLLECTION);
        String destDir = requiredArg(options, cmd, DEST_DIR);
        Optional<String> backupRepo = Optional.ofNullable(cmd.getOptionValue(BACKUP_REPO_NAME));
        Optional<String> asyncReqId = Optional.ofNullable(cmd.getOptionValue(ASYNC_REQ_ID));
        tool.exportSnapshot(collectionName, snapshotName, destDir, backupRepo, asyncReqId);
    }
}
Use of org.apache.commons.cli.CommandLine in the Apache Lucene/Solr project:
class SolrCLI, method parseCmdLine().
/**
 * Parses tool command-line arguments into a {@link CommandLine}, working
 * around the commons-cli parser's dislike of {@code -Dname=value} style
 * properties: those are stripped before parsing and re-appended to the parsed
 * command line's trailing argument list afterwards.
 *
 * @param args        full argument vector; args[0] is the tool name and is skipped
 * @param toolOptions tool-specific options, joined with the common options
 * @return the parsed command line, with -D args restored in its arg list
 * @throws Exception if argument processing fails
 */
public static CommandLine parseCmdLine(String[] args, Option[] toolOptions) throws Exception {
    // The parser doesn't like -D props, so separate them out first.
    List<String> toolArgList = new ArrayList<>();
    List<String> dashDList = new ArrayList<>();
    for (int a = 1; a < args.length; a++) { // start at 1: args[0] is the tool name
        String arg = args[a];
        if (arg.startsWith("-D")) {
            dashDList.add(arg);
        } else {
            toolArgList.add(arg);
        }
    }
    String[] toolArgs = toolArgList.toArray(new String[0]);
    // Process command-line args to configure this application.
    CommandLine cli = processCommandLineArgs(joinCommonAndToolOptions(toolOptions), toolArgs);
    // Re-attach the -D args so downstream code still sees them. NOTE(review):
    // this relies on getArgList() returning a live, mutable list — confirm
    // against the commons-cli version in use.
    List<String> argList = cli.getArgList();
    argList.addAll(dashDList);
    // For SSL support, try to accommodate relative paths set for SSL store props.
    String solrInstallDir = System.getProperty("solr.install.dir");
    if (solrInstallDir != null) {
        checkSslStoreSysProp(solrInstallDir, "keyStore");
        checkSslStoreSysProp(solrInstallDir, "trustStore");
    }
    return cli;
}
Use of org.apache.commons.cli.CommandLine in the Apache Lucene/Solr project:
class SolrCloudExampleTest, method doTestConfigUpdate().
/**
 * Uses the SolrCLI config action to activate soft auto-commits for the getting
 * started collection, then verifies the new value took effect and that all
 * cores were reloaded.
 *
 * @param testCollectionName collection whose config is updated
 * @param solrUrl            base Solr URL; a trailing slash is appended if missing
 * @throws Exception if the CLI call or any HTTP/JSON access fails
 */
protected void doTestConfigUpdate(String testCollectionName, String solrUrl) throws Exception {
    if (!solrUrl.endsWith("/")) {
        solrUrl += "/";
    }
    String configUrl = solrUrl + testCollectionName + "/config";
    Map<String, Object> configJson = SolrCLI.getJson(configUrl);
    Object maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson);
    assertNotNull(maxTimeFromConfig);
    // Soft auto-commit starts out disabled (-1). Long.valueOf replaces the
    // deprecated new Long(...) constructor; equals semantics are identical.
    assertEquals(Long.valueOf(-1L), maxTimeFromConfig);
    String prop = "updateHandler.autoSoftCommit.maxTime";
    Long maxTime = 3000L; // autoboxing goes through Long.valueOf
    String[] args = new String[] { "-collection", testCollectionName, "-property", prop, "-value", maxTime.toString(), "-solrUrl", solrUrl };
    Map<String, Long> startTimes = getSoftAutocommitInterval(testCollectionName);
    SolrCLI.ConfigTool tool = new SolrCLI.ConfigTool();
    CommandLine cli = SolrCLI.processCommandLineArgs(SolrCLI.joinCommonAndToolOptions(tool.getOptions()), args);
    // Parameterized logging avoids eager string concatenation.
    log.info("Sending set-property '{}'={} to SolrCLI.ConfigTool.", prop, maxTime);
    assertTrue("Set config property failed!", tool.runTool(cli) == 0);
    configJson = SolrCLI.getJson(configUrl);
    maxTimeFromConfig = SolrCLI.atPath("/config/updateHandler/autoSoftCommit/maxTime", configJson);
    assertNotNull(maxTimeFromConfig);
    assertEquals(maxTime, maxTimeFromConfig);
    // Just check that we can access paths with slashes in them both through an intermediate method and explicitly
    // using atPath.
    assertEquals("Should have been able to get a value from the /query request handler", "explicit", SolrCLI.asString("/config/requestHandler/\\/query/defaults/echoParams", configJson));
    assertEquals("Should have been able to get a value from the /query request handler", "explicit", SolrCLI.atPath("/config/requestHandler/\\/query/defaults/echoParams", configJson));
    log.info("live_nodes_count : {}", cloudClient.getZkStateReader().getClusterState().getLiveNodes());
    // Since it takes some time for this command to complete we need to make sure all the reloads for
    // all the cores have been done. Poll up to 600 times with a 100ms sleep (~60s).
    boolean allGood = false;
    Map<String, Long> curSoftCommitInterval = null;
    for (int idx = 0; idx < 600 && !allGood; ++idx) {
        curSoftCommitInterval = getSoftAutocommitInterval(testCollectionName);
        if (curSoftCommitInterval.size() > 0 && curSoftCommitInterval.size() == startTimes.size()) {
            // No point in even trying if they're not the same size!
            allGood = true;
            for (Map.Entry<String, Long> currEntry : curSoftCommitInterval.entrySet()) {
                if (!currEntry.getValue().equals(maxTime)) {
                    allGood = false;
                }
            }
        }
        if (!allGood) {
            Thread.sleep(100);
        }
    }
    assertTrue("All cores should have been reloaded within 60 seconds!!!", allGood);
}
Use of org.apache.commons.cli.CommandLine in the Apache Phoenix project:
class PhoenixConsumerTool, method run().
/**
 * Tool entry point: parses options, stores the Kafka consumer file path in the
 * configuration, and constructs a PhoenixConsumer around it.
 *
 * @param args raw command-line arguments
 * @return 1 (NOTE(review): by Hadoop Tool convention a non-zero return signals
 *         failure — confirm this is the intended success value)
 * @throws Exception if consumer construction fails
 */
@Override
public int run(String[] args) throws Exception {
    // Layer HBase defaults on top of this tool's existing configuration.
    final Configuration conf = HBaseConfiguration.create(getConf());
    CommandLine commandLine = null;
    try {
        commandLine = parseOptions(args);
    } catch (IllegalStateException e) {
        // Bad arguments: show usage. NOTE(review): presumably printHelpAndExit
        // terminates the JVM; if it ever returns, commandLine stays null and
        // the getOptionValue call below would throw NPE — confirm.
        printHelpAndExit(e.getMessage(), getOptions());
    }
    final String path = commandLine.getOptionValue(FILE_PATH_OPT.getOpt());
    conf.set("kafka.consumer.file", path);
    // The consumer presumably starts its work from its constructor; the
    // reference is intentionally not retained here.
    new PhoenixConsumer(conf);
    return 1;
}
Aggregations