use of org.apache.commons.cli.CommandLine in project hive by apache.
the class HBaseSchemaTool, method main.
public static void main(String[] args) {
  Options options = new Options();
  options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
  options.addOption(OptionBuilder.withLongOpt("install").withDescription("Install the schema onto an HBase cluster.").create('i'));
  options.addOption(OptionBuilder.withLongOpt("key").withDescription("Key to scan with. This should be an exact key (not a regular expression).").hasArg().create('k'));
  options.addOption(OptionBuilder.withLongOpt("list-tables").withDescription("List tables in HBase metastore").create('l'));
  options.addOption(OptionBuilder.withLongOpt("regex-key").withDescription("Regular expression to scan keys with.").hasArg().create('r'));
  options.addOption(OptionBuilder.withLongOpt("table").withDescription("HBase metastore table to scan").hasArg().create('t'));
  CommandLine cli = null;
  try {
    cli = new GnuParser().parse(options, args);
  } catch (ParseException e) {
    System.err.println("Parse Exception: " + e.getMessage());
    usage(options);
    return;
  }
  if (cli.hasOption('h')) {
    usage(options);
    return;
  }
  Configuration conf = new Configuration();
  if (cli.hasOption('i')) {
    new HBaseSchemaTool().install(conf, System.err);
    return;
  }
  String key = null;
  if (cli.hasOption('k'))
    key = cli.getOptionValue('k');
  String regex = null;
  if (cli.hasOption('r'))
    regex = cli.getOptionValue('r');
  if (key != null && regex != null) {
    usage(options);
    return;
  }
  if (key == null && regex == null)
    regex = ".*";
  // I do this in the object rather than in the static main so that it's easier to test.
  new HBaseSchemaTool().go(cli.hasOption('l'), cli.getOptionValue('t'), key, regex, conf, System.out, System.err);
}
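The ParseException handler, the help branch, and the key/regex conflict branch above all call a usage(Options) helper that is not included in this snippet. A minimal sketch of such a helper using commons-cli's HelpFormatter (the method name comes from the snippet, but the body and the "hbaseschematool" syntax string are assumptions, not the actual Hive implementation):

// Hedged sketch: prints the registered options; the command name shown is a placeholder.
private static void usage(Options options) {
  HelpFormatter formatter = new HelpFormatter();
  formatter.printHelp("hbaseschematool", options);
}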
use of org.apache.commons.cli.CommandLine in project hive by apache.
the class HBaseImport, method init.
private int init(String... args) throws ParseException {
  Options options = new Options();
  doAll = doKerberos = false;
  parallel = 1;
  batchSize = 1000;
  options.addOption(OptionBuilder.withLongOpt("all").withDescription("Import the full metastore").create('a'));
  options.addOption(OptionBuilder.withLongOpt("batchsize").withDescription("Number of partitions to read and write in a batch, defaults to 1000").hasArg().create('b'));
  options.addOption(OptionBuilder.withLongOpt("database").withDescription("Import a single database").hasArgs().create('d'));
  options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
  options.addOption(OptionBuilder.withLongOpt("function").withDescription("Import a single function").hasArgs().create('f'));
  options.addOption(OptionBuilder.withLongOpt("kerberos").withDescription("Import all kerberos related objects (master key, tokens)").create('k'));
  options.addOption(OptionBuilder.withLongOpt("parallel").withDescription("Parallel factor for loading (only applied to tables and partitions), " + "defaults to 1").hasArg().create('p'));
  options.addOption(OptionBuilder.withLongOpt("role").withDescription("Import a single role").hasArgs().create('r'));
  options.addOption(OptionBuilder.withLongOpt("tables").withDescription("Import a single table").hasArgs().create('t'));
  CommandLine cli = new GnuParser().parse(options, args);
  // Process help if it was asked for; this must be done first
  if (cli.hasOption('h')) {
    printHelp(options);
    return 1;
  }
  boolean hasCmd = false;
  // Now process the other command line args
  if (cli.hasOption('a')) {
    hasCmd = true;
    doAll = true;
  }
  if (cli.hasOption('b')) {
    batchSize = Integer.parseInt(cli.getOptionValue('b'));
  }
  if (cli.hasOption('d')) {
    hasCmd = true;
    dbsToImport = Arrays.asList(cli.getOptionValues('d'));
  }
  if (cli.hasOption('f')) {
    hasCmd = true;
    functionsToImport = Arrays.asList(cli.getOptionValues('f'));
  }
  if (cli.hasOption('p')) {
    parallel = Integer.parseInt(cli.getOptionValue('p'));
  }
  if (cli.hasOption('r')) {
    hasCmd = true;
    rolesToImport = Arrays.asList(cli.getOptionValues('r'));
  }
  if (cli.hasOption('k')) {
    doKerberos = true;
  }
  if (cli.hasOption('t')) {
    hasCmd = true;
    tablesToImport = Arrays.asList(cli.getOptionValues('t'));
  }
  if (!hasCmd) {
    printHelp(options);
    return 1;
  }
  dbs = new ArrayList<>();
  // We don't want to bound the size of the table queue because we keep it all in memory
  partitionedTables = new LinkedBlockingQueue<>();
  tableNameQueue = new LinkedBlockingQueue<>();
  indexNameQueue = new LinkedBlockingQueue<>();
  // Bound the size of this queue so we don't get too much in memory.
  partQueue = new ArrayBlockingQueue<>(parallel * 2);
  return 0;
}
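The batchsize (-b) and parallel (-p) values above are read with Integer.parseInt, which throws an unchecked NumberFormatException on malformed input. A small hedged sketch of a defensive variant (the helper name and its error handling are illustrative assumptions, not part of the Hive source):

// Hypothetical helper: shows the help text instead of propagating NumberFormatException.
private int parsePositiveInt(CommandLine cli, char opt, int defaultValue, Options options) {
  if (!cli.hasOption(opt)) {
    return defaultValue;
  }
  try {
    int value = Integer.parseInt(cli.getOptionValue(opt));
    if (value > 0) {
      return value;
    }
  } catch (NumberFormatException e) {
    // fall through to the error path below
  }
  printHelp(options);
  throw new IllegalArgumentException("Option -" + opt + " expects a positive integer");
}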
use of org.apache.commons.cli.CommandLine in project head by mifos.
the class PPITestDataGenerator, method parseOptions.
public void parseOptions(String[] args) {
  // create the command line parser
  CommandLineParser parser = new PosixParser();
  try {
    // parse the command line arguments
    CommandLine line = parser.parse(options, args);
    if (line.hasOption(HELP_OPTION_NAME)) {
      showHelp(options);
      System.exit(0);
    }
    if (line.hasOption(TEST_DATA_FILE_OPTION_NAME)) {
      if (line.hasOption(TEST_DATA_DIRECTORY_OPTION_NAME)) {
        fail("Specify either a data set (-f) or data directory (-a) but not both.");
      }
      dataSetName = line.getOptionValue(TEST_DATA_FILE_OPTION_NAME);
    } else if (line.hasOption(TEST_DATA_DIRECTORY_OPTION_NAME)) {
      testDataDirectoryName = line.getOptionValue(TEST_DATA_DIRECTORY_OPTION_NAME);
    } else {
      fail("Specify either a data set (-f) or data directory (-a)");
    }
    if (line.hasOption(CLIENT_GLOBAL_ID_OPTION_NAME)) {
      clientGlobalId = line.getOptionValue(CLIENT_GLOBAL_ID_OPTION_NAME);
    } else {
      missingOption(clientGlobalIdOption);
    }
  } catch (ParseException exp) {
    fail("Parsing failed. Reason: " + exp.getMessage());
  }
}
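The options field and the *_OPTION_NAME constants used above are defined elsewhere in PPITestDataGenerator. A sketch of how they could be wired up with commons-cli (the -f and -a letters match the error messages in the snippet; the help and client-id letters, long names, and descriptions are assumptions):

// Hedged sketch of the option setup this method relies on.
private static final String HELP_OPTION_NAME = "h";
private static final String TEST_DATA_FILE_OPTION_NAME = "f";
private static final String TEST_DATA_DIRECTORY_OPTION_NAME = "a";
private static final String CLIENT_GLOBAL_ID_OPTION_NAME = "c";

private final Option clientGlobalIdOption =
    new Option(CLIENT_GLOBAL_ID_OPTION_NAME, "clientId", true, "Global id of the client to attach data to");
private final Options options = new Options();

{
  // addOption(shortName, longName, hasArg, description)
  options.addOption(HELP_OPTION_NAME, "help", false, "Print this help text");
  options.addOption(TEST_DATA_FILE_OPTION_NAME, "file", true, "Single test data set to load");
  options.addOption(TEST_DATA_DIRECTORY_OPTION_NAME, "directory", true, "Directory of test data sets to load");
  options.addOption(clientGlobalIdOption);
}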
use of org.apache.commons.cli.CommandLine in project head by mifos.
the class DbUnitDataImportExport, method parseOptions.
public void parseOptions(String[] args) {
  // create the command line parser
  CommandLineParser parser = new PosixParser();
  try {
    // parse the command line arguments
    CommandLine line = parser.parse(options, args);
    if (line.hasOption(HELP_OPTION_NAME)) {
      showHelp(options);
      System.exit(0);
    }
    if (line.hasOption(FILE_OPTION_NAME)) {
      fileName = line.getOptionValue(FILE_OPTION_NAME);
    }
    if (line.hasOption(USER_OPTION_NAME)) {
      user = line.getOptionValue(USER_OPTION_NAME);
    } else {
      missingOption(userOption);
    }
    if (line.hasOption(PASSWORD_OPTION_NAME)) {
      password = line.getOptionValue(PASSWORD_OPTION_NAME);
    } else {
      missingOption(passwordOption);
    }
    if (line.hasOption(DATABASE_OPTION_NAME)) {
      databaseName = line.getOptionValue(DATABASE_OPTION_NAME);
    }
    if (line.hasOption(IMPORT_OPTION_NAME)) {
      doExport = false;
    } else if (line.hasOption(EXPORT_OPTION_NAME)) {
      doExport = true;
      if (line.hasOption(SQL_OPTION_NAME)) {
        exportAsSql = true;
      }
    }
  } catch (ParseException exp) {
    fail("Parsing failed. Reason: " + exp.getMessage());
  }
}
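Both mifos snippets delegate error handling to fail(String) and missingOption(Option) helpers that are not shown in this section. A plausible minimal sketch (the exact messages and the decision to exit the JVM are assumptions):

// Hedged sketch: report the problem and stop; the real classes may throw or log instead.
private void fail(String message) {
  System.err.println(message);
  System.exit(1);
}

private void missingOption(Option option) {
  fail("Missing required option: -" + option.getOpt() + " (--" + option.getLongOpt() + ")");
}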
use of org.apache.commons.cli.CommandLine in project languagetool by languagetool-org.
the class DictionaryExporter, method main.
public static void main(String[] args) throws Exception {
  BuilderOptions builderOptions = new BuilderOptions();
  builderOptions.addOption(BuilderOptions.INPUT_OPTION, true, "binary Morfologik dictionary file (.dict)", true);
  builderOptions.addOption(BuilderOptions.INFO_OPTION, true, BuilderOptions.INFO_HELP, true);
  CommandLine cmdLine = builderOptions.parseArguments(args, DictionaryExporter.class);
  File binaryDictFile = new File(cmdLine.getOptionValue(BuilderOptions.INPUT_OPTION));
  File infoFile = new File(cmdLine.getOptionValue(BuilderOptions.INFO_OPTION));
  DictionaryExporter builder = new DictionaryExporter(infoFile);
  builder.setOutputFilename(cmdLine.getOptionValue(BuilderOptions.OUTPUT_OPTION));
  builder.build(binaryDictFile);
}
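BuilderOptions is LanguageTool's thin wrapper around commons-cli, and its internals are not shown here. A hedged sketch of what such a parseArguments-style wrapper can look like (the class name, option handling, and help output are assumptions for illustration, not the LanguageTool implementation):

// Illustrative wrapper only: collects long-named options and parses them with commons-cli.
class SimpleBuilderOptions {

  private final Options options = new Options();

  void addOption(String longOpt, boolean hasArg, String description, boolean required) {
    Option option = new Option(null, longOpt, hasArg, description);
    option.setRequired(required);
    options.addOption(option);
  }

  CommandLine parseArguments(String[] args, Class<?> toolClass) {
    try {
      return new GnuParser().parse(options, args);
    } catch (ParseException e) {
      new HelpFormatter().printHelp(toolClass.getSimpleName(), options);
      throw new RuntimeException(e);
    }
  }
}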