Use of org.apache.commons.cli.CommandLineParser in project head (by mifos), in the parseOptions method of class PPITestDataGenerator:
/**
 * Parses the command-line arguments for the test-data generator.
 *
 * <p>Recognized options: help (prints usage and exits), a data-set file
 * (-f) or a data directory (-a) — exactly one of the two is required —
 * and a mandatory client global id. Invalid combinations or parse
 * errors are reported via {@code fail}.
 *
 * @param args raw command-line arguments
 */
public void parseOptions(String[] args) {
    // POSIX-style parser for short options such as -f and -a.
    CommandLineParser parser = new PosixParser();
    try {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption(HELP_OPTION_NAME)) {
            showHelp(options);
            System.exit(0);
        }
        boolean dataSetGiven = line.hasOption(TEST_DATA_FILE_OPTION_NAME);
        boolean directoryGiven = line.hasOption(TEST_DATA_DIRECTORY_OPTION_NAME);
        if (dataSetGiven) {
            // -f and -a are mutually exclusive.
            if (directoryGiven) {
                fail("Specify either a data set (-f) or data directory (-a) but not both.");
            }
            dataSetName = line.getOptionValue(TEST_DATA_FILE_OPTION_NAME);
        } else if (directoryGiven) {
            testDataDirectoryName = line.getOptionValue(TEST_DATA_DIRECTORY_OPTION_NAME);
        } else {
            fail("Specify either a data set (-f) or data directory (-a)");
        }
        if (line.hasOption(CLIENT_GLOBAL_ID_OPTION_NAME)) {
            clientGlobalId = line.getOptionValue(CLIENT_GLOBAL_ID_OPTION_NAME);
        } else {
            // Client global id is required.
            missingOption(clientGlobalIdOption);
        }
    } catch (ParseException exp) {
        fail("Parsing failed. Reason: " + exp.getMessage());
    }
}
Use of org.apache.commons.cli.CommandLineParser in project head (by mifos), in the parseOptions method of class DbUnitDataImportExport:
/**
 * Parses the command-line arguments for the DbUnit import/export tool.
 *
 * <p>Recognized options: help (prints usage and exits), an optional file
 * name and database name, mandatory user and password credentials, and
 * an import/export mode selector. When exporting, an additional SQL
 * flag selects SQL-format output. Parse errors are reported via
 * {@code fail}.
 *
 * @param args raw command-line arguments
 */
public void parseOptions(String[] args) {
    // POSIX-style parser for the short option names used by this tool.
    CommandLineParser parser = new PosixParser();
    try {
        CommandLine line = parser.parse(options, args);
        if (line.hasOption(HELP_OPTION_NAME)) {
            showHelp(options);
            System.exit(0);
        }
        if (line.hasOption(FILE_OPTION_NAME)) {
            fileName = line.getOptionValue(FILE_OPTION_NAME);
        }
        // Credentials are required; report each one that is missing.
        if (line.hasOption(USER_OPTION_NAME)) {
            user = line.getOptionValue(USER_OPTION_NAME);
        } else {
            missingOption(userOption);
        }
        if (line.hasOption(PASSWORD_OPTION_NAME)) {
            password = line.getOptionValue(PASSWORD_OPTION_NAME);
        } else {
            missingOption(passwordOption);
        }
        if (line.hasOption(DATABASE_OPTION_NAME)) {
            databaseName = line.getOptionValue(DATABASE_OPTION_NAME);
        }
        // Mode selection: import wins when given; otherwise export (with
        // an optional SQL-format flag). Neither flag leaves the defaults.
        if (line.hasOption(IMPORT_OPTION_NAME)) {
            doExport = false;
        } else if (line.hasOption(EXPORT_OPTION_NAME)) {
            doExport = true;
            exportAsSql = line.hasOption(SQL_OPTION_NAME) || exportAsSql;
        }
    } catch (ParseException exp) {
        fail("Parsing failed. Reason: " + exp.getMessage());
    }
}
Use of org.apache.commons.cli.CommandLineParser in project bagheera (by mozilla-metrics), in the main method of class KafkaSequenceFileConsumer:
/**
 * Entry point: consumes messages from Kafka and writes them to HDFS
 * sequence files.
 *
 * <p>Builds the option set (Kafka consumer options plus HDFS output
 * path, date format, max file size, bytes-vs-text values, and an
 * optional bagheera timestamp), wires the consumer and sink factory
 * into the shutdown hook, initializes metrics, and starts polling.
 * On a parse error it prints usage; on a bad file size it logs and
 * exits without polling.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
    OptionFactory optFactory = OptionFactory.getInstance();
    Options options = KafkaConsumer.getOptions();
    options.addOption(optFactory.create("o", "output", true, "HDFS base path for output."));
    options.addOption(optFactory.create("df", "dateformat", true, "Date format for the date subdirectories."));
    options.addOption(optFactory.create("fs", "filesize", true, "Max file size for output files."));
    options.addOption(optFactory.create("b", "usebytes", false, "Use BytesWritable for value rather than Text."));
    options.addOption(optFactory.create("ts", "addtimestamp", false, "Adds bagheera timestamp to the json"));
    CommandLineParser parser = new GnuParser();
    ShutdownHook sh = ShutdownHook.getInstance();
    try {
        // Parse command line options
        CommandLine cmd = parser.parse(options, args);
        final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
        // Consumer must shut down before the sinks it writes to.
        sh.addFirst(consumer);
        // Create a sink for storing data; unspecified options fall back
        // to the defaults given as the second getOptionValue argument.
        SinkConfiguration sinkConfig = new SinkConfiguration();
        sinkConfig.setString("hdfssink.hdfs.basedir.path", cmd.getOptionValue("output", "/bagheera"));
        sinkConfig.setString("hdfssink.hdfs.date.format", cmd.getOptionValue("dateformat", "yyyy-MM-dd"));
        // Default max file size is 512 MiB (536870912 bytes).
        sinkConfig.setLong("hdfssink.hdfs.max.filesize", Long.parseLong(cmd.getOptionValue("filesize", "536870912")));
        sinkConfig.setBoolean("hdfssink.hdfs.usebytes", cmd.hasOption("usebytes"));
        if (cmd.hasOption("addtimestamp")) {
            sinkConfig.setBoolean("hdfssink.hdfs.addtimestamp", true);
        }
        KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(SequenceFileSink.class, sinkConfig);
        sh.addLast(sinkFactory);
        // Set the sink for consumer storage
        consumer.setSinkFactory(sinkFactory);
        // Initialize metrics collection, reporting, etc. The manager is
        // accessed globally, so the return value is intentionally unused.
        MetricsManager.getDefaultMetricsManager();
        prepareHealthChecks();
        // Begin polling
        consumer.poll();
    } catch (ParseException e) {
        LOG.error("Error parsing command line options", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(KafkaSequenceFileConsumer.class.getName(), options);
    } catch (NumberFormatException e) {
        LOG.error("Failed to parse filesize option", e);
    }
}
Use of org.apache.commons.cli.CommandLineParser in project bagheera (by mozilla-metrics), in the main method of class KafkaHBaseConsumer:
/**
 * Entry point: consumes messages from Kafka and writes them into an
 * HBase table.
 *
 * <p>Builds the option set (Kafka consumer options plus the required
 * table name, column family/qualifier, batch size, and a salted-date
 * key prefix flag), wires the consumer and sink factory into the
 * shutdown hook, initializes metrics, and starts polling. On a parse
 * error it prints usage; a malformed numeric option is logged instead
 * of escaping as an uncaught NumberFormatException.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
    OptionFactory optFactory = OptionFactory.getInstance();
    Options options = KafkaConsumer.getOptions();
    options.addOption(optFactory.create("tbl", "table", true, "HBase table name.").required());
    options.addOption(optFactory.create("f", "family", true, "Column family."));
    options.addOption(optFactory.create("q", "qualifier", true, "Column qualifier."));
    options.addOption(optFactory.create("b", "batchsize", true, "Batch size (number of messages per HBase flush)."));
    options.addOption(optFactory.create("pd", "prefixdate", false, "Prefix key with salted date."));
    CommandLineParser parser = new GnuParser();
    ShutdownHook sh = ShutdownHook.getInstance();
    try {
        // Parse command line options
        CommandLine cmd = parser.parse(options, args);
        final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
        // Consumer must shut down before the sinks it writes to.
        sh.addFirst(consumer);
        // Create a sink for storing data
        SinkConfiguration sinkConfig = new SinkConfiguration();
        if (cmd.hasOption("numthreads")) {
            sinkConfig.setInt("hbasesink.hbase.numthreads", Integer.parseInt(cmd.getOptionValue("numthreads")));
        }
        if (cmd.hasOption("batchsize")) {
            sinkConfig.setInt("hbasesink.hbase.batchsize", Integer.parseInt(cmd.getOptionValue("batchsize")));
        }
        sinkConfig.setString("hbasesink.hbase.tablename", cmd.getOptionValue("table"));
        sinkConfig.setString("hbasesink.hbase.column.family", cmd.getOptionValue("family", "data"));
        sinkConfig.setString("hbasesink.hbase.column.qualifier", cmd.getOptionValue("qualifier", "json"));
        sinkConfig.setBoolean("hbasesink.hbase.rowkey.prefixdate", cmd.hasOption("prefixdate"));
        KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(HBaseSink.class, sinkConfig);
        sh.addLast(sinkFactory);
        // Set the sink factory for consumer storage
        consumer.setSinkFactory(sinkFactory);
        prepareHealthChecks();
        // Initialize metrics collection, reporting, etc. The manager is
        // accessed globally, so the return value is intentionally unused.
        MetricsManager.getDefaultMetricsManager();
        // Begin polling
        consumer.poll();
    } catch (ParseException e) {
        LOG.error("Error parsing command line options", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(KafkaHBaseConsumer.class.getName(), options);
    } catch (NumberFormatException e) {
        // Matches KafkaSequenceFileConsumer: a bad numthreads/batchsize
        // value is logged rather than crashing with a raw stack trace.
        LOG.error("Failed to parse numeric option", e);
    }
}
Use of org.apache.commons.cli.CommandLineParser in project henplus (by neurolabs), in the readCommandLineOptions method of class HenPlus:
/**
 * Parses the command-line arguments and applies the recognized flags.
 *
 * <p>Handles -h (print usage and exit successfully), -s (quiet mode)
 * and -v (verbose mode), then delegates the remaining options to
 * {@code handleCommandOptions}. Any error during parsing or handling
 * is logged and results in the usage text plus a non-zero exit.
 *
 * @param argv raw command-line arguments
 */
private void readCommandLineOptions(final String[] argv) {
    final Options availableOptions = getMainOptions();
    registerCommandOptions(availableOptions);
    final CommandLineParser parser = new PosixParser();
    try {
        final CommandLine line = parser.parse(availableOptions, argv);
        if (line.hasOption('h')) {
            // Help requested: print usage and exit with success.
            usageAndExit(availableOptions, 0);
        }
        if (line.hasOption('s')) {
            _quiet = true;
        }
        if (line.hasOption('v')) {
            _verbose = true;
        }
        handleCommandOptions(line);
    } catch (final Exception e) {
        // Broad catch is deliberate: any failure falls through to usage.
        Logger.error("Error handling command line arguments", e);
        usageAndExit(availableOptions, 1);
    }
}
Aggregations