Usage example of org.apache.commons.cli.HelpFormatter from the mifos project "head": class PPITestDataGenerator, method showHelp.
// Prints the command-line usage summary for the PPITestDataGenerator tool.
private static void showHelp(Options options) {
    new HelpFormatter().printHelp("PPITestDataGenerator", options);
}
Usage example of org.apache.commons.cli.HelpFormatter from the mifos project "head": class DataSetUpgradeUtil, method showHelp.
// Prints the command-line usage summary for the DataSetUpgradeUtil tool.
// Fix: the usage banner previously read "DbUnitDataExport" — an apparent
// copy-paste from DbUnitDataImportExport — which misled users about which
// tool they were invoking. It now names this class.
private static void showHelp(Options options) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("DataSetUpgradeUtil", options);
}
Usage example of org.apache.commons.cli.HelpFormatter from the mifos project "head": class DbUnitDataImportExport, method showHelp.
// Prints the command-line usage summary for this tool.
// NOTE(review): the banner says "DbUnitDataExport" although the class is
// DbUnitDataImportExport — presumably the historical command name; confirm
// before renaming.
private static void showHelp(Options options) {
    new HelpFormatter().printHelp("DbUnitDataExport", options);
}
Usage example of org.apache.commons.cli.HelpFormatter from the mozilla-metrics project "bagheera": class KafkaSequenceFileConsumer, method main.
/**
 * Entry point: consumes messages from Kafka and writes them to HDFS as
 * sequence files via a {@code SequenceFileSink}.
 *
 * <p>Command-line options (in addition to those from
 * {@link KafkaConsumer#getOptions()}): {@code -o/--output} HDFS base path,
 * {@code -df/--dateformat} date subdirectory format,
 * {@code -fs/--filesize} max output file size in bytes,
 * {@code -b/--usebytes} use BytesWritable values instead of Text,
 * {@code -ts/--addtimestamp} add the bagheera timestamp to the JSON.
 *
 * <p>On a parse error, logs and prints usage; on a malformed
 * {@code --filesize} value, logs and exits without starting the consumer.
 *
 * @param args command-line arguments
 */
public static void main(String[] args) {
    OptionFactory optFactory = OptionFactory.getInstance();
    Options options = KafkaConsumer.getOptions();
    options.addOption(optFactory.create("o", "output", true, "HDFS base path for output."));
    options.addOption(optFactory.create("df", "dateformat", true, "Date format for the date subdirectories."));
    options.addOption(optFactory.create("fs", "filesize", true, "Max file size for output files."));
    options.addOption(optFactory.create("b", "usebytes", false, "Use BytesWritable for value rather than Text."));
    options.addOption(optFactory.create("ts", "addtimestamp", false, "Adds bagheera timestamp to the json"));
    CommandLineParser parser = new GnuParser();
    ShutdownHook sh = ShutdownHook.getInstance();
    try {
        // Parse command line options
        CommandLine cmd = parser.parse(options, args);
        final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
        // Consumer is registered first so it stops before the sinks close.
        sh.addFirst(consumer);
        // Create a sink for storing data; defaults mirror the option help text.
        SinkConfiguration sinkConfig = new SinkConfiguration();
        sinkConfig.setString("hdfssink.hdfs.basedir.path", cmd.getOptionValue("output", "/bagheera"));
        sinkConfig.setString("hdfssink.hdfs.date.format", cmd.getOptionValue("dateformat", "yyyy-MM-dd"));
        // Default max file size is 512 MiB (536870912 bytes).
        sinkConfig.setLong("hdfssink.hdfs.max.filesize", Long.parseLong(cmd.getOptionValue("filesize", "536870912")));
        sinkConfig.setBoolean("hdfssink.hdfs.usebytes", cmd.hasOption("usebytes"));
        if (cmd.hasOption("addtimestamp")) {
            sinkConfig.setBoolean("hdfssink.hdfs.addtimestamp", true);
        }
        KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(SequenceFileSink.class, sinkConfig);
        sh.addLast(sinkFactory);
        // Set the sink for consumer storage
        consumer.setSinkFactory(sinkFactory);
        // Initialize metrics collection, reporting, etc.
        // Fix: the returned manager was assigned to an unused local; only the
        // initialization side effect is needed here.
        MetricsManager.getDefaultMetricsManager();
        prepareHealthChecks();
        // Begin polling
        consumer.poll();
    } catch (ParseException e) {
        LOG.error("Error parsing command line options", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(KafkaSequenceFileConsumer.class.getName(), options);
    } catch (NumberFormatException e) {
        // Thrown by Long.parseLong when --filesize is not a valid long.
        LOG.error("Failed to parse filesize option", e);
    }
}
Usage example of org.apache.commons.cli.HelpFormatter from the mozilla-metrics project "bagheera": class KafkaHBaseConsumer, method main.
/**
 * Entry point: consumes messages from Kafka and writes them into an HBase
 * table via an {@code HBaseSink}.
 *
 * <p>Command-line options (in addition to those from
 * {@link KafkaConsumer#getOptions()}): {@code -tbl/--table} HBase table name
 * (required), {@code -f/--family} column family, {@code -q/--qualifier}
 * column qualifier, {@code -b/--batchsize} messages per HBase flush,
 * {@code -pd/--prefixdate} prefix row keys with a salted date.
 *
 * <p>On a parse error (including the missing required table option), logs
 * and prints usage.
 *
 * @param args command-line arguments
 */
public static void main(String[] args) {
    OptionFactory optFactory = OptionFactory.getInstance();
    Options options = KafkaConsumer.getOptions();
    options.addOption(optFactory.create("tbl", "table", true, "HBase table name.").required());
    options.addOption(optFactory.create("f", "family", true, "Column family."));
    options.addOption(optFactory.create("q", "qualifier", true, "Column qualifier."));
    options.addOption(optFactory.create("b", "batchsize", true, "Batch size (number of messages per HBase flush)."));
    options.addOption(optFactory.create("pd", "prefixdate", false, "Prefix key with salted date."));
    CommandLineParser parser = new GnuParser();
    ShutdownHook sh = ShutdownHook.getInstance();
    try {
        // Parse command line options
        CommandLine cmd = parser.parse(options, args);
        final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
        // Consumer is registered first so it stops before the sinks close.
        sh.addFirst(consumer);
        // Create a sink for storing data
        SinkConfiguration sinkConfig = new SinkConfiguration();
        // "numthreads" is defined by KafkaConsumer.getOptions(), not added
        // here — presumably shared across consumers; verify against that class.
        if (cmd.hasOption("numthreads")) {
            sinkConfig.setInt("hbasesink.hbase.numthreads", Integer.parseInt(cmd.getOptionValue("numthreads")));
        }
        if (cmd.hasOption("batchsize")) {
            sinkConfig.setInt("hbasesink.hbase.batchsize", Integer.parseInt(cmd.getOptionValue("batchsize")));
        }
        sinkConfig.setString("hbasesink.hbase.tablename", cmd.getOptionValue("table"));
        sinkConfig.setString("hbasesink.hbase.column.family", cmd.getOptionValue("family", "data"));
        sinkConfig.setString("hbasesink.hbase.column.qualifier", cmd.getOptionValue("qualifier", "json"));
        sinkConfig.setBoolean("hbasesink.hbase.rowkey.prefixdate", cmd.hasOption("prefixdate"));
        KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(HBaseSink.class, sinkConfig);
        sh.addLast(sinkFactory);
        // Set the sink factory for consumer storage
        consumer.setSinkFactory(sinkFactory);
        prepareHealthChecks();
        // Initialize metrics collection, reporting, etc.
        // Fix: the returned manager was assigned to an unused local; only the
        // initialization side effect is needed here.
        MetricsManager.getDefaultMetricsManager();
        // Begin polling
        consumer.poll();
    } catch (ParseException e) {
        LOG.error("Error parsing command line options", e);
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(KafkaHBaseConsumer.class.getName(), options);
    }
}
Aggregations