Use of org.apache.commons.cli.GnuParser in project pinot by linkedin: class FileBasedServer, method processCommandLineArgs.
private static void processCommandLineArgs(String[] cliArgs) throws ParseException {
  CommandLineParser cliParser = new GnuParser();
  Options cliOptions = buildCommandLineOptions();
  CommandLine cmd = cliParser.parse(cliOptions, cliArgs, true);
  if (!cmd.hasOption(SERVER_CONFIG_OPT_NAME) || !cmd.hasOption(SERVER_PORT_OPT_NAME)) {
    System.err.println("Missing required arguments !!");
    System.err.println(cliOptions);
    throw new RuntimeException("Missing required arguments !!");
  }
  _serverConfigPath = cmd.getOptionValue(SERVER_CONFIG_OPT_NAME);
  _serverPort = Integer.parseInt(cmd.getOptionValue(SERVER_PORT_OPT_NAME));
}
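The snippet relies on a buildCommandLineOptions() helper that is not shown. A minimal sketch of what it could look like follows; the option values, hasArg flags, and descriptions are assumptions, and only the constant names come from the code above. Note also that GnuParser has been deprecated since commons-cli 1.3 in favor of DefaultParser.

// Hypothetical sketch of buildCommandLineOptions(); the constant values and
// descriptions are assumptions, not Pinot's actual code.
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;

public class FileBasedServerOptionsSketch {

  private static final String SERVER_CONFIG_OPT_NAME = "server_conf"; // assumed value
  private static final String SERVER_PORT_OPT_NAME = "server_port";   // assumed value

  private static Options buildCommandLineOptions() {
    Options options = new Options();
    // hasArg = true: each option expects one argument value.
    options.addOption(new Option(SERVER_CONFIG_OPT_NAME, true, "Path to the server config file"));
    options.addOption(new Option(SERVER_PORT_OPT_NAME, true, "Port the server listens on"));
    return options;
  }
}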
Use of org.apache.commons.cli.GnuParser in project pinot by linkedin: class ScatterGatherPerfServer, method main.
/**
 * @param args command line arguments: response size and server port
 */
public static void main(String[] args) throws Exception {
  CommandLineParser cliParser = new GnuParser();
  Options cliOptions = buildCommandLineOptions();
  CommandLine cmd = cliParser.parse(cliOptions, args, true);
  if (!cmd.hasOption(RESPONSE_SIZE_OPT_NAME) || !cmd.hasOption(SERVER_PORT_OPT_NAME)) {
    System.err.println("Missing required arguments !!");
    System.err.println(cliOptions);
    throw new RuntimeException("Missing required arguments !!");
  }
  int responseSize = Integer.parseInt(cmd.getOptionValue(RESPONSE_SIZE_OPT_NAME));
  int serverPort = Integer.parseInt(cmd.getOptionValue(SERVER_PORT_OPT_NAME));
  // 2ms latency
  ScatterGatherPerfServer server = new ScatterGatherPerfServer(serverPort, responseSize, 2);
  server.run();
}
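All of the Pinot snippets call the three-argument parse(options, args, true). The boolean is commons-cli's stopAtNonOption flag: when true, the parser stops at the first token it does not recognize as an option and leaves it, and everything after it, in cmd.getArgs() instead of throwing. A self-contained sketch of that behavior, with illustrative option names:

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class LenientParseDemo {

  public static void main(String[] args) throws ParseException {
    Options options = new Options();
    options.addOption("serverPort", true, "server port");
    CommandLineParser parser = new GnuParser();
    // "extra" is not a declared option; with stopAtNonOption = true the
    // parse still succeeds and the token lands in cmd.getArgs().
    CommandLine cmd = parser.parse(options, new String[] { "-serverPort", "9099", "extra" }, true);
    System.out.println(cmd.getOptionValue("serverPort")); // prints 9099
    System.out.println(cmd.getArgs()[0]);                 // prints extra
  }
}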
Use of org.apache.commons.cli.GnuParser in project pinot by linkedin: class ScatterGatherPerfClient, method main.
public static void main(String[] args) throws Exception {
  // Process command line to get config and port
  CommandLineParser cliParser = new GnuParser();
  Options cliOptions = buildCommandLineOptions();
  CommandLine cmd = cliParser.parse(cliOptions, args, true);
  if (!cmd.hasOption(BROKER_CONFIG_OPT_NAME) || !cmd.hasOption(REQUEST_SIZE_OPT_NAME)
      || !cmd.hasOption(NUM_REQUESTS_OPT_NAME) || !cmd.hasOption(TABLE_NAME_OPT_NAME)) {
    System.err.println("Missing required arguments !!");
    System.err.println(cliOptions);
    throw new RuntimeException("Missing required arguments !!");
  }
  String brokerConfigPath = cmd.getOptionValue(BROKER_CONFIG_OPT_NAME);
  int requestSize = Integer.parseInt(cmd.getOptionValue(REQUEST_SIZE_OPT_NAME));
  int numRequests = Integer.parseInt(cmd.getOptionValue(NUM_REQUESTS_OPT_NAME));
  String resourceName = cmd.getOptionValue(TABLE_NAME_OPT_NAME);
  // Build brokerConf from the properties file
  PropertiesConfiguration brokerConf = new PropertiesConfiguration();
  brokerConf.setDelimiterParsingDisabled(false);
  brokerConf.load(brokerConfigPath);
  RoutingTableConfig config = new RoutingTableConfig();
  config.init(brokerConf.subset(ROUTING_CFG_PREFIX));
  ScatterGatherPerfClient client = new ScatterGatherPerfClient(config, requestSize, resourceName, false, numRequests, 1, 1);
  client.run();
  System.out.println("Shutting down !!");
  client.shutdown();
  System.out.println("Shut down complete !!");
}
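The subset(ROUTING_CFG_PREFIX) call scopes the broker properties to one namespace: Commons Configuration returns a view in which the prefix is stripped from every key. A small sketch, with a made-up prefix, key, and file path since the actual value of ROUTING_CFG_PREFIX is not shown here:

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class SubsetDemo {

  public static void main(String[] args) throws Exception {
    PropertiesConfiguration conf = new PropertiesConfiguration();
    conf.setDelimiterParsingDisabled(false);
    // Suppose the file contains: pinot.broker.routing.numReplicas=2
    conf.load("/path/to/broker.properties"); // hypothetical path
    // The subset view strips the prefix, so the key becomes "numReplicas".
    Configuration routing = conf.subset("pinot.broker.routing");
    System.out.println(routing.getInt("numReplicas")); // prints 2
  }
}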
Use of org.apache.commons.cli.GnuParser in project bagheera by mozilla-metrics: class KafkaSequenceFileConsumer, method main.
public static void main(String[] args) {
  OptionFactory optFactory = OptionFactory.getInstance();
  Options options = KafkaConsumer.getOptions();
  options.addOption(optFactory.create("o", "output", true, "HDFS base path for output."));
  options.addOption(optFactory.create("df", "dateformat", true, "Date format for the date subdirectories."));
  options.addOption(optFactory.create("fs", "filesize", true, "Max file size for output files."));
  options.addOption(optFactory.create("b", "usebytes", false, "Use BytesWritable for value rather than Text."));
  options.addOption(optFactory.create("ts", "addtimestamp", false, "Adds bagheera timestamp to the json"));
  CommandLineParser parser = new GnuParser();
  ShutdownHook sh = ShutdownHook.getInstance();
  try {
    // Parse command line options
    CommandLine cmd = parser.parse(options, args);
    final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
    sh.addFirst(consumer);
    // Create a sink for storing data
    SinkConfiguration sinkConfig = new SinkConfiguration();
    sinkConfig.setString("hdfssink.hdfs.basedir.path", cmd.getOptionValue("output", "/bagheera"));
    sinkConfig.setString("hdfssink.hdfs.date.format", cmd.getOptionValue("dateformat", "yyyy-MM-dd"));
    sinkConfig.setLong("hdfssink.hdfs.max.filesize", Long.parseLong(cmd.getOptionValue("filesize", "536870912")));
    sinkConfig.setBoolean("hdfssink.hdfs.usebytes", cmd.hasOption("usebytes"));
    if (cmd.hasOption("addtimestamp")) {
      sinkConfig.setBoolean("hdfssink.hdfs.addtimestamp", true);
    }
    KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(SequenceFileSink.class, sinkConfig);
    sh.addLast(sinkFactory);
    // Set the sink factory for consumer storage
    consumer.setSinkFactory(sinkFactory);
    // Initialize metrics collection, reporting, etc.
    final MetricsManager manager = MetricsManager.getDefaultMetricsManager();
    prepareHealthChecks();
    // Begin polling
    consumer.poll();
  } catch (ParseException e) {
    LOG.error("Error parsing command line options", e);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(KafkaSequenceFileConsumer.class.getName(), options);
  } catch (NumberFormatException e) {
    LOG.error("Failed to parse filesize option", e);
  }
}
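The catch block shows commons-cli's usual failure path: parse(...) throws ParseException (for instance a MissingOptionException when a required option is absent), and HelpFormatter.printHelp prints a usage summary of every declared option. A stripped-down, self-contained version of that pattern, with an illustrative class name:

import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class HelpOnParseFailure {

  public static void main(String[] args) {
    Options options = new Options();
    Option output = new Option("o", "output", true, "HDFS base path for output.");
    output.setRequired(true);
    options.addOption(output);
    CommandLineParser parser = new GnuParser();
    try {
      parser.parse(options, args);
    } catch (ParseException e) {
      // Running with no arguments reaches this branch because -o is required.
      new HelpFormatter().printHelp(HelpOnParseFailure.class.getName(), options);
    }
  }
}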
Use of org.apache.commons.cli.GnuParser in project bagheera by mozilla-metrics: class KafkaHBaseConsumer, method main.
public static void main(String[] args) {
  OptionFactory optFactory = OptionFactory.getInstance();
  Options options = KafkaConsumer.getOptions();
  options.addOption(optFactory.create("tbl", "table", true, "HBase table name.").required());
  options.addOption(optFactory.create("f", "family", true, "Column family."));
  options.addOption(optFactory.create("q", "qualifier", true, "Column qualifier."));
  options.addOption(optFactory.create("b", "batchsize", true, "Batch size (number of messages per HBase flush)."));
  options.addOption(optFactory.create("pd", "prefixdate", false, "Prefix key with salted date."));
  CommandLineParser parser = new GnuParser();
  ShutdownHook sh = ShutdownHook.getInstance();
  try {
    // Parse command line options
    CommandLine cmd = parser.parse(options, args);
    final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
    sh.addFirst(consumer);
    // Create a sink for storing data
    SinkConfiguration sinkConfig = new SinkConfiguration();
    if (cmd.hasOption("numthreads")) {
      sinkConfig.setInt("hbasesink.hbase.numthreads", Integer.parseInt(cmd.getOptionValue("numthreads")));
    }
    if (cmd.hasOption("batchsize")) {
      sinkConfig.setInt("hbasesink.hbase.batchsize", Integer.parseInt(cmd.getOptionValue("batchsize")));
    }
    sinkConfig.setString("hbasesink.hbase.tablename", cmd.getOptionValue("table"));
    sinkConfig.setString("hbasesink.hbase.column.family", cmd.getOptionValue("family", "data"));
    sinkConfig.setString("hbasesink.hbase.column.qualifier", cmd.getOptionValue("qualifier", "json"));
    sinkConfig.setBoolean("hbasesink.hbase.rowkey.prefixdate", cmd.hasOption("prefixdate"));
    KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(HBaseSink.class, sinkConfig);
    sh.addLast(sinkFactory);
    // Set the sink factory for consumer storage
    consumer.setSinkFactory(sinkFactory);
    prepareHealthChecks();
    // Initialize metrics collection, reporting, etc.
    final MetricsManager manager = MetricsManager.getDefaultMetricsManager();
    // Begin polling
    consumer.poll();
  } catch (ParseException e) {
    LOG.error("Error parsing command line options", e);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(KafkaHBaseConsumer.class.getName(), options);
  }
}
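OptionFactory is bagheera's own helper rather than part of commons-cli; judging by its use above, create(...).required() most likely builds a commons-cli Option and marks it mandatory. A plausible equivalent in plain commons-cli, offered as an assumption about the helper's behavior rather than bagheera's actual implementation:

import org.apache.commons.cli.Option;

public final class RequiredOptionSketch {

  // Builds an option such as ("tbl", "table", true, "HBase table name.")
  // and marks it required, so GnuParser.parse(...) throws a
  // MissingOptionException (a ParseException) when it is absent.
  static Option requiredOption(String opt, String longOpt, boolean hasArg, String description) {
    Option option = new Option(opt, longOpt, hasArg, description);
    option.setRequired(true);
    return option;
  }
}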