Use of org.apache.commons.cli.Options in project hive (by apache):
class HBaseSchemaTool, method main.
/**
 * Command-line entry point for the HBase metastore schema tool.
 *
 * <p>Supported actions: {@code -h} print usage, {@code -i} install the schema onto an
 * HBase cluster, {@code -l} list metastore tables, and scanning a table ({@code -t})
 * either by exact key ({@code -k}) or by key regular expression ({@code -r}).
 * {@code -k} and {@code -r} are mutually exclusive; if neither is given, every key
 * is scanned (regex {@code ".*"}).
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
  Options options = new Options();
  options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
  options.addOption(OptionBuilder.withLongOpt("install").withDescription("Install the schema onto an HBase cluster.").create('i'));
  // Fixed help text: the parenthetical was left unclosed in the original.
  options.addOption(OptionBuilder.withLongOpt("key").withDescription("Key to scan with. This should be an exact key (not a regular expression)").hasArg().create('k'));
  options.addOption(OptionBuilder.withLongOpt("list-tables").withDescription("List tables in HBase metastore").create('l'));
  options.addOption(OptionBuilder.withLongOpt("regex-key").withDescription("Regular expression to scan keys with.").hasArg().create('r'));
  options.addOption(OptionBuilder.withLongOpt("table").withDescription("HBase metastore table to scan").hasArg().create('t'));
  CommandLine cli = null;
  try {
    cli = new GnuParser().parse(options, args);
  } catch (ParseException e) {
    System.err.println("Parse Exception: " + e.getMessage());
    usage(options);
    return;
  }
  // Help short-circuits everything else.
  if (cli.hasOption('h')) {
    usage(options);
    return;
  }
  Configuration conf = new Configuration();
  // Install is a standalone action; nothing else is processed after it.
  if (cli.hasOption('i')) {
    new HBaseSchemaTool().install(conf, System.err);
    return;
  }
  String key = null;
  if (cli.hasOption('k')) {
    key = cli.getOptionValue('k');
  }
  String regex = null;
  if (cli.hasOption('r')) {
    regex = cli.getOptionValue('r');
  }
  // Exact key and regex are mutually exclusive.
  if (key != null && regex != null) {
    usage(options);
    return;
  }
  // Default to scanning everything when neither a key nor a regex was supplied.
  if (key == null && regex == null) {
    regex = ".*";
  }
  // I do this in the object rather than in the static main so that it's easier to test.
  new HBaseSchemaTool().go(cli.hasOption('l'), cli.getOptionValue('t'), key, regex, conf, System.out, System.err);
}
Use of org.apache.commons.cli.Options in project hive (by apache):
class HBaseImport, method init.
/**
 * Parses the command-line arguments and initializes the import state
 * (flags, filter lists, batch size, parallelism, and work queues).
 *
 * <p>At least one command option ({@code -a}, {@code -d}, {@code -f}, {@code -r},
 * or {@code -t}) must be supplied, otherwise help is printed.
 *
 * @param args raw command-line arguments
 * @return 0 on success; 1 if help was requested or no command option was given
 * @throws ParseException if the command line cannot be parsed
 */
private int init(String... args) throws ParseException {
  Options options = new Options();
  // Reset state so init() can be called more than once (e.g. from tests).
  doAll = doKerberos = false;
  parallel = 1;
  batchSize = 1000;
  options.addOption(OptionBuilder.withLongOpt("all").withDescription("Import the full metastore").create('a'));
  options.addOption(OptionBuilder.withLongOpt("batchsize").withDescription("Number of partitions to read and write in a batch, defaults to 1000").hasArg().create('b'));
  options.addOption(OptionBuilder.withLongOpt("database").withDescription("Import a single database").hasArgs().create('d'));
  options.addOption(OptionBuilder.withLongOpt("help").withDescription("You're looking at it").create('h'));
  options.addOption(OptionBuilder.withLongOpt("function").withDescription("Import a single function").hasArgs().create('f'));
  options.addOption(OptionBuilder.withLongOpt("kerberos").withDescription("Import all kerberos related objects (master key, tokens)").create('k'));
  options.addOption(OptionBuilder.withLongOpt("parallel").withDescription("Parallel factor for loading (only applied to tables and partitions), " + "defaults to 1").hasArg().create('p'));
  options.addOption(OptionBuilder.withLongOpt("role").withDescription("Import a single role").hasArgs().create('r'));
  // Fixed help text: "a single tables" -> "a single table".
  options.addOption(OptionBuilder.withLongOpt("tables").withDescription("Import a single table").hasArgs().create('t'));
  CommandLine cli = new GnuParser().parse(options, args);
  // Process help, if it was asked for, this must be done first
  if (cli.hasOption('h')) {
    printHelp(options);
    return 1;
  }
  boolean hasCmd = false;
  // Now process the other command line args
  if (cli.hasOption('a')) {
    hasCmd = true;
    doAll = true;
  }
  if (cli.hasOption('b')) {
    // NOTE(review): a non-numeric value throws an uncaught NumberFormatException;
    // consider reporting it as a usage error instead.
    batchSize = Integer.parseInt(cli.getOptionValue('b'));
  }
  if (cli.hasOption('d')) {
    hasCmd = true;
    dbsToImport = Arrays.asList(cli.getOptionValues('d'));
  }
  if (cli.hasOption('f')) {
    hasCmd = true;
    functionsToImport = Arrays.asList(cli.getOptionValues('f'));
  }
  if (cli.hasOption('p')) {
    parallel = Integer.parseInt(cli.getOptionValue('p'));
  }
  if (cli.hasOption('r')) {
    hasCmd = true;
    rolesToImport = Arrays.asList(cli.getOptionValues('r'));
  }
  if (cli.hasOption('k')) {
    doKerberos = true;
  }
  if (cli.hasOption('t')) {
    hasCmd = true;
    tablesToImport = Arrays.asList(cli.getOptionValues('t'));
  }
  // Without at least one command option there is nothing to do.
  if (!hasCmd) {
    printHelp(options);
    return 1;
  }
  dbs = new ArrayList<>();
  // We don't want to bound the size of the table queue because we keep it all in memory
  partitionedTables = new LinkedBlockingQueue<>();
  tableNameQueue = new LinkedBlockingQueue<>();
  indexNameQueue = new LinkedBlockingQueue<>();
  // Bound the size of this queue so we don't get too much in memory.
  partQueue = new ArrayBlockingQueue<>(parallel * 2);
  return 0;
}
Use of org.apache.commons.cli.Options in project checkstyle (by checkstyle):
class Main, method buildOptions.
/**
 * Builds and returns the set of command-line parameters supported by cli Checkstyle.
 * @return available options
 */
private static Options buildOptions() {
    final Options result = new Options();
    // Options that take a value.
    result.addOption(OPTION_C_NAME, true, "Sets the check configuration file to use.");
    result.addOption(OPTION_O_NAME, true, "Sets the output file. Defaults to stdout");
    result.addOption(OPTION_P_NAME, true, "Loads the properties file");
    final String formatDescription = String.format(
            "Sets the output format. (%s|%s). Defaults to %s",
            PLAIN_FORMAT_NAME, XML_FORMAT_NAME, PLAIN_FORMAT_NAME);
    result.addOption(OPTION_F_NAME, true, formatDescription);
    // Boolean flags (no value).
    result.addOption(OPTION_V_NAME, false, "Print product version and exit");
    result.addOption(OPTION_T_NAME, OPTION_TREE_NAME, false,
            "Print Abstract Syntax Tree(AST) of the file");
    result.addOption(OPTION_CAPITAL_T_NAME, OPTION_TREE_COMMENT_NAME, false,
            "Print Abstract Syntax Tree(AST) of the file including comments");
    result.addOption(OPTION_J_NAME, OPTION_JAVADOC_TREE_NAME, false,
            "Print Parse tree of the Javadoc comment");
    result.addOption(OPTION_CAPITAL_J_NAME, OPTION_TREE_JAVADOC_NAME, false,
            "Print full Abstract Syntax Tree of the file");
    result.addOption(OPTION_D_NAME, OPTION_DEBUG_NAME, false,
            "Print all debug logging of CheckStyle utility");
    // Exclusion options take a value again.
    result.addOption(OPTION_E_NAME, OPTION_EXCLUDE_NAME, true,
            "Directory path to exclude from CheckStyle");
    result.addOption(OPTION_X_NAME, OPTION_EXCLUDE_REGEXP_NAME, true,
            "Regular expression of directory to exclude from CheckStyle");
    return result;
}
Use of org.apache.commons.cli.Options in project bagheera (by mozilla-metrics):
class KafkaSequenceFileConsumer, method main.
/**
 * Entry point: consumes messages from Kafka and writes them to HDFS sequence files.
 *
 * <p>Wires a {@code KafkaConsumer} to a {@code SequenceFileSink}, registers both with the
 * shutdown hook (consumer first so it stops producing before the sink closes), then polls
 * until shutdown.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
  OptionFactory optFactory = OptionFactory.getInstance();
  Options options = KafkaConsumer.getOptions();
  options.addOption(optFactory.create("o", "output", true, "HDFS base path for output."));
  options.addOption(optFactory.create("df", "dateformat", true, "Date format for the date subdirectories."));
  options.addOption(optFactory.create("fs", "filesize", true, "Max file size for output files."));
  options.addOption(optFactory.create("b", "usebytes", false, "Use BytesWritable for value rather than Text."));
  options.addOption(optFactory.create("ts", "addtimestamp", false, "Adds bagheera timestamp to the json"));
  CommandLineParser parser = new GnuParser();
  ShutdownHook sh = ShutdownHook.getInstance();
  try {
    // Parse command line options
    CommandLine cmd = parser.parse(options, args);
    final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
    sh.addFirst(consumer);
    // Create a sink for storing data
    SinkConfiguration sinkConfig = new SinkConfiguration();
    sinkConfig.setString("hdfssink.hdfs.basedir.path", cmd.getOptionValue("output", "/bagheera"));
    sinkConfig.setString("hdfssink.hdfs.date.format", cmd.getOptionValue("dateformat", "yyyy-MM-dd"));
    // Default max file size is 512 MiB (536870912 bytes).
    sinkConfig.setLong("hdfssink.hdfs.max.filesize", Long.parseLong(cmd.getOptionValue("filesize", "536870912")));
    sinkConfig.setBoolean("hdfssink.hdfs.usebytes", cmd.hasOption("usebytes"));
    if (cmd.hasOption("addtimestamp")) {
      sinkConfig.setBoolean("hdfssink.hdfs.addtimestamp", true);
    }
    KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(SequenceFileSink.class, sinkConfig);
    sh.addLast(sinkFactory);
    // Set the sink for consumer storage
    consumer.setSinkFactory(sinkFactory);
    // Initialize metrics collection, reporting, etc.  Called for its side effect only;
    // the previously unused local variable holding the manager has been removed.
    MetricsManager.getDefaultMetricsManager();
    prepareHealthChecks();
    // Begin polling
    consumer.poll();
  } catch (ParseException e) {
    LOG.error("Error parsing command line options", e);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(KafkaSequenceFileConsumer.class.getName(), options);
  } catch (NumberFormatException e) {
    LOG.error("Failed to parse filesize option", e);
  }
}
Use of org.apache.commons.cli.Options in project bagheera (by mozilla-metrics):
class KafkaHBaseConsumer, method main.
/**
 * Entry point: consumes messages from Kafka and writes them to an HBase table.
 *
 * <p>Wires a {@code KafkaConsumer} to an {@code HBaseSink}, registers both with the
 * shutdown hook (consumer first so it stops producing before the sink closes), then polls
 * until shutdown.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
  OptionFactory optFactory = OptionFactory.getInstance();
  Options options = KafkaConsumer.getOptions();
  options.addOption(optFactory.create("tbl", "table", true, "HBase table name.").required());
  options.addOption(optFactory.create("f", "family", true, "Column family."));
  options.addOption(optFactory.create("q", "qualifier", true, "Column qualifier."));
  options.addOption(optFactory.create("b", "batchsize", true, "Batch size (number of messages per HBase flush)."));
  options.addOption(optFactory.create("pd", "prefixdate", false, "Prefix key with salted date."));
  CommandLineParser parser = new GnuParser();
  ShutdownHook sh = ShutdownHook.getInstance();
  try {
    // Parse command line options
    CommandLine cmd = parser.parse(options, args);
    final KafkaConsumer consumer = KafkaConsumer.fromOptions(cmd);
    sh.addFirst(consumer);
    // Create a sink for storing data
    SinkConfiguration sinkConfig = new SinkConfiguration();
    // NOTE(review): "numthreads" is not registered in this method — presumably it comes
    // from KafkaConsumer.getOptions(); verify, otherwise this branch is dead code.
    if (cmd.hasOption("numthreads")) {
      sinkConfig.setInt("hbasesink.hbase.numthreads", Integer.parseInt(cmd.getOptionValue("numthreads")));
    }
    if (cmd.hasOption("batchsize")) {
      sinkConfig.setInt("hbasesink.hbase.batchsize", Integer.parseInt(cmd.getOptionValue("batchsize")));
    }
    sinkConfig.setString("hbasesink.hbase.tablename", cmd.getOptionValue("table"));
    sinkConfig.setString("hbasesink.hbase.column.family", cmd.getOptionValue("family", "data"));
    sinkConfig.setString("hbasesink.hbase.column.qualifier", cmd.getOptionValue("qualifier", "json"));
    sinkConfig.setBoolean("hbasesink.hbase.rowkey.prefixdate", cmd.hasOption("prefixdate"));
    KeyValueSinkFactory sinkFactory = KeyValueSinkFactory.getInstance(HBaseSink.class, sinkConfig);
    sh.addLast(sinkFactory);
    // Set the sink factory for consumer storage
    consumer.setSinkFactory(sinkFactory);
    prepareHealthChecks();
    // Initialize metrics collection, reporting, etc.  Called for its side effect only;
    // the previously unused local variable holding the manager has been removed.
    MetricsManager.getDefaultMetricsManager();
    // Begin polling
    consumer.poll();
  } catch (ParseException e) {
    LOG.error("Error parsing command line options", e);
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp(KafkaHBaseConsumer.class.getName(), options);
  } catch (NumberFormatException e) {
    // Matches the sibling KafkaSequenceFileConsumer: bad numeric option values are
    // reported instead of escaping as an uncaught exception.
    LOG.error("Failed to parse numeric option value", e);
  }
}
Aggregations