Use of org.apache.commons.cli.Option in project core by s4 — class LoadGenerator, method main.
/**
 * Command-line entry point for the load generator. Parses the generator
 * options, wires up a {@code CommLayerEmitter} with the given cluster and
 * application settings, and replays events from the input file at the
 * requested rate.
 *
 * @param args command-line options (see the Option definitions below); the
 *             first remaining non-option argument is the input file name
 */
public static void main(String[] args) {
    Options options = new Options();
    boolean warmUp = false;
    options.addOption(OptionBuilder.withArgName("rate").hasArg().withDescription("Rate (events per second)").create("r"));
    options.addOption(OptionBuilder.withArgName("display_rate").hasArg().withDescription("Display Rate at specified second boundary").create("d"));
    options.addOption(OptionBuilder.withArgName("start_boundary").hasArg().withDescription("Start boundary in seconds").create("b"));
    options.addOption(OptionBuilder.withArgName("run_for").hasArg().withDescription("Run for a specified number of seconds").create("x"));
    options.addOption(OptionBuilder.withArgName("cluster_manager").hasArg().withDescription("Cluster manager").create("z"));
    options.addOption(OptionBuilder.withArgName("sender_application_name").hasArg().withDescription("Sender application name").create("a"));
    options.addOption(OptionBuilder.withArgName("listener_application_name").hasArg().withDescription("Listener application name").create("g"));
    options.addOption(OptionBuilder.withArgName("sleep_overhead").hasArg().withDescription("Sleep overhead").create("o"));
    options.addOption(new Option("w", "Warm-up"));
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        // Parse the command line arguments.
        line = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        System.exit(1);
    }
    // Target emission rate in events per second.
    int expectedRate = 250;
    if (line.hasOption("r")) {
        try {
            expectedRate = Integer.parseInt(line.getOptionValue("r"));
        } catch (Exception e) {
            System.err.println("Bad expected rate specified " + line.getOptionValue("r"));
            System.exit(1);
        }
    }
    // How often (in seconds) the achieved rate is printed.
    int displayRateIntervalSeconds = 20;
    if (line.hasOption("d")) {
        try {
            displayRateIntervalSeconds = Integer.parseInt(line.getOptionValue("d"));
        } catch (Exception e) {
            System.err.println("Bad display rate value specified " + line.getOptionValue("d"));
            System.exit(1);
        }
    }
    int startBoundary = 2;
    if (line.hasOption("b")) {
        try {
            startBoundary = Integer.parseInt(line.getOptionValue("b"));
        } catch (Exception e) {
            System.err.println("Bad start boundary value specified " + line.getOptionValue("b"));
            System.exit(1);
        }
    }
    // NOTE(review): "-f" is never registered in `options` above, so GnuParser
    // rejects it as an unknown option and this branch is effectively
    // unreachable — TODO register the option or remove this block.
    int updateFrequency = 0;
    if (line.hasOption("f")) {
        try {
            updateFrequency = Integer.parseInt(line.getOptionValue("f"));
        } catch (Exception e) {
            // FIX: corrected "udpdate" typo in the error message.
            System.err.println("Bad query update frequency specified " + line.getOptionValue("f"));
            System.exit(1);
        }
        System.out.printf("Update frequency is %d\n", updateFrequency);
    }
    // Optional wall-clock limit for the run, in seconds (0 = unlimited).
    int runForTime = 0;
    if (line.hasOption("x")) {
        try {
            runForTime = Integer.parseInt(line.getOptionValue("x"));
        } catch (Exception e) {
            System.err.println("Bad run for time specified " + line.getOptionValue("x"));
            System.exit(1);
        }
        System.out.printf("Run for time is %d\n", runForTime);
    }
    String clusterManagerAddress = null;
    if (line.hasOption("z")) {
        clusterManagerAddress = line.getOptionValue("z");
    }
    String senderApplicationName = null;
    if (line.hasOption("a")) {
        senderApplicationName = line.getOptionValue("a");
    }
    String listenerApplicationName = null;
    // FIX: this previously tested hasOption("a"), so the listener name ("-g")
    // was only picked up when "-a" was also given; gate it on its own option.
    if (line.hasOption("g")) {
        listenerApplicationName = line.getOptionValue("g");
    }
    // Default the listener application to the sender application.
    if (listenerApplicationName == null) {
        listenerApplicationName = senderApplicationName;
    }
    long sleepOverheadMicros = -1;
    if (line.hasOption("o")) {
        try {
            sleepOverheadMicros = Long.parseLong(line.getOptionValue("o"));
        } catch (NumberFormatException e) {
            System.err.println("Bad sleep overhead specified " + line.getOptionValue("o"));
            System.exit(1);
        }
        System.out.printf("Specified sleep overhead is %d\n", sleepOverheadMicros);
    }
    if (line.hasOption("w")) {
        // NOTE(review): warmUp is set but never passed to the generator below —
        // looks like a missing setter call; confirm intended behavior.
        warmUp = true;
    }
    List loArgs = line.getArgList();
    if (loArgs.size() < 1) {
        System.err.println("No input file specified");
        System.exit(1);
    }
    String inputFilename = (String) loArgs.get(0);
    SerializerDeserializer serDeser = new KryoSerDeser();
    // Build and initialize the comm-layer emitter used to send events.
    CommLayerEmitter clEmitter = new CommLayerEmitter();
    clEmitter.setAppName(senderApplicationName);
    clEmitter.setListenerAppName(listenerApplicationName);
    clEmitter.setClusterManagerAddress(clusterManagerAddress);
    clEmitter.setSenderId(String.valueOf(System.currentTimeMillis() / 1000));
    clEmitter.setSerDeser(serDeser);
    clEmitter.init();
    long endTime = 0;
    if (runForTime > 0) {
        // FIX: multiply as long (1000L) to avoid int overflow for large values.
        endTime = System.currentTimeMillis() + (runForTime * 1000L);
        // NOTE(review): endTime is computed but never handed to the generator —
        // looks like a missing setter call; confirm intended behavior.
    }
    LoadGenerator loadGenerator = new LoadGenerator();
    loadGenerator.setInputFilename(inputFilename);
    loadGenerator.setEventEmitter(clEmitter);
    loadGenerator.setDisplayRateInterval(displayRateIntervalSeconds);
    loadGenerator.setExpectedRate(expectedRate);
    loadGenerator.run();
    System.exit(0);
}
Use of org.apache.commons.cli.Option in project jstorm by alibaba — class GenericOptionsParser, method buildGeneralOptions.
/**
 * Builds the framework-wide option set: a copy of {@code opts} extended with
 * the generic "libjars", "conf" and "D" options. As a side effect, a
 * processor is registered in {@code optionProcessors} for each added option.
 *
 * @param opts the caller-supplied options to copy into the result
 * @return a new {@code Options} instance containing the copied and generic options
 */
static Options buildGeneralOptions(Options opts) {
    Options merged = new Options();
    // Start from a copy of the caller-supplied options.
    for (Object existing : opts.getOptions()) {
        merged.addOption((Option) existing);
    }
    merged.addOption(OptionBuilder.withArgName("paths").hasArg().withDescription("comma separated jars to be used by the submitted topology").create("libjars"));
    optionProcessors.put("libjars", new LibjarsProcessor());
    merged.addOption(OptionBuilder.withArgName("configuration file").hasArg().withDescription("an application configuration file").create("conf"));
    optionProcessors.put("conf", new ConfFileProcessor());
    // "D" must be registered after "conf": it is of higher priority.
    merged.addOption(OptionBuilder.withArgName("D").hasArg().withDescription("extra configurations (preserving types)").create("D"));
    optionProcessors.put("D", new ExtraConfigProcessor());
    return merged;
}
Use of org.apache.commons.cli.Option in project cassandra by apache — class SSTableMetadataViewer, method main.
/**
 * Prints the metadata components (validation, stats, compaction, compression,
 * serialization header) of each sstable named on the command line.
 *
 * @param args a list of sstables whose metadata we're interested in
 * @throws IOException if reading an sstable's metadata fails
 */
public static void main(String[] args) throws IOException {
    PrintStream out = System.out;
    Option optGcgs = new Option(null, GCGS_KEY, true, "The " + GCGS_KEY + " to use when calculating droppable tombstones");
    Options options = new Options();
    options.addOption(optGcgs);
    CommandLine cmd = null;
    CommandLineParser parser = new PosixParser();
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        printHelp(options, out);
        // FIX: without exiting here, cmd stays null and cmd.getArgs() below
        // throws a NullPointerException.
        System.exit(1);
    }
    if (cmd.getArgs().length == 0) {
        printHelp(options, out);
        // FIX: exit with a usage error instead of falling through and doing nothing.
        System.exit(1);
    }
    // gc_grace_seconds used for the droppable-tombstone estimate (default 0).
    int gcgs = Integer.parseInt(cmd.getOptionValue(GCGS_KEY, "0"));
    Util.initDatabaseDescriptor();
    for (String fname : cmd.getArgs()) {
        if (new File(fname).exists()) {
            Descriptor descriptor = Descriptor.fromFilename(fname);
            // Deserialize every metadata component at once.
            Map<MetadataType, MetadataComponent> metadata = descriptor.getMetadataSerializer().deserialize(descriptor, EnumSet.allOf(MetadataType.class));
            ValidationMetadata validation = (ValidationMetadata) metadata.get(MetadataType.VALIDATION);
            StatsMetadata stats = (StatsMetadata) metadata.get(MetadataType.STATS);
            CompactionMetadata compaction = (CompactionMetadata) metadata.get(MetadataType.COMPACTION);
            // Compression info is optional: present only if the component file exists.
            CompressionMetadata compression = null;
            File compressionFile = new File(descriptor.filenameFor(Component.COMPRESSION_INFO));
            if (compressionFile.exists())
                compression = CompressionMetadata.create(fname);
            SerializationHeader.Component header = (SerializationHeader.Component) metadata.get(MetadataType.HEADER);
            out.printf("SSTable: %s%n", descriptor);
            if (validation != null) {
                out.printf("Partitioner: %s%n", validation.partitioner);
                out.printf("Bloom Filter FP chance: %f%n", validation.bloomFilterFPChance);
            }
            if (stats != null) {
                out.printf("Minimum timestamp: %s%n", stats.minTimestamp);
                out.printf("Maximum timestamp: %s%n", stats.maxTimestamp);
                out.printf("SSTable min local deletion time: %s%n", stats.minLocalDeletionTime);
                out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime);
                out.printf("Compressor: %s%n", compression != null ? compression.compressor().getClass().getName() : "-");
                if (compression != null)
                    out.printf("Compression ratio: %s%n", stats.compressionRatio);
                out.printf("TTL min: %s%n", stats.minTTL);
                out.printf("TTL max: %s%n", stats.maxTTL);
                if (validation != null && header != null)
                    printMinMaxToken(descriptor, FBUtilities.newPartitioner(descriptor), header.getKeyType(), out);
                // Only print clustering ranges when the header's clustering
                // arity matches the recorded min values.
                if (header != null && header.getClusteringTypes().size() == stats.minClusteringValues.size()) {
                    List<AbstractType<?>> clusteringTypes = header.getClusteringTypes();
                    List<ByteBuffer> minClusteringValues = stats.minClusteringValues;
                    List<ByteBuffer> maxClusteringValues = stats.maxClusteringValues;
                    String[] minValues = new String[clusteringTypes.size()];
                    String[] maxValues = new String[clusteringTypes.size()];
                    for (int i = 0; i < clusteringTypes.size(); i++) {
                        minValues[i] = clusteringTypes.get(i).getString(minClusteringValues.get(i));
                        maxValues[i] = clusteringTypes.get(i).getString(maxClusteringValues.get(i));
                    }
                    out.printf("minClustringValues: %s%n", Arrays.toString(minValues));
                    out.printf("maxClustringValues: %s%n", Arrays.toString(maxValues));
                }
                out.printf("Estimated droppable tombstones: %s%n", stats.getEstimatedDroppableTombstoneRatio((int) (System.currentTimeMillis() / 1000) - gcgs));
                out.printf("SSTable Level: %d%n", stats.sstableLevel);
                out.printf("Repaired at: %d%n", stats.repairedAt);
                out.printf("Pending repair: %s%n", stats.pendingRepair);
                out.printf("Replay positions covered: %s%n", stats.commitLogIntervals);
                out.printf("totalColumnsSet: %s%n", stats.totalColumnsSet);
                out.printf("totalRows: %s%n", stats.totalRows);
                out.println("Estimated tombstone drop times:");
                for (Map.Entry<Number, long[]> entry : stats.estimatedTombstoneDropTime.getAsMap().entrySet()) {
                    out.printf("%-10s:%10s%n", entry.getKey().intValue(), entry.getValue()[0]);
                }
                printHistograms(stats, out);
            }
            if (compaction != null) {
                out.printf("Estimated cardinality: %s%n", compaction.cardinalityEstimator.cardinality());
            }
            if (header != null) {
                EncodingStats encodingStats = header.getEncodingStats();
                AbstractType<?> keyType = header.getKeyType();
                List<AbstractType<?>> clusteringTypes = header.getClusteringTypes();
                Map<ByteBuffer, AbstractType<?>> staticColumns = header.getStaticColumns();
                // Render column maps as name -> type-string for display.
                Map<String, String> statics = staticColumns.entrySet().stream().collect(Collectors.toMap(e -> UTF8Type.instance.getString(e.getKey()), e -> e.getValue().toString()));
                Map<ByteBuffer, AbstractType<?>> regularColumns = header.getRegularColumns();
                Map<String, String> regulars = regularColumns.entrySet().stream().collect(Collectors.toMap(e -> UTF8Type.instance.getString(e.getKey()), e -> e.getValue().toString()));
                out.printf("EncodingStats minTTL: %s%n", encodingStats.minTTL);
                out.printf("EncodingStats minLocalDeletionTime: %s%n", encodingStats.minLocalDeletionTime);
                out.printf("EncodingStats minTimestamp: %s%n", encodingStats.minTimestamp);
                out.printf("KeyType: %s%n", keyType.toString());
                out.printf("ClusteringTypes: %s%n", clusteringTypes.toString());
                out.printf("StaticColumns: {%s}%n", FBUtilities.toString(statics));
                out.printf("RegularColumns: {%s}%n", FBUtilities.toString(regulars));
            }
        } else {
            out.println("No such file: " + fname);
        }
    }
}
Use of org.apache.commons.cli.Option in project flink by apache — class CliFrontendParser, method buildGeneralOptions.
/**
 * Adds the general options shared by all actions to the given set: the help
 * option, the deprecated verbose flag, and each custom command line's own
 * general options.
 *
 * @param options the option set to extend
 * @return the same {@code options} instance, for chaining
 */
private static Options buildGeneralOptions(Options options) {
    options.addOption(HELP_OPTION);
    // The verbose flag (-v) is accepted only for backwards compatibility and ignored.
    options.addOption(new Option("v", "verbose", false, "This option is deprecated."));
    // Let every registered custom command line contribute its general options.
    for (CustomCommandLine cli : CliFrontend.getCustomCommandLineList()) {
        cli.addGeneralOptions(options);
    }
    return options;
}
Use of org.apache.commons.cli.Option in project flink by apache — class HadoopUtils, method paramsFromGenericOptionsParser.
/**
 * Returns {@link ParameterTool} for the arguments parsed by {@link GenericOptionsParser}.
 *
 * <p>Each option value is expected to have the form {@code key=value}; the
 * value itself may contain further {@code '='} characters, and a value-less
 * entry ({@code key}) is mapped to the empty string.
 *
 * @param args Input array arguments. It should be parsable by {@link GenericOptionsParser}
 * @return A {@link ParameterTool}
 * @throws IOException If arguments cannot be parsed by {@link GenericOptionsParser}
 * @see GenericOptionsParser
 */
public static ParameterTool paramsFromGenericOptionsParser(String[] args) throws IOException {
    Option[] options = new GenericOptionsParser(args).getCommandLine().getOptions();
    Map<String, String> map = new HashMap<String, String>();
    for (Option option : options) {
        // FIX: split on the first '=' only — split("=") silently truncated
        // values containing '=' (e.g. "k=a=b" became k -> "a") and threw
        // ArrayIndexOutOfBoundsException when no '=' was present.
        String[] keyValue = option.getValue().split("=", 2);
        map.put(keyValue[0], keyValue.length == 2 ? keyValue[1] : "");
    }
    return ParameterTool.fromMap(map);
}
Aggregations