Use of org.apache.commons.cli.PosixParser in project head by mifos:
the method parseOptions of class DbUnitDataImportExport.
/**
 * Parses the supplied command-line arguments and populates the importer/exporter
 * state (fileName, user, password, databaseName, doExport, exportAsSql).
 *
 * <p>Behavior: prints help and exits with status 0 when the help flag is given;
 * reports a missing required option via {@code missingOption} for user/password;
 * selects import vs. export mode, optionally enabling SQL-format export.
 *
 * @param args raw command-line arguments as passed to {@code main}
 */
public void parseOptions(String[] args) {
    // POSIX-style parser from Apache Commons CLI.
    CommandLineParser cliParser = new PosixParser();
    try {
        CommandLine cmdLine = cliParser.parse(options, args);

        // Help short-circuits everything else.
        if (cmdLine.hasOption(HELP_OPTION_NAME)) {
            showHelp(options);
            System.exit(0);
        }

        if (cmdLine.hasOption(FILE_OPTION_NAME)) {
            fileName = cmdLine.getOptionValue(FILE_OPTION_NAME);
        }

        // User and password are required; flag each one that is absent.
        if (!cmdLine.hasOption(USER_OPTION_NAME)) {
            missingOption(userOption);
        } else {
            user = cmdLine.getOptionValue(USER_OPTION_NAME);
        }
        if (!cmdLine.hasOption(PASSWORD_OPTION_NAME)) {
            missingOption(passwordOption);
        } else {
            password = cmdLine.getOptionValue(PASSWORD_OPTION_NAME);
        }

        if (cmdLine.hasOption(DATABASE_OPTION_NAME)) {
            databaseName = cmdLine.getOptionValue(DATABASE_OPTION_NAME);
        }

        // Mode selection: import wins over export; SQL output only applies to export.
        if (cmdLine.hasOption(IMPORT_OPTION_NAME)) {
            doExport = false;
        } else if (cmdLine.hasOption(EXPORT_OPTION_NAME)) {
            doExport = true;
            if (cmdLine.hasOption(SQL_OPTION_NAME)) {
                exportAsSql = true;
            }
        }
    } catch (ParseException exp) {
        fail("Parsing failed. Reason: " + exp.getMessage());
    }
}
Use of org.apache.commons.cli.PosixParser in project henplus by neurolabs:
the method readCommandLineOptions of class HenPlus.
/**
* @param argv
*/
/**
 * Parses the command line: applies the built-in flags (-h help, -s quiet,
 * -v verbose) and then hands the parsed line to the registered command
 * options. Any parse or handling failure prints usage and exits with 1.
 *
 * @param argv raw command-line arguments
 */
private void readCommandLineOptions(final String[] argv) {
    final Options knownOptions = getMainOptions();
    // Let each registered command contribute its own options before parsing.
    registerCommandOptions(knownOptions);
    final CommandLineParser cliParser = new PosixParser();
    try {
        final CommandLine parsed = cliParser.parse(knownOptions, argv);
        if (parsed.hasOption('h')) {
            // Help exits immediately with success.
            usageAndExit(knownOptions, 0);
        }
        if (parsed.hasOption('s')) {
            _quiet = true;
        }
        if (parsed.hasOption('v')) {
            _verbose = true;
        }
        handleCommandOptions(parsed);
    } catch (final Exception e) {
        // Boundary catch: any failure here is fatal for startup.
        Logger.error("Error handling command line arguments", e);
        usageAndExit(knownOptions, 1);
    }
}
Use of org.apache.commons.cli.PosixParser in project f5less by sky87:
the method main of class TheBuilder.
/**
 * Entry point for the f5less watcher/reloader.
 *
 * <p>Positional arguments come in (path, command) pairs; the special command
 * {@code ws:reload} registers the path with the websocket reload server
 * instead of a shell command. Options: {@code -js} drops a helper script,
 * {@code -p}/{@code -h} configure the reload server, {@code -d} sets
 * command debouncing in milliseconds.
 *
 * @param argv raw command-line arguments
 * @throws Exception on I/O or parse failures (fatal for a CLI tool)
 */
public static void main(String[] argv) throws Exception {
    Options options = new Options();
    options.addOption("js", false, "Create f5less.js");
    options.addOption("p", "port", true, "ws:reload websocket server port");
    options.addOption("h", "host", true, "ws:reload websocket server host");
    options.addOption("d", "debounce", true, "debouncing for commands");
    CommandLine cmdLine = new PosixParser().parse(options, argv);
    String[] args = cmdLine.getArgs();
    if (cmdLine.hasOption("js")) {
        // Drop the client-side helper script next to the working directory.
        Files.copy(TheBuilder.class.getClassLoader().getResourceAsStream("f5less.js"), Paths.get("./f5less.js"), StandardCopyOption.REPLACE_EXISTING);
        System.out.println("\n f5less.js created...\n put the following\n" + " <script type=\"text/javascript\" src=\"f5less.js\"></script>\n" + " <script type=\"text/javascript\">f5less.connect()</script>\n" + " in the page for which you want automatic reloading (assuming 'f5less.js' is in the same directory)\n");
    }
    // FIX: also reject an odd number of positional args — previously a trailing
    // unpaired path was silently ignored by the pairwise loop below.
    if (args.length < 2 || args.length % 2 != 0) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("f5less [options] path1 cmd1 path2 cmd2 ...", options);
        System.exit(-1);
    }
    final List<String> reloadPaths = new LinkedList<String>();
    // Debounce defaults to 100ms when -d is not supplied.
    final int commandsDebounce = Integer.parseInt(cmdLine.getOptionValue("d", "100"));
    for (int i = 0; i < args.length / 2; i++) {
        final String path = args[i * 2];
        final String rawCmd = args[i * 2 + 1];
        if (rawCmd.equals("ws:reload"))
            reloadPaths.add(path);
        else
            Watcher.register(path, new CommandListener(path, rawCmd, commandsDebounce));
    }
    ReloadServer reloadServer = null;
    if (!reloadPaths.isEmpty()) {
        // Server debounce is padded slightly past the command debounce.
        reloadServer = new ReloadServer(cmdLine.getOptionValue("h", "localhost"), Integer.parseInt(cmdLine.getOptionValue("p", "9999")), commandsDebounce + 50);
        reloadServer.monitor(reloadPaths);
    }
    System.out.println("Press enter to exit...");
    System.in.read();
    // Orderly shutdown of the server, filesystem watcher, and command runners.
    if (reloadServer != null)
        reloadServer.stop();
    Watcher.stop();
    CommandListener.stop();
    System.out.println("Bye bye");
}
Use of org.apache.commons.cli.PosixParser in project cassandra by apache:
the method main of class SSTableMetadataViewer.
/**
* @param args a list of sstables whose metadata we're interested in
*/
public static void main(String[] args) throws IOException {
PrintStream out = System.out;
Option optGcgs = new Option(null, GCGS_KEY, true, "The " + GCGS_KEY + " to use when calculating droppable tombstones");
Options options = new Options();
options.addOption(optGcgs);
CommandLine cmd = null;
CommandLineParser parser = new PosixParser();
try {
cmd = parser.parse(options, args);
} catch (ParseException e) {
printHelp(options, out);
}
if (cmd.getArgs().length == 0) {
printHelp(options, out);
}
int gcgs = Integer.parseInt(cmd.getOptionValue(GCGS_KEY, "0"));
Util.initDatabaseDescriptor();
for (String fname : cmd.getArgs()) {
if (new File(fname).exists()) {
Descriptor descriptor = Descriptor.fromFilename(fname);
Map<MetadataType, MetadataComponent> metadata = descriptor.getMetadataSerializer().deserialize(descriptor, EnumSet.allOf(MetadataType.class));
ValidationMetadata validation = (ValidationMetadata) metadata.get(MetadataType.VALIDATION);
StatsMetadata stats = (StatsMetadata) metadata.get(MetadataType.STATS);
CompactionMetadata compaction = (CompactionMetadata) metadata.get(MetadataType.COMPACTION);
CompressionMetadata compression = null;
File compressionFile = new File(descriptor.filenameFor(Component.COMPRESSION_INFO));
if (compressionFile.exists())
compression = CompressionMetadata.create(fname);
SerializationHeader.Component header = (SerializationHeader.Component) metadata.get(MetadataType.HEADER);
out.printf("SSTable: %s%n", descriptor);
if (validation != null) {
out.printf("Partitioner: %s%n", validation.partitioner);
out.printf("Bloom Filter FP chance: %f%n", validation.bloomFilterFPChance);
}
if (stats != null) {
out.printf("Minimum timestamp: %s%n", stats.minTimestamp);
out.printf("Maximum timestamp: %s%n", stats.maxTimestamp);
out.printf("SSTable min local deletion time: %s%n", stats.minLocalDeletionTime);
out.printf("SSTable max local deletion time: %s%n", stats.maxLocalDeletionTime);
out.printf("Compressor: %s%n", compression != null ? compression.compressor().getClass().getName() : "-");
if (compression != null)
out.printf("Compression ratio: %s%n", stats.compressionRatio);
out.printf("TTL min: %s%n", stats.minTTL);
out.printf("TTL max: %s%n", stats.maxTTL);
if (validation != null && header != null)
printMinMaxToken(descriptor, FBUtilities.newPartitioner(descriptor), header.getKeyType(), out);
if (header != null && header.getClusteringTypes().size() == stats.minClusteringValues.size()) {
List<AbstractType<?>> clusteringTypes = header.getClusteringTypes();
List<ByteBuffer> minClusteringValues = stats.minClusteringValues;
List<ByteBuffer> maxClusteringValues = stats.maxClusteringValues;
String[] minValues = new String[clusteringTypes.size()];
String[] maxValues = new String[clusteringTypes.size()];
for (int i = 0; i < clusteringTypes.size(); i++) {
minValues[i] = clusteringTypes.get(i).getString(minClusteringValues.get(i));
maxValues[i] = clusteringTypes.get(i).getString(maxClusteringValues.get(i));
}
out.printf("minClustringValues: %s%n", Arrays.toString(minValues));
out.printf("maxClustringValues: %s%n", Arrays.toString(maxValues));
}
out.printf("Estimated droppable tombstones: %s%n", stats.getEstimatedDroppableTombstoneRatio((int) (System.currentTimeMillis() / 1000) - gcgs));
out.printf("SSTable Level: %d%n", stats.sstableLevel);
out.printf("Repaired at: %d%n", stats.repairedAt);
out.printf("Pending repair: %s%n", stats.pendingRepair);
out.printf("Replay positions covered: %s%n", stats.commitLogIntervals);
out.printf("totalColumnsSet: %s%n", stats.totalColumnsSet);
out.printf("totalRows: %s%n", stats.totalRows);
out.println("Estimated tombstone drop times:");
for (Map.Entry<Number, long[]> entry : stats.estimatedTombstoneDropTime.getAsMap().entrySet()) {
out.printf("%-10s:%10s%n", entry.getKey().intValue(), entry.getValue()[0]);
}
printHistograms(stats, out);
}
if (compaction != null) {
out.printf("Estimated cardinality: %s%n", compaction.cardinalityEstimator.cardinality());
}
if (header != null) {
EncodingStats encodingStats = header.getEncodingStats();
AbstractType<?> keyType = header.getKeyType();
List<AbstractType<?>> clusteringTypes = header.getClusteringTypes();
Map<ByteBuffer, AbstractType<?>> staticColumns = header.getStaticColumns();
Map<String, String> statics = staticColumns.entrySet().stream().collect(Collectors.toMap(e -> UTF8Type.instance.getString(e.getKey()), e -> e.getValue().toString()));
Map<ByteBuffer, AbstractType<?>> regularColumns = header.getRegularColumns();
Map<String, String> regulars = regularColumns.entrySet().stream().collect(Collectors.toMap(e -> UTF8Type.instance.getString(e.getKey()), e -> e.getValue().toString()));
out.printf("EncodingStats minTTL: %s%n", encodingStats.minTTL);
out.printf("EncodingStats minLocalDeletionTime: %s%n", encodingStats.minLocalDeletionTime);
out.printf("EncodingStats minTimestamp: %s%n", encodingStats.minTimestamp);
out.printf("KeyType: %s%n", keyType.toString());
out.printf("ClusteringTypes: %s%n", clusteringTypes.toString());
out.printf("StaticColumns: {%s}%n", FBUtilities.toString(statics));
out.printf("RegularColumns: {%s}%n", FBUtilities.toString(regulars));
}
} else {
out.println("No such file: " + fname);
}
}
}
Use of org.apache.commons.cli.PosixParser in project rest.li by linkedin:
the method main of class RestLiSnapshotCompatibilityChecker.
/**
 * Entry point for the snapshot compatibility checker.
 *
 * <p>Expects pairs of (previous, current) restspec/snapshot paths as
 * positional arguments. Exits 255 on usage errors, 0 when compatible at the
 * requested level (or when --report is requested), 1 when incompatible.
 *
 * @param args raw command-line arguments
 */
public static void main(String[] args) {
    final Options cliOptions = new Options();
    cliOptions.addOption("h", "help", false, "Print help");
    cliOptions.addOption(OptionBuilder.withArgName("compatibility_level").withLongOpt("compat").hasArg().withDescription("Compatibility level " + listCompatLevelOptions()).create('c'));
    cliOptions.addOption(OptionBuilder.withLongOpt("report").withDescription("Prints a report at the end of the execution that can be parsed for reporting to other tools").create("report"));
    final String cmdLineSyntax = RestLiSnapshotCompatibilityChecker.class.getCanonicalName() + " [pairs of <prevRestspecPath currRestspecPath>]";
    final CommandLineParser cliParser = new PosixParser();
    final CommandLine cmdLine;
    try {
        cmdLine = parseOrNull(cliParser, cliOptions, args, cmdLineSyntax);
    } catch (ParseException e) {
        new HelpFormatter().printHelp(cmdLineSyntax, cliOptions, true);
        System.exit(255);
        // to suppress IDE warning
        return;
    }
    final String[] targets = cmdLine.getArgs();
    // Usage errors: explicit help request, too few targets, or unpaired targets.
    final boolean badUsage = targets.length < 2 || targets.length % 2 != 0;
    if (cmdLine.hasOption('h') || badUsage) {
        new HelpFormatter().printHelp(cmdLineSyntax, cliOptions, true);
        System.exit(255);
    }
    // Fall back to the default compatibility level when -c is absent.
    final String compatValue = cmdLine.hasOption('c') ? cmdLine.getOptionValue('c') : CompatibilityLevel.DEFAULT.name();
    final CompatibilityLevel compat;
    try {
        compat = CompatibilityLevel.valueOf(compatValue.toUpperCase());
    } catch (IllegalArgumentException e) {
        new HelpFormatter().printHelp(cmdLineSyntax, cliOptions, true);
        System.exit(255);
        return;
    }
    final String resolverPath = System.getProperty(AbstractGenerator.GENERATOR_RESOLVER_PATH);
    final RestLiSnapshotCompatibilityChecker checker = new RestLiSnapshotCompatibilityChecker();
    checker.setResolverPath(resolverPath);
    // Walk the (previous, current) pairs.
    for (int i = 0; i + 1 < targets.length; i += 2) {
        final String prevTarget = targets[i];
        final String currTarget = targets[i + 1];
        checker.checkCompatibility(prevTarget, currTarget, compat, prevTarget.endsWith(".restspec.json"));
    }
    final String summary = checker.getInfoMap().createSummary();
    if (compat != CompatibilityLevel.OFF && summary.length() > 0) {
        System.out.println(summary);
    }
    if (cmdLine.hasOption("report")) {
        System.out.println(new CompatibilityReport(checker.getInfoMap(), compat).createReport());
        System.exit(0);
    }
    System.exit(checker.getInfoMap().isCompatible(compat) ? 0 : 1);
}

/**
 * Thin parsing wrapper so the caller keeps a single try/catch shape.
 *
 * @throws ParseException if the command line cannot be parsed
 */
private static CommandLine parseOrNull(CommandLineParser parser, Options options, String[] args, String syntax) throws ParseException {
    return parser.parse(options, args);
}
Aggregations