Use of org.apache.commons.cli.ParseException in project opennms by OpenNMS.
Class CheckWmi, method main:
/**
 * <p>main</p>
 *
 * @param args an array of {@link java.lang.String} objects.
 * @throws org.apache.commons.cli.ParseException if any.
 */
public static void main(final String[] args) throws ParseException {
    final Options options = new Options();
    options.addOption("domain", true, "the NT/AD domain the credentials belong to");
    options.addOption("wmiClass", true, "the object class in WMI to query");
    options.addOption("wmiNamespace", true, "the namespace in WMI to use (default: " + WmiParams.WMI_DEFAULT_NAMESPACE + ")");
    options.addOption("wmiObject", true, "the object to query in WMI");
    options.addOption("wmiWql", true, "the query string to execute in WMI");
    options.addOption("op", true, "compare operation: NOOP, EQ, NEQ, GT, LT");
    options.addOption("value", true, "the value to compare to");
    options.addOption("matchType", true, "type of matching for multiple results: all, none, some, one");
    final CommandLineParser parser = new PosixParser();
    final CommandLine cmd = parser.parse(options, args);
    @SuppressWarnings("unchecked")
    List<String> arguments = (List<String>) cmd.getArgList();
    if (arguments.size() < 3) {
        usage(options, cmd);
        System.exit(1);
    }
    final String host = arguments.remove(0);
    final String user = arguments.remove(0);
    final String pass = arguments.remove(0);
    String wmiClass = "";
    if (cmd.hasOption("wmiClass")) {
        wmiClass = cmd.getOptionValue("wmiClass");
    }
    /* else {
        usage(options, cmd);
        System.exit(1);
    } */
    String wmiObject = "";
    if (cmd.hasOption("wmiObject")) {
        wmiObject = cmd.getOptionValue("wmiObject");
    } else {
        usage(options, cmd);
        System.exit(1);
    }
    String wmiNamespace = WmiParams.WMI_DEFAULT_NAMESPACE;
    if (cmd.hasOption("wmiNamespace")) {
        wmiNamespace = cmd.getOptionValue("wmiNamespace");
    }
    String wmiWql = "";
    if (cmd.hasOption("wmiWql")) {
        wmiWql = cmd.getOptionValue("wmiWql");
    }
    /* else {
        usage(options, cmd);
        System.exit(1);
    } */
    String compVal = "";
    if (cmd.hasOption("value")) {
        compVal = cmd.getOptionValue("value");
    } else {
        usage(options, cmd);
        System.exit(1);
    }
    String compOp = "";
    if (cmd.hasOption("op")) {
        compOp = cmd.getOptionValue("op");
    } else {
        usage(options, cmd);
        System.exit(1);
    }
    String domain = "";
    if (cmd.hasOption("domain")) {
        domain = cmd.getOptionValue("domain");
    }
    String matchType = "all";
    if (cmd.hasOption("matchType")) {
        matchType = cmd.getOptionValue("matchType");
    }
    try {
        // Hold the WMI objects from the results.
        List<Object> wmiObjects;
        // Create the check parameters holder.
        WmiParams clientParams;
        if (wmiWql == null || wmiWql.length() == 0) {
            clientParams = new WmiParams(WmiParams.WMI_OPERATION_INSTANCEOF, compVal, compOp, wmiClass, wmiObject);
        } else {
            clientParams = new WmiParams(WmiParams.WMI_OPERATION_WQL, compVal, compOp, wmiWql, wmiObject);
        }
        // Create the WMI Manager
        final WmiManager mgr = new WmiManager(host, user, pass, domain, matchType);
        mgr.setNamespace(wmiNamespace);
        // Connect to the WMI server.
        mgr.init();
        // Perform the operation specified in the parameters.
        final WmiResult result = mgr.performOp(clientParams);
        // And retrieve the WMI objects from the results.
        wmiObjects = result.getResponse();
        // Now output a brief report of the check results.
        System.out.println("Checking: " + wmiWql + " for " + wmiObject + " Op: " + compOp + " Val: " + compVal);
        System.out.println("Check results: " + WmiResult.convertStateToString(result.getResultCode()) + " (" + wmiObjects.size() + ")");
        for (int i = 0; i < wmiObjects.size(); i++) {
            System.out.println("Result for (" + (i + 1) + ") " + wmiClass + "\\" + wmiObject + ": " + wmiObjects.get(i));
        }
        // Disconnect when we're done.
        mgr.close();
    } catch (final Exception e) {
        e.printStackTrace();
    }
}
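For context, the following is a minimal, self-contained sketch (not OpenNMS code) of the Commons CLI pattern the CheckWmi example relies on: build Options, parse, and turn a ParseException into usage output instead of letting it propagate. The class name and usage string are illustrative, and DefaultParser is used here in place of the deprecated PosixParser.

import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseExceptionDemo {
    public static void main(String[] args) {
        final Options options = new Options();
        options.addOption("domain", true, "the NT/AD domain the credentials belong to");
        options.addOption("wmiObject", true, "the object to query in WMI");

        final CommandLineParser parser = new DefaultParser();
        try {
            final CommandLine cmd = parser.parse(options, args);
            // Positional arguments (host, user, pass in the CheckWmi example) remain in the arg list.
            final List<String> arguments = cmd.getArgList();
            System.out.println("positional args: " + arguments);
        } catch (final ParseException e) {
            // Unknown options or missing option arguments end up here.
            new HelpFormatter().printHelp("demo <host> <user> <pass>", options);
            System.exit(1);
        }
    }
}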
Use of org.apache.commons.cli.ParseException in project metron by apache.
Class PcapServiceCli, method parse:
public void parse() {
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e1) {
        e1.printStackTrace();
    }
    if (cmd.hasOption("h")) {
        help();
    }
    if (cmd.hasOption("log4j")) {
        PropertyConfigurator.configure(cmd.getOptionValue("log4j"));
    }
    if (cmd.hasOption("port")) {
        try {
            port = Integer.parseInt(cmd.getOptionValue("port").trim());
        } catch (Exception e) {
            System.out.println("[Metron] Invalid value for port entered");
            help();
        }
    }
    if (cmd.hasOption("pcap_hdfs_path")) {
        pcapHdfsPath = cmd.getOptionValue("pcap_hdfs_path");
    } else {
        throw new IllegalStateException("You must specify the pcap hdfs path");
    }
    if (cmd.hasOption("query_hdfs_path")) {
        queryHdfsPath = cmd.getOptionValue("query_hdfs_path");
    } else {
        throw new IllegalStateException("You must specify the query temp hdfs path");
    }
if (cmd.hasOption("endpoint_uri")) {
try {
if (uri == null || uri.equals(""))
throw new Exception("invalid uri");
uri = cmd.getOptionValue("uri").trim();
if (uri.charAt(0) != '/')
uri = "/" + uri;
if (uri.charAt(uri.length()) == '/')
uri = uri.substring(0, uri.length() - 1);
} catch (Exception e) {
System.out.println("[Metron] Invalid URI entered");
help();
}
}
}
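A small sketch (hypothetical, not Metron code) of how the required-option checks above can be routed through ParseException rather than IllegalStateException, so every argument problem reaches the same help-and-exit path. The option names mirror the snippet; the requireOption helper is an assumption introduced here.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class RequiredOptionSketch {
    public static void main(String[] args) {
        final Options options = new Options();
        options.addOption("pcap_hdfs_path", true, "HDFS path to the raw pcap data");
        options.addOption("query_hdfs_path", true, "HDFS path for query temp output");

        try {
            final CommandLine cmd = new DefaultParser().parse(options, args);
            final String pcapHdfsPath = requireOption(cmd, "pcap_hdfs_path");
            final String queryHdfsPath = requireOption(cmd, "query_hdfs_path");
            System.out.println(pcapHdfsPath + " / " + queryHdfsPath);
        } catch (ParseException e) {
            // Both parser failures and missing required options land here.
            System.err.println(e.getMessage());
            new HelpFormatter().printHelp("pcap_service", options);
            System.exit(1);
        }
    }

    // Helper (not part of Metron): turn a missing option into a ParseException.
    private static String requireOption(CommandLine cmd, String name) throws ParseException {
        if (!cmd.hasOption(name)) {
            throw new ParseException("Missing required option: -" + name);
        }
        return cmd.getOptionValue(name).trim();
    }
}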
Use of org.apache.commons.cli.ParseException in project hive by apache.
Class HiveMetaTool, method main:
public static void main(String[] args) {
    HiveMetaTool metaTool = new HiveMetaTool();
    metaTool.init();
    CommandLineParser parser = new GnuParser();
    CommandLine line = null;
    try {
        try {
            line = parser.parse(metaTool.cmdLineOptions, args);
        } catch (ParseException e) {
            System.err.println("HiveMetaTool:Parsing failed. Reason: " + e.getLocalizedMessage());
            printAndExit(metaTool);
        }
        if (line.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("metatool", metaTool.cmdLineOptions);
        } else if (line.hasOption("listFSRoot")) {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
                printAndExit(metaTool);
            }
            metaTool.listFSRoot();
        } else if (line.hasOption("executeJDOQL")) {
            String query = line.getOptionValue("executeJDOQL");
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
                printAndExit(metaTool);
            }
            if (query.toLowerCase().trim().startsWith("select")) {
                metaTool.executeJDOQLSelect(query);
            } else if (query.toLowerCase().trim().startsWith("update")) {
                metaTool.executeJDOQLUpdate(query);
            } else {
                System.err.println("HiveMetaTool:Unsupported statement type");
                printAndExit(metaTool);
            }
        } else if (line.hasOption("updateLocation")) {
            String[] loc = line.getOptionValues("updateLocation");
            boolean isDryRun = false;
            String serdepropKey = null;
            String tablePropKey = null;
            if (loc.length != 2 && loc.length != 3) {
                System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 optional arguments but was passed " + loc.length + " arguments");
                printAndExit(metaTool);
            }
            Path newPath = new Path(loc[0]);
            Path oldPath = new Path(loc[1]);
            URI oldURI = oldPath.toUri();
            URI newURI = newPath.toUri();
            if (line.hasOption("dryRun")) {
                isDryRun = true;
            }
            if (line.hasOption("serdePropKey")) {
                serdepropKey = line.getOptionValue("serdePropKey");
            }
            if (line.hasOption("tablePropKey")) {
                tablePropKey = line.getOptionValue("tablePropKey");
            }
            /*
             * validate input - Both new and old URI should contain valid host names and valid schemes.
             * port is optional in both the URIs since HDFS HA NN URI doesn't have a port.
             */
            if (oldURI.getHost() == null || newURI.getHost() == null) {
                System.err.println("HiveMetaTool:A valid host is required in both old-loc and new-loc");
            } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
                System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
            } else {
                metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
            }
        } else {
            if (line.hasOption("dryRun")) {
                System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
            } else if (line.hasOption("serdePropKey")) {
                System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
            } else if (line.hasOption("tablePropKey")) {
                System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
                printAndExit(metaTool);
            } else {
                System.err.print("HiveMetaTool:Parsing failed. Reason: Invalid arguments: ");
                for (String s : line.getArgs()) {
                    System.err.print(s + " ");
                }
                System.err.println();
            }
            printAndExit(metaTool);
        }
    } finally {
        metaTool.shutdownObjectStore();
    }
}
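HiveMetaTool rejects conflicting flags by hand with a chain of hasOption checks. As a hedged alternative (not Hive's actual implementation), an OptionGroup lets the parser itself report conflicting modes as a ParseException (specifically AlreadySelectedException); the option definitions below only approximate HiveMetaTool's real cmdLineOptions.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class MetaToolModesSketch {
    public static void main(String[] args) {
        // Mutually exclusive "modes": selecting two at once makes parse() throw a ParseException.
        final OptionGroup modes = new OptionGroup();
        modes.addOption(new Option("listFSRoot", false, "print the current FS root"));
        modes.addOption(Option.builder("executeJDOQL").hasArg().desc("execute the given JDOQL query").build());
        modes.addOption(Option.builder("updateLocation").numberOfArgs(2).desc("<new-loc> <old-loc>").build());

        final Options options = new Options();
        options.addOptionGroup(modes);
        options.addOption("dryRun", false, "only report what would change");

        try {
            final CommandLine line = new DefaultParser().parse(options, args);
            System.out.println("selected mode: " + modes.getSelected());
            System.out.println("dryRun: " + line.hasOption("dryRun"));
        } catch (ParseException e) {
            System.err.println("metatool: " + e.getMessage());
            new HelpFormatter().printHelp("metatool", options);
            System.exit(1);
        }
    }
}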
Use of org.apache.commons.cli.ParseException in project janusgraph by JanusGraph.
Class AwsCodePipelinesCi, method main:
public static void main(String[] args) {
    int status = 0;
    try {
        final Options options = new Options();
        OPTIONS.forEach(options::addOption);
        new AwsCodePipelinesCi(new DefaultParser().parse(options, args)).run();
    } catch (ParseException | IllegalArgumentException e) {
        log.error(e.getMessage(), e);
        // EINVAL
        status = 22;
    } catch (IAMException e) {
        log.error(e.getMessage(), e);
        // EPERM
        status = 1;
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        // EAGAIN
        status = 11;
    }
    System.exit(status);
}
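A minimal sketch of the same errno-style mapping in isolation, assuming a single required option: with Option.builder(...).required(), a missing argument surfaces as MissingOptionException, a ParseException subclass, and gets mapped to an exit code just as above. The option name and messages are illustrative, not from JanusGraph.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ExitCodeSketch {
    public static void main(String[] args) {
        int status = 0;
        try {
            final Options options = new Options();
            // A required option: omitting it makes parse() throw MissingOptionException.
            options.addOption(Option.builder("bucket").hasArg().required().desc("S3 bucket to use").build());
            final CommandLine cmd = new DefaultParser().parse(options, args);
            System.out.println("bucket = " + cmd.getOptionValue("bucket"));
        } catch (ParseException | IllegalArgumentException e) {
            System.err.println(e.getMessage());
            // EINVAL, mirroring the errno-style codes used above
            status = 22;
        } catch (Exception e) {
            System.err.println(e.getMessage());
            // EAGAIN
            status = 11;
        }
        System.exit(status);
    }
}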
Use of org.apache.commons.cli.ParseException in project kie-wb-common by kiegroup.
Class AFMavenCli, method cli:
protected void cli(AFCliRequest cliRequest) throws Exception {
    //
    // Parsing errors can happen during the processing of the arguments and we prefer not having to check if
    // the logger is null and construct this so we can use an SLF4J logger everywhere.
    //
    slf4jLogger = new Slf4jStdoutLogger();
    CLIManager cliManager = new CLIManager();
    List<String> args = new ArrayList<String>();
    try {
        Path configFile = Paths.get(cliRequest.getMultiModuleProjectDirectory(), ".mvn/maven.config");
        if (java.nio.file.Files.isRegularFile(configFile)) {
            for (String arg : Files.toString(configFile.toFile(), Charsets.UTF_8).split("\\s+")) {
                args.add(arg);
            }
            CommandLine config = cliManager.parse(args.toArray(new String[args.size()]));
            List<?> unrecognized = config.getArgList();
            if (!unrecognized.isEmpty()) {
                throw new ParseException("Unrecognized maven.config entries: " + unrecognized);
            }
        }
    } catch (ParseException e) {
        System.err.println("Unable to parse maven.config: " + e.getMessage());
        cliManager.displayHelp(output);
        throw e;
    }
    try {
        args.addAll(0, Arrays.asList(cliRequest.getArgs()));
        cliRequest.setCommandLine(cliManager.parse(args.toArray(new String[args.size()])));
    } catch (ParseException e) {
        System.err.println("Unable to parse command line options: " + e.getMessage());
        cliManager.displayHelp(output);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);
        cliManager.displayHelp(ps);
        throw e;
    }
    if (cliRequest.getCommandLine().hasOption(CLIManager.HELP)) {
        cliManager.displayHelp(output);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(baos);
        cliManager.displayHelp(ps);
        throw new ExitException(0);
    }
    if (cliRequest.getCommandLine().hasOption(CLIManager.VERSION)) {
        System.out.println(AFCLIReportingUtils.showVersion());
        throw new ExitException(0);
    }
}
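A self-contained sketch of the same idea in plain Java (assumptions: a .mvn/maven.config file in the working directory and a single -T/--threads option; this is not kie-wb-common or Maven code): merge tokens from a config file with the real command line, then report any leftover, unrecognized tokens by throwing a ParseException.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ConfigFileArgsSketch {
    public static void main(String[] args) throws IOException {
        final Options options = new Options();
        options.addOption("T", "threads", true, "thread count");

        final List<String> merged = new ArrayList<>();
        final Path configFile = Paths.get(".mvn/maven.config");
        if (Files.isRegularFile(configFile)) {
            final String content = new String(Files.readAllBytes(configFile), StandardCharsets.UTF_8);
            for (String token : content.trim().split("\\s+")) {
                if (!token.isEmpty()) {
                    merged.add(token);
                }
            }
        }
        // Put the real command-line arguments first, mirroring the ordering used in the snippet above.
        merged.addAll(0, Arrays.asList(args));

        try {
            final CommandLine cmd = new DefaultParser().parse(options, merged.toArray(new String[0]));
            final List<String> unrecognized = cmd.getArgList();
            if (!unrecognized.isEmpty()) {
                // Surface leftover tokens the same way the snippet does: as a ParseException.
                throw new ParseException("Unrecognized maven.config entries: " + unrecognized);
            }
            System.out.println("threads = " + cmd.getOptionValue("T", "1"));
        } catch (ParseException e) {
            System.err.println("Unable to parse options: " + e.getMessage());
            System.exit(1);
        }
    }
}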