Use of org.apache.commons.cli.BasicParser in the project incubator-atlas (Apache): class HiveMetaStoreBridge, method main.
/**
 * Command-line entry point: connects to Atlas and imports Hive metadata.
 *
 * Reads the Atlas endpoint(s) from the application configuration (falling back
 * to {@code DEFAULT_DGI_URL}), builds an {@link AtlasClient} using either basic
 * or Kerberos authentication, then runs the Hive metadata import.
 *
 * @param args command-line arguments; supports the optional flag
 *             {@code -failOnError} to abort the import on the first error
 * @throws AtlasHookException if any step of the import fails (cause preserved)
 */
public static void main(String[] args) throws AtlasHookException {
    try {
        Configuration atlasConf = ApplicationProperties.get();
        String[] atlasEndpoint = atlasConf.getStringArray(ATLAS_ENDPOINT);
        if (atlasEndpoint == null || atlasEndpoint.length == 0) {
            atlasEndpoint = new String[] { DEFAULT_DGI_URL };
        }
        AtlasClient atlasClient;
        if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) {
            // Kerberos disabled: fall back to basic username/password auth.
            String[] basicAuthUsernamePassword = AuthenticationUtil.getBasicAuthenticationInput();
            atlasClient = new AtlasClient(atlasEndpoint, basicAuthUsernamePassword);
        } else {
            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            atlasClient = new AtlasClient(ugi, ugi.getShortUserName(), atlasEndpoint);
        }
        // BUG FIX: "failOnError" was never registered with Options, so
        // cmd.hasOption("failOnError") could never return true — and actually
        // passing the flag would make parse() throw an UnrecognizedOptionException.
        Options options = new Options();
        options.addOption("failOnError", false, "Abort the import on the first error instead of continuing.");
        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);
        boolean failOnError = cmd.hasOption("failOnError");
        HiveMetaStoreBridge hiveMetaStoreBridge = new HiveMetaStoreBridge(atlasConf, new HiveConf(), atlasClient);
        hiveMetaStoreBridge.importHiveMetadata(failOnError);
    } catch (Exception e) {
        // Wrap and rethrow with the original cause preserved for diagnostics.
        throw new AtlasHookException("HiveMetaStoreBridge.main() failed.", e);
    }
}
Use of org.apache.commons.cli.BasicParser in the project distributedlog (Twitter): class DistributedLogServerApp, method run.
/**
 * Parses the stored command-line arguments and starts the distributedlog
 * server, exiting the JVM with status -1 on any argument, configuration, or
 * startup failure.
 */
private void run() {
    try {
        logger.info("Running distributedlog server : args = {}", Arrays.toString(args));
        CommandLine cmdline = new BasicParser().parse(options, args);
        runCmd(cmdline);
    } catch (ParseException | IllegalArgumentException e) {
        // Both are user argument problems: report, show usage, and bail out.
        logger.error("Argument error : {}", e.getMessage());
        printUsage();
        Runtime.getRuntime().exit(-1);
    } catch (ConfigurationException ce) {
        logger.error("Configuration error : {}", ce.getMessage());
        printUsage();
        Runtime.getRuntime().exit(-1);
    } catch (IOException ie) {
        // Startup I/O failure: log the full stack trace, no usage needed.
        logger.error("Failed to start distributedlog server : ", ie);
        Runtime.getRuntime().exit(-1);
    }
}
Use of org.apache.commons.cli.BasicParser in the project distributedlog (Twitter): class MonitorServiceApp, method run.
/**
 * Parses the stored command-line arguments and starts the monitor service,
 * exiting the JVM with status -1 on argument or startup failure.
 */
private void run() {
    try {
        logger.info("Running monitor service.");
        BasicParser parser = new BasicParser();
        CommandLine cmdline = parser.parse(options, args);
        runCmd(cmdline);
    } catch (ParseException pe) {
        // FIX: the parse failure reason was silently dropped before; log it
        // (consistent with DistributedLogServerApp.run) so the user can see
        // which argument was rejected.
        logger.error("Argument error : {}", pe.getMessage());
        printUsage();
        Runtime.getRuntime().exit(-1);
    } catch (IOException ie) {
        logger.error("Failed to start monitor service : ", ie);
        Runtime.getRuntime().exit(-1);
    }
}
Use of org.apache.commons.cli.BasicParser in the project cdap (Cask Data): class UpgradeTool, method main.
public static void main(String[] args) throws Exception {
Options options = new Options().addOption(new Option("h", "help", false, "Print this usage message.")).addOption(new Option("u", "uri", true, "CDAP instance URI to interact with in the format " + "[http[s]://]<hostname>:<port>. Defaults to localhost:11015.")).addOption(new Option("a", "accesstoken", true, "File containing the access token to use when interacting " + "with a secure CDAP instance.")).addOption(new Option("t", "timeout", true, "Timeout in milliseconds to use when interacting with the " + "CDAP RESTful APIs. Defaults to " + DEFAULT_READ_TIMEOUT_MILLIS + ".")).addOption(new Option("n", "namespace", true, "Namespace to perform the upgrade in. If none is given, " + "pipelines in all namespaces will be upgraded.")).addOption(new Option("p", "pipeline", true, "Name of the pipeline to upgrade. If specified, a namespace " + "must also be given.")).addOption(new Option("f", "configfile", true, "File containing old application details to update. " + "The file contents are expected to be in the same format as the request body for creating an " + "ETL application from one of the etl artifacts. " + "It is expected to be a JSON Object containing 'artifact' and 'config' fields." + "The value for 'artifact' must be a JSON Object that specifies the artifact scope, name, and version. " + "The value for 'config' must be a JSON Object specifies the source, transforms, and sinks of the pipeline, " + "as expected by older versions of the etl artifacts.")).addOption(new Option("o", "outputfile", true, "File to write the converted application details provided in " + "the configfile option. If none is given, results will be written to the input file + '.converted'. " + "The contents of this file can be sent directly to CDAP to update or create an application.")).addOption(new Option("e", "errorDir", true, "Optional directory to write any upgraded pipeline configs that " + "failed to upgrade. The problematic configs can then be manually edited and upgraded separately. 
" + "Upgrade errors may happen for pipelines that use plugins that are not backwards compatible. " + "This directory must be writable by the user that is running this tool."));
CommandLineParser parser = new BasicParser();
CommandLine commandLine = parser.parse(options, args);
String[] commandArgs = commandLine.getArgs();
// if help is an option, or if there isn't a single 'upgrade' command, print usage and exit.
if (commandLine.hasOption("h") || commandArgs.length != 1 || !"upgrade".equalsIgnoreCase(commandArgs[0])) {
HelpFormatter helpFormatter = new HelpFormatter();
helpFormatter.printHelp(UpgradeTool.class.getName() + " upgrade", "Upgrades old pipelines to the current version. If the plugins used are not backward-compatible, " + "the attempted upgrade config will be written to the error directory for a manual upgrade.", options, "");
System.exit(0);
}
ClientConfig clientConfig = getClientConfig(commandLine);
if (commandLine.hasOption("f")) {
String inputFilePath = commandLine.getOptionValue("f");
String outputFilePath = commandLine.hasOption("o") ? commandLine.getOptionValue("o") : inputFilePath + ".new";
convertFile(inputFilePath, outputFilePath, new Upgrader(new ArtifactClient(clientConfig)));
System.exit(0);
}
File errorDir = commandLine.hasOption("e") ? new File(commandLine.getOptionValue("e")) : null;
if (errorDir != null) {
if (!errorDir.exists()) {
if (!errorDir.mkdirs()) {
LOG.error("Unable to create error directory {}.", errorDir.getAbsolutePath());
System.exit(1);
}
} else if (!errorDir.isDirectory()) {
LOG.error("{} is not a directory.", errorDir.getAbsolutePath());
System.exit(1);
} else if (!errorDir.canWrite()) {
LOG.error("Unable to write to error directory {}.", errorDir.getAbsolutePath());
System.exit(1);
}
}
UpgradeTool upgradeTool = new UpgradeTool(clientConfig, errorDir);
String namespace = commandLine.getOptionValue("n");
String pipelineName = commandLine.getOptionValue("p");
if (pipelineName != null) {
if (namespace == null) {
throw new IllegalArgumentException("Must specify a namespace when specifying a pipeline.");
}
ApplicationId appId = new ApplicationId(namespace, pipelineName);
if (upgradeTool.upgrade(appId)) {
LOG.info("Successfully upgraded {}.", appId);
} else {
LOG.info("{} did not need to be upgraded.", appId);
}
System.exit(0);
}
if (namespace != null) {
printUpgraded(upgradeTool.upgrade(new NamespaceId(namespace)));
System.exit(0);
}
printUpgraded(upgradeTool.upgrade());
}
Use of org.apache.commons.cli.BasicParser in the project cdap (Cask Data): class JobQueueDebugger, method main.
/**
 * Command-line entry point: scans the JobQueueDataset and prints statistics
 * about the jobs in it.
 *
 * Options: {@code -h} usage, {@code -p <partition>} restrict to one partition
 * (default: all), {@code -t} trace mode (print every job inspected).
 *
 * @param args command-line arguments
 * @throws Exception if argument parsing or the scan itself fails
 */
public static void main(String[] args) throws Exception {
    Options options = new Options()
        .addOption(new Option("h", "help", false, "Print this usage message."))
        .addOption(new Option("p", "partition", true, "JobQueue partition to debug. Defaults to all partitions."))
        .addOption(new Option("t", "trace", false, "Trace mode. Prints all of the jobs being debugged."));
    CommandLineParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(options, args);
    String[] commandArgs = commandLine.getArgs();
    // if help is an option, or if there is a command, print usage and exit.
    if (commandLine.hasOption("h") || commandArgs.length != 0) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp(JobQueueDebugger.class.getName(),
            "Scans the JobQueueDataset and prints statistics about the Jobs in it.", options, "");
        System.exit(0);
    }
    Integer partition = null;
    if (commandLine.hasOption("p")) {
        String partitionString = commandLine.getOptionValue("p");
        try {
            partition = Integer.valueOf(partitionString);
        } catch (NumberFormatException e) {
            // ROBUSTNESS FIX: a non-numeric -p value previously crashed with a
            // raw stack trace; report it cleanly and exit non-zero instead.
            System.err.println("Invalid value for -p/--partition, expected an integer: " + partitionString);
            System.exit(1);
        }
    }
    boolean trace = commandLine.hasOption("t");
    JobQueueDebugger debugger = createDebugger();
    debugger.startAndWait();
    debugger.printTopicMessageIds();
    if (partition == null) {
        debugger.scanPartitions(trace);
    } else {
        debugger.scanPartition(partition, trace);
    }
    debugger.stopAndWait();
}
Aggregations