Use of net.sourceforge.argparse4j.inf.ArgumentParser in the Apache Kafka project.
From the AgentClient class: the main method.
public static void main(String[] args) throws Exception {
    ArgumentParser rootParser = ArgumentParsers.newArgumentParser("trogdor-agent-client").defaultHelp(true).description("The Trogdor agent client.");
    Subparsers subParsers = rootParser.addSubparsers().dest("command");
    Subparser uptimeParser = subParsers.addParser("uptime").help("Get the agent uptime.");
    addTargetArgument(uptimeParser);
    addJsonArgument(uptimeParser);
    Subparser statusParser = subParsers.addParser("status").help("Get the agent status.");
    addTargetArgument(statusParser);
    addJsonArgument(statusParser);
    Subparser createWorkerParser = subParsers.addParser("createWorker").help("Create a new worker.");
    addTargetArgument(createWorkerParser);
    addWorkerIdArgument(createWorkerParser, "The worker ID to create.");
    createWorkerParser.addArgument("--taskId").action(store()).required(true).type(String.class).dest("taskId").metavar("TASK_ID").help("The task ID to create.");
    createWorkerParser.addArgument("--spec", "-s").action(store()).required(true).type(String.class).dest("taskSpec").metavar("TASK_SPEC").help("The task spec to create, or a path to a file containing the task spec.");
    Subparser stopWorkerParser = subParsers.addParser("stopWorker").help("Stop a worker.");
    addTargetArgument(stopWorkerParser);
    addWorkerIdArgument(stopWorkerParser, "The worker ID to stop.");
    Subparser destroyWorkerParser = subParsers.addParser("destroyWorker").help("Destroy a worker.");
    addTargetArgument(destroyWorkerParser);
    addWorkerIdArgument(destroyWorkerParser, "The worker ID to destroy.");
    Subparser shutdownParser = subParsers.addParser("shutdown").help("Shut down the agent.");
    addTargetArgument(shutdownParser);
    Namespace res = rootParser.parseArgsOrFail(args);
    String target = res.getString("target");
    AgentClient client = new Builder().maxTries(3).target(target).build();
    ZoneOffset localOffset = OffsetDateTime.now().getOffset();
    switch (res.getString("command")) {
        case "uptime": {
            UptimeResponse uptime = client.uptime();
            if (res.getBoolean("json")) {
                System.out.println(JsonUtil.toJsonString(uptime));
            } else {
                System.out.printf("Agent is running at %s.%n", target);
                System.out.printf("\tStart time: %s%n", dateString(uptime.serverStartMs(), localOffset));
                System.out.printf("\tCurrent server time: %s%n", dateString(uptime.nowMs(), localOffset));
                System.out.printf("\tUptime: %s%n", durationString(uptime.nowMs() - uptime.serverStartMs()));
            }
            break;
        }
        case "status": {
            AgentStatusResponse status = client.status();
            if (res.getBoolean("json")) {
                System.out.println(JsonUtil.toJsonString(status));
            } else {
                System.out.printf("Agent is running at %s.%n", target);
                System.out.printf("\tStart time: %s%n", dateString(status.serverStartMs(), localOffset));
                List<List<String>> lines = new ArrayList<>();
                List<String> header = new ArrayList<>(Arrays.asList("WORKER_ID", "TASK_ID", "STATE", "TASK_TYPE"));
                lines.add(header);
                for (Map.Entry<Long, WorkerState> entry : status.workers().entrySet()) {
                    List<String> cols = new ArrayList<>();
                    cols.add(Long.toString(entry.getKey()));
                    cols.add(entry.getValue().taskId());
                    cols.add(entry.getValue().getClass().getSimpleName());
                    cols.add(entry.getValue().spec().getClass().getCanonicalName());
                    lines.add(cols);
                }
                System.out.print(StringFormatter.prettyPrintGrid(lines));
            }
            break;
        }
        case "createWorker": {
            long workerId = res.getLong("workerId");
            String taskId = res.getString("taskId");
            TaskSpec taskSpec = JsonUtil.objectFromCommandLineArgument(res.getString("taskSpec"), TaskSpec.class);
            CreateWorkerRequest req = new CreateWorkerRequest(workerId, taskId, taskSpec);
            client.createWorker(req);
            System.out.printf("Sent CreateWorkerRequest for worker %d.%n", req.workerId());
            break;
        }
        case "stopWorker": {
            long workerId = res.getLong("workerId");
            client.stopWorker(new StopWorkerRequest(workerId));
            System.out.printf("Sent StopWorkerRequest for worker %d.%n", workerId);
            break;
        }
        case "destroyWorker": {
            long workerId = res.getLong("workerId");
            client.destroyWorker(new DestroyWorkerRequest(workerId));
            System.out.printf("Sent DestroyWorkerRequest for worker %d.%n", workerId);
            break;
        }
        case "shutdown": {
            client.invokeShutdown();
            System.out.println("Sent ShutdownRequest.");
            break;
        }
        default: {
            System.out.println("You must choose an action. Type --help for help.");
            Exit.exit(1);
        }
    }
}
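The main method above relies on three helpers, addTargetArgument, addJsonArgument, and addWorkerIdArgument, whose bodies are not part of this snippet. The following is a minimal sketch of how such helpers can be written with argparse4j; the flag spellings, help strings, and static imports are assumptions for illustration, not the Kafka originals.

// Hedged sketch: helpers assumed to register the flags that main() reads back out of the Namespace.
// Assumes: import static net.sourceforge.argparse4j.impl.Arguments.store;
//          import static net.sourceforge.argparse4j.impl.Arguments.storeTrue;
private static void addTargetArgument(ArgumentParser parser) {
    // The target is read via res.getString("target") above, so dest must be "target".
    parser.addArgument("--target", "-t").action(store()).required(true).type(String.class).dest("target").metavar("TARGET").help("The agent host and port, e.g. localhost:8888.");
}

private static void addJsonArgument(ArgumentParser parser) {
    // storeTrue() makes --json a boolean flag, matching res.getBoolean("json") above.
    parser.addArgument("--json").action(storeTrue()).dest("json").help("Print the raw JSON response.");
}

private static void addWorkerIdArgument(ArgumentParser parser, String help) {
    // Long.class matches res.getLong("workerId") in the worker subcommands.
    parser.addArgument("--workerId").action(store()).required(true).type(Long.class).dest("workerId").metavar("WORKER_ID").help(help);
}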
Use of net.sourceforge.argparse4j.inf.ArgumentParser in the Apache Kafka project.
From the TransactionsCommand class: the execute method.
static void execute(String[] args, Function<Namespace, Admin> adminSupplier, PrintStream out, Time time) throws Exception {
    List<TransactionsCommand> commands = Arrays.asList(new ListTransactionsCommand(time), new DescribeTransactionsCommand(time), new DescribeProducersCommand(time), new AbortTransactionCommand(time), new FindHangingTransactionsCommand(time));
    ArgumentParser parser = buildBaseParser();
    Subparsers subparsers = parser.addSubparsers().dest("command").title("commands").metavar("COMMAND");
    commands.forEach(command -> command.addSubparser(subparsers));
    final Namespace ns;
    try {
        ns = parser.parseArgs(args);
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
        return;
    }
    Admin admin = adminSupplier.apply(ns);
    String commandName = ns.getString("command");
    Optional<TransactionsCommand> commandOpt = commands.stream().filter(cmd -> cmd.name().equals(commandName)).findFirst();
    if (!commandOpt.isPresent()) {
        printErrorAndExit("Unexpected command " + commandName);
    }
    TransactionsCommand command = commandOpt.get();
    command.execute(admin, ns, out);
    Exit.exit(0);
}
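Each entry in the commands list plugs into this dispatch through three hooks that execute relies on: name(), addSubparser(), and execute(). The sketch below shows a hypothetical command wired up against that contract; the class, its help text, and the exact signatures of the base-class hooks are inferred from the calls above rather than copied from Kafka.

// Hypothetical command, for illustration of the contract only.
static class PingCommand extends TransactionsCommand {
    PingCommand(Time time) {
        // Assumes the abstract base class keeps the Time it is constructed with.
        super(time);
    }

    @Override
    String name() {
        // Must match the registered subparser name so ns.getString("command") finds this command.
        return "ping";
    }

    @Override
    void addSubparser(Subparsers subparsers) {
        subparsers.addParser(name()).help("Check connectivity to the cluster (illustrative only).");
    }

    @Override
    void execute(Admin admin, Namespace ns, PrintStream out) throws Exception {
        // A real command would inspect transactional state here; this sketch just prints the cluster id.
        out.println(admin.describeCluster().clusterId().get());
    }
}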
Use of net.sourceforge.argparse4j.inf.ArgumentParser in the Apache Kafka project.
From the TransactionsCommand class: the buildBaseParser method.
static ArgumentParser buildBaseParser() {
ArgumentParser parser = ArgumentParsers.newArgumentParser("kafka-transactions.sh");
parser.description("This tool is used to analyze the transactional state of producers in the cluster. " + "It can be used to detect and recover from hanging transactions.");
parser.addArgument("-v", "--version").action(new PrintVersionAndExitAction()).help("show the version of this Kafka distribution and exit");
parser.addArgument("--command-config").help("property file containing configs to be passed to admin client").action(store()).type(String.class).metavar("FILE").required(false);
parser.addArgument("--bootstrap-server").help("hostname and port for the broker to connect to, in the form `host:port` " + "(multiple comma-separated entries can be given)").action(store()).type(String.class).metavar("host:port").required(true);
return parser;
}
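As a quick check of what the base parser produces, the hedged example below parses a made-up command line and reads the values back out of the Namespace. argparse4j derives default dest names by replacing dashes with underscores, which is why the lookups use bootstrap_server and command_config.

// Illustrative only: the server address and properties file name are made up.
public static void main(String[] args) throws ArgumentParserException {
    ArgumentParser parser = buildBaseParser();
    Namespace ns = parser.parseArgs(new String[] {"--bootstrap-server", "localhost:9092", "--command-config", "admin.properties"});
    // "--bootstrap-server" is stored under the default dest "bootstrap_server".
    System.out.println(ns.getString("bootstrap_server"));
    // Optional arguments that were not supplied would come back as null.
    System.out.println(ns.getString("command_config"));
}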
Use of net.sourceforge.argparse4j.inf.ArgumentParser in the Apache Kafka project.
From the VerifiableLog4jAppender class: the createFromArgs method.
/**
* Construct a VerifiableLog4jAppender object from command-line arguments.
*/
public static VerifiableLog4jAppender createFromArgs(String[] args) {
    ArgumentParser parser = argParser();
    VerifiableLog4jAppender producer = null;
    try {
        Namespace res = parser.parseArgs(args);
        int maxMessages = res.getInt("maxMessages");
        String topic = res.getString("topic");
        String configFile = res.getString("appender.config");
        Properties props = new Properties();
        props.setProperty("log4j.rootLogger", "INFO, KAFKA");
        props.setProperty("log4j.appender.KAFKA", "org.apache.kafka.log4jappender.KafkaLog4jAppender");
        props.setProperty("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n");
        props.setProperty("log4j.appender.KAFKA.BrokerList", res.getString("brokerList"));
        props.setProperty("log4j.appender.KAFKA.Topic", topic);
        props.setProperty("log4j.appender.KAFKA.RequiredNumAcks", res.getString("acks"));
        props.setProperty("log4j.appender.KAFKA.SyncSend", "true");
        final String securityProtocol = res.getString("securityProtocol");
        if (securityProtocol != null && !securityProtocol.equals(SecurityProtocol.PLAINTEXT.toString())) {
            props.setProperty("log4j.appender.KAFKA.SecurityProtocol", securityProtocol);
        }
        if (securityProtocol != null && securityProtocol.contains("SSL")) {
            props.setProperty("log4j.appender.KAFKA.SslTruststoreLocation", res.getString("sslTruststoreLocation"));
            props.setProperty("log4j.appender.KAFKA.SslTruststorePassword", res.getString("sslTruststorePassword"));
        }
        if (securityProtocol != null && securityProtocol.contains("SASL")) {
            props.setProperty("log4j.appender.KAFKA.SaslKerberosServiceName", res.getString("saslKerberosServiceName"));
            props.setProperty("log4j.appender.KAFKA.clientJaasConfPath", res.getString("clientJaasConfPath"));
            props.setProperty("log4j.appender.KAFKA.kerb5ConfPath", res.getString("kerb5ConfPath"));
        }
        props.setProperty("log4j.logger.kafka.log4j", "INFO, KAFKA");
        // Changing log level from INFO to WARN as a temporary workaround for KAFKA-6415. This is to
        // avoid deadlock in system tests when producer network thread appends to log while updating metadata.
        props.setProperty("log4j.logger.org.apache.kafka.clients.Metadata", "WARN, KAFKA");
        if (configFile != null) {
            try {
                props.putAll(loadProps(configFile));
            } catch (IOException e) {
                throw new ArgumentParserException(e.getMessage(), parser);
            }
        }
        producer = new VerifiableLog4jAppender(props, maxMessages);
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            Exit.exit(0);
        } else {
            parser.handleError(e);
            Exit.exit(1);
        }
    }
    return producer;
}
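createFromArgs also calls a loadProps helper that is not shown in this snippet; in Kafka it is typically a statically imported utility. A minimal hedged sketch of such a helper is shown below, assuming java.nio.file and java.util.Properties; the real utility may differ in encoding and error handling.

// Hedged sketch of a loadProps-style helper: read a java.util.Properties file from disk.
// Assumes: import java.io.IOException; import java.io.InputStream;
//          import java.nio.file.Files; import java.nio.file.Paths; import java.util.Properties;
private static Properties loadProps(String filename) throws IOException {
    Properties props = new Properties();
    try (InputStream in = Files.newInputStream(Paths.get(filename))) {
        props.load(in);
    }
    return props;
}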
Use of net.sourceforge.argparse4j.inf.ArgumentParser in the Apache Kafka project.
From the VerifiableLog4jAppender class: the argParser method.
/**
* Get the command-line argument parser.
*/
private static ArgumentParser argParser() {
ArgumentParser parser = ArgumentParsers.newArgumentParser("verifiable-log4j-appender").defaultHelp(true).description("This tool produces increasing integers to the specified topic using KafkaLog4jAppender.");
parser.addArgument("--topic").action(store()).required(true).type(String.class).metavar("TOPIC").help("Produce messages to this topic.");
parser.addArgument("--broker-list").action(store()).required(true).type(String.class).metavar("HOST1:PORT1[,HOST2:PORT2[...]]").dest("brokerList").help("Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");
parser.addArgument("--max-messages").action(store()).required(false).setDefault(-1).type(Integer.class).metavar("MAX-MESSAGES").dest("maxMessages").help("Produce this many messages. If -1, produce messages until the process is killed externally.");
parser.addArgument("--acks").action(store()).required(false).setDefault("-1").type(String.class).choices("0", "1", "-1").metavar("ACKS").help("Acks required on each produced message. See Kafka docs on request.required.acks for details.");
parser.addArgument("--security-protocol").action(store()).required(false).setDefault("PLAINTEXT").type(String.class).choices("PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL").metavar("SECURITY-PROTOCOL").dest("securityProtocol").help("Security protocol to be used while communicating with Kafka brokers.");
parser.addArgument("--ssl-truststore-location").action(store()).required(false).type(String.class).metavar("SSL-TRUSTSTORE-LOCATION").dest("sslTruststoreLocation").help("Location of SSL truststore to use.");
parser.addArgument("--ssl-truststore-password").action(store()).required(false).type(String.class).metavar("SSL-TRUSTSTORE-PASSWORD").dest("sslTruststorePassword").help("Password for SSL truststore to use.");
parser.addArgument("--appender.config").action(store()).required(false).type(String.class).metavar("CONFIG_FILE").help("Log4jAppender config properties file.");
parser.addArgument("--sasl-kerberos-service-name").action(store()).required(false).type(String.class).metavar("SASL-KERBEROS-SERVICE-NAME").dest("saslKerberosServiceName").help("Name of sasl kerberos service.");
parser.addArgument("--client-jaas-conf-path").action(store()).required(false).type(String.class).metavar("CLIENT-JAAS-CONF-PATH").dest("clientJaasConfPath").help("Path of JAAS config file of Kafka client.");
parser.addArgument("--kerb5-conf-path").action(store()).required(false).type(String.class).metavar("KERB5-CONF-PATH").dest("kerb5ConfPath").help("Path of Kerb5 config file.");
return parser;
}
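Tying the two VerifiableLog4jAppender snippets together, the hedged example below drives createFromArgs with a hard-coded argument list and only checks that an appender came back; the topic name and broker address are made up, and anything beyond construction (such as actually producing messages) is outside what these snippets show.

// Illustrative only: exercise argParser() and createFromArgs() with a made-up command line.
public static void main(String[] rawArgs) {
    String[] args = {
        "--topic", "log4j-test",
        "--broker-list", "localhost:9092",
        "--max-messages", "100",
        "--acks", "-1"
    };
    VerifiableLog4jAppender appender = createFromArgs(args);
    // On a parse failure createFromArgs calls Exit.exit, so reaching this line implies parsing succeeded.
    System.out.println("Appender configured: " + (appender != null));
}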