Use of net.sourceforge.argparse4j.inf.ArgumentParser in project apache-kafka-on-k8s by banzaicloud.
The class TransactionalMessageCopier, method argParser.
/**
 * Get the command-line argument parser.
 */
private static ArgumentParser argParser() {
    ArgumentParser parser = ArgumentParsers.newArgumentParser("transactional-message-copier")
        .defaultHelp(true)
        .description("This tool copies messages transactionally from an input partition to an output topic, " +
            "committing the consumed offsets along with the output messages");
    parser.addArgument("--input-topic").action(store()).required(true).type(String.class)
        .metavar("INPUT-TOPIC").dest("inputTopic").help("Consume messages from this topic.");
    parser.addArgument("--input-partition").action(store()).required(true).type(Integer.class)
        .metavar("INPUT-PARTITION").dest("inputPartition").help("Consume messages from this partition of the input topic.");
    parser.addArgument("--output-topic").action(store()).required(true).type(String.class)
        .metavar("OUTPUT-TOPIC").dest("outputTopic").help("Produce messages to this topic.");
    parser.addArgument("--broker-list").action(store()).required(true).type(String.class)
        .metavar("HOST1:PORT1[,HOST2:PORT2[...]]").dest("brokerList")
        .help("Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");
    parser.addArgument("--max-messages").action(store()).required(false).setDefault(-1).type(Integer.class)
        .metavar("MAX-MESSAGES").dest("maxMessages")
        .help("Process this many messages, up to the end offset at the time this program was launched. If set to -1, " +
            "just read to the end offset of the input partition (as of the time the program was launched).");
    parser.addArgument("--consumer-group").action(store()).required(false)
        .setDefault("-1") // quoted so the String-typed getString("consumerGroup") does not fail on the default
        .type(String.class).metavar("CONSUMER-GROUP").dest("consumerGroup")
        .help("The consumer group id to use for storing the consumer offsets.");
    parser.addArgument("--transaction-size").action(store()).required(false).setDefault(200).type(Integer.class)
        .metavar("TRANSACTION-SIZE").dest("messagesPerTransaction")
        .help("The number of messages to put in each transaction. Default is 200.");
    parser.addArgument("--transactional-id").action(store()).required(true).type(String.class)
        .metavar("TRANSACTIONAL-ID").dest("transactionalId").help("The transactionalId to assign to the producer.");
    parser.addArgument("--enable-random-aborts").action(storeTrue()).type(Boolean.class)
        .metavar("ENABLE-RANDOM-ABORTS").dest("enableRandomAborts")
        .help("Whether or not to enable random transaction aborts (for system testing).");
    return parser;
}
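As a usage sketch, the returned parser is typically consumed by parsing the argument vector into a Namespace and reading each value by its dest key. The wrapper method and variable names below are hypothetical; only the parser API calls and the dest keys come from the snippet above:

// Hypothetical usage sketch: parse the command line and read values by their dest keys.
public static void runFromArgs(String[] args) {
    ArgumentParser parser = argParser();
    try {
        Namespace res = parser.parseArgs(args);
        String inputTopic = res.getString("inputTopic");
        int inputPartition = res.getInt("inputPartition");
        int maxMessages = res.getInt("maxMessages");
        boolean enableRandomAborts = res.getBoolean("enableRandomAborts"); // storeTrue flag
        // ... build the consumer/producer pair and copy messages ...
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}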
Use of net.sourceforge.argparse4j.inf.ArgumentParser in project apache-kafka-on-k8s by banzaicloud.
The class VerifiableConsumer, method main.
public static void main(String[] args) {
    ArgumentParser parser = argParser();
    if (args.length == 0) {
        parser.printHelp();
        Exit.exit(0);
    }
    try {
        final VerifiableConsumer consumer = createFromArgs(parser, args);
        // Ensure the consumer is closed even when the process is terminated externally.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                consumer.close();
            }
        });
        consumer.run();
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}
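createFromArgs is defined elsewhere in VerifiableConsumer and is not shown on this page. As a hedged illustration of the pattern only (not the project's actual implementation), it parses the arguments and translates them into Kafka consumer configuration, letting parse failures propagate for main to handle. The dest names and the reduced set of properties here are assumptions:

// Illustrative sketch only: the real createFromArgs configures many more options
// and returns a VerifiableConsumer rather than a bare KafkaConsumer.
private static KafkaConsumer<String, String> sketchCreateFromArgs(ArgumentParser parser, String[] args)
        throws ArgumentParserException {
    Namespace res = parser.parseArgs(args); // throws on missing or unknown arguments
    Properties props = new Properties();
    props.put("bootstrap.servers", res.getString("brokerList")); // dest name assumed
    props.put("group.id", res.getString("groupId"));             // dest name assumed
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
    return new KafkaConsumer<>(props);
}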
Use of net.sourceforge.argparse4j.inf.ArgumentParser in project apache-kafka-on-k8s by banzaicloud.
The class VerifiableLog4jAppender, method createFromArgs.
/**
 * Construct a VerifiableLog4jAppender object from command-line arguments.
 */
public static VerifiableLog4jAppender createFromArgs(String[] args) {
    ArgumentParser parser = argParser();
    VerifiableLog4jAppender producer = null;
    try {
        Namespace res = parser.parseArgs(args);
        int maxMessages = res.getInt("maxMessages");
        String topic = res.getString("topic");
        String configFile = res.getString("appender.config");
        Properties props = new Properties();
        props.setProperty("log4j.rootLogger", "INFO, KAFKA");
        props.setProperty("log4j.appender.KAFKA", "org.apache.kafka.log4jappender.KafkaLog4jAppender");
        props.setProperty("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n");
        props.setProperty("log4j.appender.KAFKA.BrokerList", res.getString("brokerList"));
        props.setProperty("log4j.appender.KAFKA.Topic", topic);
        props.setProperty("log4j.appender.KAFKA.RequiredNumAcks", res.getString("acks"));
        props.setProperty("log4j.appender.KAFKA.SyncSend", "true");
        final String securityProtocol = res.getString("securityProtocol");
        if (securityProtocol != null && !securityProtocol.equals(SecurityProtocol.PLAINTEXT.toString())) {
            props.setProperty("log4j.appender.KAFKA.SecurityProtocol", securityProtocol);
        }
        if (securityProtocol != null && securityProtocol.contains("SSL")) {
            props.setProperty("log4j.appender.KAFKA.SslTruststoreLocation", res.getString("sslTruststoreLocation"));
            props.setProperty("log4j.appender.KAFKA.SslTruststorePassword", res.getString("sslTruststorePassword"));
        }
        if (securityProtocol != null && securityProtocol.contains("SASL")) {
            props.setProperty("log4j.appender.KAFKA.SaslKerberosServiceName", res.getString("saslKerberosServiceName"));
            props.setProperty("log4j.appender.KAFKA.clientJaasConfPath", res.getString("clientJaasConfPath"));
            props.setProperty("log4j.appender.KAFKA.kerb5ConfPath", res.getString("kerb5ConfPath"));
        }
        props.setProperty("log4j.logger.kafka.log4j", "INFO, KAFKA");
        // Changing the log level from INFO to WARN as a temporary workaround for KAFKA-6415. This is to
        // avoid a deadlock in system tests when the producer network thread appends to the log while updating metadata.
        props.setProperty("log4j.logger.org.apache.kafka.clients.Metadata", "WARN, KAFKA");
        if (configFile != null) {
            try {
                // Settings from the config file take precedence over the defaults set above.
                props.putAll(loadProps(configFile));
            } catch (IOException e) {
                throw new ArgumentParserException(e.getMessage(), parser);
            }
        }
        producer = new VerifiableLog4jAppender(props, maxMessages);
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            Exit.exit(0);
        } else {
            parser.handleError(e);
            Exit.exit(1);
        }
    }
    return producer;
}
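loadProps above refers to a helper that reads a java.util.Properties file from disk (in Kafka it lives in org.apache.kafka.common.utils.Utils). A minimal equivalent, shown here only for clarity:

// Minimal equivalent of the loadProps helper used above
// (imports: java.io.FileInputStream, java.io.IOException, java.io.InputStream, java.util.Properties).
private static Properties loadProps(String filename) throws IOException {
    Properties props = new Properties();
    try (InputStream in = new FileInputStream(filename)) {
        props.load(in);
    }
    return props;
}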
Use of net.sourceforge.argparse4j.inf.ArgumentParser in project apache-kafka-on-k8s by banzaicloud.
The class VerifiableLog4jAppender, method argParser.
/**
 * Get the command-line argument parser.
 */
private static ArgumentParser argParser() {
    ArgumentParser parser = ArgumentParsers.newArgumentParser("verifiable-log4j-appender")
        .defaultHelp(true)
        .description("This tool produces increasing integers to the specified topic using KafkaLog4jAppender.");
    parser.addArgument("--topic").action(store()).required(true).type(String.class)
        .metavar("TOPIC").help("Produce messages to this topic.");
    parser.addArgument("--broker-list").action(store()).required(true).type(String.class)
        .metavar("HOST1:PORT1[,HOST2:PORT2[...]]").dest("brokerList")
        .help("Comma-separated list of Kafka brokers in the form HOST1:PORT1,HOST2:PORT2,...");
    parser.addArgument("--max-messages").action(store()).required(false).setDefault(-1).type(Integer.class)
        .metavar("MAX-MESSAGES").dest("maxMessages")
        .help("Produce this many messages. If -1, produce messages until the process is killed externally.");
    parser.addArgument("--acks").action(store()).required(false).setDefault("-1").type(String.class)
        .choices("0", "1", "-1").metavar("ACKS")
        .help("Acks required on each produced message. See Kafka docs on request.required.acks for details.");
    parser.addArgument("--security-protocol").action(store()).required(false).setDefault("PLAINTEXT").type(String.class)
        .choices("PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL").metavar("SECURITY-PROTOCOL").dest("securityProtocol")
        .help("Security protocol to be used while communicating with Kafka brokers.");
    parser.addArgument("--ssl-truststore-location").action(store()).required(false).type(String.class)
        .metavar("SSL-TRUSTSTORE-LOCATION").dest("sslTruststoreLocation")
        .help("Location of the SSL truststore to use.");
    parser.addArgument("--ssl-truststore-password").action(store()).required(false).type(String.class)
        .metavar("SSL-TRUSTSTORE-PASSWORD").dest("sslTruststorePassword")
        .help("Password for the SSL truststore to use.");
    parser.addArgument("--appender.config").action(store()).required(false).type(String.class)
        .metavar("CONFIG_FILE").help("Log4jAppender config properties file.");
    parser.addArgument("--sasl-kerberos-service-name").action(store()).required(false).type(String.class)
        .metavar("SASL-KERBEROS-SERVICE-NAME").dest("saslKerberosServiceName")
        .help("Name of the SASL Kerberos service.");
    parser.addArgument("--client-jaas-conf-path").action(store()).required(false).type(String.class)
        .metavar("CLIENT-JAAS-CONF-PATH").dest("clientJaasConfPath")
        .help("Path of the JAAS config file of the Kafka client.");
    parser.addArgument("--kerb5-conf-path").action(store()).required(false).type(String.class)
        .metavar("KERB5-CONF-PATH").dest("kerb5ConfPath")
        .help("Path of the Kerberos (krb5) config file.");
    return parser;
}
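The class's main method is not shown on this page. A hypothetical minimal version, following the same shape as the other tools here (the run() method name and the null check are assumptions; createFromArgs is the method shown above):

// Hypothetical minimal main; run() is an assumed method name.
public static void main(String[] args) {
    VerifiableLog4jAppender appender = createFromArgs(args);
    if (appender != null) { // createFromArgs returns null when argument parsing fails
        appender.run(); // logs increasing integers via KafkaLog4jAppender up to maxMessages
    }
}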
Use of net.sourceforge.argparse4j.inf.ArgumentParser in project apache-kafka-on-k8s by banzaicloud.
The class VerifiableProducer, method main.
public static void main(String[] args) {
    ArgumentParser parser = argParser();
    if (args.length == 0) {
        parser.printHelp();
        Exit.exit(0);
    }
    try {
        final VerifiableProducer producer = createFromArgs(parser, args);
        final long startMs = System.currentTimeMillis();
        ThroughputThrottler throttler = new ThroughputThrottler(producer.throughput, startMs);
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                // Trigger the main thread to stop producing messages
                producer.stopProducing = true;
                // Flush any remaining messages
                producer.close();
                // Print a summary
                long stopMs = System.currentTimeMillis();
                double avgThroughput = 1000.0 * producer.numAcked / (stopMs - startMs);
                producer.printJson(new ToolData(producer.numSent, producer.numAcked, producer.throughput, avgThroughput));
            }
        });
        producer.run(throttler);
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}
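Inside producer.run(throttler), the throttler paces the send loop so the producer holds the configured target throughput. A hedged sketch of the usual ThroughputThrottler pattern (shouldThrottle and throttle are the throttler's methods; the loop body is simplified and send() is a hypothetical stand-in for the tool's actual produce call):

// Simplified pacing loop; send(i) stands in for the tool's actual produce call.
void run(ThroughputThrottler throttler) {
    for (long i = 0; i < maxMessages && !stopProducing; i++) {
        long sendStartMs = System.currentTimeMillis();
        send(i); // hypothetical stand-in
        if (throttler.shouldThrottle(i, sendStartMs)) {
            throttler.throttle(); // sleeps just long enough to stay at the target rate
        }
    }
}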