Usage of net.sourceforge.argparse4j.inf.ArgumentParserException in the project cogcomp-nlp by CogComp.
Class MainServer, method startServer:
public static void startServer(String[] args, Logger logger) {
Namespace parseResults;
try {
parseResults = argumentParser.parseArgs(args);
} catch (HelpScreenException ex) {
return;
} catch (ArgumentParserException ex) {
logger.error("Exception while parsing arguments", ex);
return;
}
port(parseResults.getInt("port"));
// create a hashmap to keep track of client ip addresses and their
int rate = parseResults.getInt("rate");
if (rate > 0) {
clients = new HashMap<String, Integer>();
}
AnnotatorService finalPipeline = pipeline;
get("/annotate", "application/json", (request, response) -> {
logger.info("GET request . . . ");
boolean canServe = true;
if (rate > 0) {
resetServer();
String ip = request.ip();
int callsSofar = (Integer) clients.getOrDefault(ip, 0);
if (callsSofar > rate)
canServe = false;
clients.put(ip, callsSofar + 1);
}
if (canServe) {
logger.info("request.body(): " + request.body());
String text = request.queryParams("text");
String views = request.queryParams("views");
return annotateText(finalPipeline, text, views, logger);
} else {
response.status(429);
return "You have reached your maximum daily query limit :-/ ";
}
});
post("/annotate", (request, response) -> {
logger.info("POST request . . . ");
boolean canServe = true;
if (rate > 0) {
resetServer();
String ip = request.ip();
int callsSofar = (Integer) clients.getOrDefault(ip, 0);
if (callsSofar > rate)
canServe = false;
clients.put(ip, callsSofar + 1);
}
if (canServe) {
logger.info("request.body(): " + request.body());
Map<String, String> map = splitQuery(request.body());
System.out.println("POST body parameters parsed: " + map);
String text = map.get("text");
String views = map.get("views");
return annotateText(finalPipeline, text, views, logger);
} else {
response.status(429);
return "You have reached your maximum daily query limit :-/ ";
}
});
// api to get name of the available views
String viewsString = "";
for (String view : pipeline.getAvailableViews()) {
viewsString += ", " + view;
}
String finalViewsString = viewsString;
enableCORS("*", "*", "*");
get("/viewNames", (req, res) -> finalViewsString);
post("/viewNames", (req, res) -> finalViewsString);
}
Usage of net.sourceforge.argparse4j.inf.ArgumentParserException in the project apache-kafka-on-k8s by banzaicloud.
Class VerifiableConsumer, method createFromArgs:
/**
 * Builds a VerifiableConsumer from command-line arguments.
 *
 * @param parser the argument parser used for both parsing and error reporting
 * @param args   raw command-line arguments
 * @return a configured VerifiableConsumer writing its output to System.out
 * @throws ArgumentParserException on invalid arguments or an unreadable config file
 */
public static VerifiableConsumer createFromArgs(ArgumentParser parser, String[] args) throws ArgumentParserException {
    Namespace ns = parser.parseArgs(args);

    // Start from the optional properties file (if given), then layer the
    // command-line driven settings on top.
    Properties props = new Properties();
    String configFile = ns.getString("consumer.config");
    if (configFile != null) {
        try {
            props.putAll(Utils.loadProps(configFile));
        } catch (IOException e) {
            // Surface the I/O failure as an argument-parsing error so the
            // caller's existing error handling applies.
            throw new ArgumentParserException(e.getMessage(), parser);
        }
    }

    boolean useAutoCommit = ns.getBoolean("useAutoCommit");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, ns.getString("groupId"));
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, ns.getString("brokerList"));
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, useAutoCommit);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, ns.getString("resetPolicy"));
    props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, Integer.toString(ns.getInt("sessionTimeout")));
    props.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, ns.getString("assignmentStrategy"));

    // A single StringDeserializer instance serves both keys and values.
    StringDeserializer stringDeserializer = new StringDeserializer();
    KafkaConsumer<String, String> kafkaConsumer =
            new KafkaConsumer<>(props, stringDeserializer, stringDeserializer);

    return new VerifiableConsumer(
            kafkaConsumer,
            System.out,
            ns.getString("topic"),
            ns.getInt("maxMessages"),
            useAutoCommit,
            false,
            ns.getBoolean("verbose"));
}
Usage of net.sourceforge.argparse4j.inf.ArgumentParserException in the project apache-kafka-on-k8s by banzaicloud.
Class VerifiableConsumer, method main:
/**
 * Entry point: with no arguments prints usage and exits 0; otherwise builds
 * a consumer from the arguments, registers a shutdown hook that closes it,
 * and runs it. Argument errors are reported via the parser and exit 1.
 */
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    // No arguments at all: show help and exit successfully.
    if (args.length == 0) {
        parser.printHelp();
        Exit.exit(0);
    }

    try {
        final VerifiableConsumer consumer = createFromArgs(parser, args);
        // Close the consumer cleanly when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread(consumer::close));
        consumer.run();
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}
Usage of net.sourceforge.argparse4j.inf.ArgumentParserException in the project apache-kafka-on-k8s by banzaicloud.
Class VerifiableLog4jAppender, method createFromArgs:
/**
 * Construct a VerifiableLog4jAppender object from command-line arguments.
 *
 * Builds a full log4j configuration (root logger routed to a KafkaLog4jAppender)
 * from the parsed options, optionally layering security settings and a
 * user-supplied properties file on top.
 *
 * With no arguments at all, prints usage and exits 0; on any other argument
 * error, reports via the parser and exits 1. Returns null only if Exit.exit
 * does not actually terminate (e.g. when overridden in tests).
 */
public static VerifiableLog4jAppender createFromArgs(String[] args) {
ArgumentParser parser = argParser();
VerifiableLog4jAppender producer = null;
try {
Namespace res = parser.parseArgs(args);
int maxMessages = res.getInt("maxMessages");
String topic = res.getString("topic");
String configFile = res.getString("appender.config");
// Base appender wiring: route everything at INFO to the Kafka appender.
Properties props = new Properties();
props.setProperty("log4j.rootLogger", "INFO, KAFKA");
props.setProperty("log4j.appender.KAFKA", "org.apache.kafka.log4jappender.KafkaLog4jAppender");
props.setProperty("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout");
props.setProperty("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n");
props.setProperty("log4j.appender.KAFKA.BrokerList", res.getString("brokerList"));
props.setProperty("log4j.appender.KAFKA.Topic", topic);
props.setProperty("log4j.appender.KAFKA.RequiredNumAcks", res.getString("acks"));
props.setProperty("log4j.appender.KAFKA.SyncSend", "true");
// Security: only set the protocol when it differs from the PLAINTEXT default,
// then add SSL/SASL specifics keyed off the protocol name.
final String securityProtocol = res.getString("securityProtocol");
if (securityProtocol != null && !securityProtocol.equals(SecurityProtocol.PLAINTEXT.toString())) {
props.setProperty("log4j.appender.KAFKA.SecurityProtocol", securityProtocol);
}
if (securityProtocol != null && securityProtocol.contains("SSL")) {
props.setProperty("log4j.appender.KAFKA.SslTruststoreLocation", res.getString("sslTruststoreLocation"));
props.setProperty("log4j.appender.KAFKA.SslTruststorePassword", res.getString("sslTruststorePassword"));
}
if (securityProtocol != null && securityProtocol.contains("SASL")) {
props.setProperty("log4j.appender.KAFKA.SaslKerberosServiceName", res.getString("saslKerberosServiceName"));
props.setProperty("log4j.appender.KAFKA.clientJaasConfPath", res.getString("clientJaasConfPath"));
props.setProperty("log4j.appender.KAFKA.kerb5ConfPath", res.getString("kerb5ConfPath"));
}
props.setProperty("log4j.logger.kafka.log4j", "INFO, KAFKA");
// Changing log level from INFO to WARN as a temporary workaround for KAFKA-6415. This is to
// avoid deadlock in system tests when producer network thread appends to log while updating metadata.
props.setProperty("log4j.logger.org.apache.kafka.clients.Metadata", "WARN, KAFKA");
// A user-supplied properties file overrides any of the defaults built above.
if (configFile != null) {
try {
props.putAll(loadProps(configFile));
} catch (IOException e) {
// Surface the I/O failure as an argument error so the catch below handles it.
throw new ArgumentParserException(e.getMessage(), parser);
}
}
producer = new VerifiableLog4jAppender(props, maxMessages);
} catch (ArgumentParserException e) {
if (args.length == 0) {
// Bare invocation: treat as a help request, not an error.
parser.printHelp();
Exit.exit(0);
} else {
parser.handleError(e);
Exit.exit(1);
}
}
return producer;
}
Usage of net.sourceforge.argparse4j.inf.ArgumentParserException in the project apache-kafka-on-k8s by banzaicloud.
Class VerifiableProducer, method main:
/**
 * Entry point: with no arguments prints usage and exits 0; otherwise builds
 * a producer from the arguments, installs a shutdown hook that stops
 * production, flushes, and prints a run summary, then runs the producer
 * under a throughput throttler. Argument errors are reported and exit 1.
 */
public static void main(String[] args) {
    ArgumentParser parser = argParser();

    // No arguments at all: show help and exit successfully.
    if (args.length == 0) {
        parser.printHelp();
        Exit.exit(0);
    }

    try {
        final VerifiableProducer producer = createFromArgs(parser, args);
        final long startMs = System.currentTimeMillis();
        final ThroughputThrottler throttler = new ThroughputThrottler(producer.throughput, startMs);

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            // Trigger main thread to stop producing messages.
            producer.stopProducing = true;
            // Flush any remaining messages.
            producer.close();
            // Print a summary of the run (messages/sec averaged over the whole run).
            long stopMs = System.currentTimeMillis();
            double avgThroughput = 1000 * ((producer.numAcked) / (double) (stopMs - startMs));
            producer.printJson(new ToolData(producer.numSent, producer.numAcked, producer.throughput, avgThroughput));
        }));

        producer.run(throttler);
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}
Aggregations