
Example 6 with ArgumentParserException

use of net.sourceforge.argparse4j.inf.ArgumentParserException in project kafka by apache.

the class ProducerPerformance method main.

public static void main(String[] args) throws Exception {
    ArgumentParser parser = argParser();
    try {
        Namespace res = parser.parseArgs(args);
        /* parse args */
        String topicName = res.getString("topic");
        long numRecords = res.getLong("numRecords");
        Integer recordSize = res.getInt("recordSize");
        int throughput = res.getInt("throughput");
        List<String> producerProps = res.getList("producerConfig");
        String producerConfig = res.getString("producerConfigFile");
        String payloadFilePath = res.getString("payloadFile");
        // since default value gets printed with the help text, we are escaping \n there and replacing it with correct value here.
        String payloadDelimiter = res.getString("payloadDelimiter").equals("\\n") ? "\n" : res.getString("payloadDelimiter");
        if (producerProps == null && producerConfig == null) {
            throw new ArgumentParserException("Either --producer-props or --producer.config must be specified.", parser);
        }
        List<byte[]> payloadByteList = new ArrayList<>();
        if (payloadFilePath != null) {
            Path path = Paths.get(payloadFilePath);
            System.out.println("Reading payloads from: " + path.toAbsolutePath());
            if (Files.notExists(path) || Files.size(path) == 0) {
                throw new IllegalArgumentException("File does not exist or empty file provided.");
            }
            String[] payloadList = new String(Files.readAllBytes(path), "UTF-8").split(payloadDelimiter);
            System.out.println("Number of messages read: " + payloadList.length);
            for (String payload : payloadList) {
                payloadByteList.add(payload.getBytes(StandardCharsets.UTF_8));
            }
        }
        Properties props = new Properties();
        if (producerConfig != null) {
            props.putAll(Utils.loadProps(producerConfig));
        }
        if (producerProps != null)
            for (String prop : producerProps) {
                String[] pieces = prop.split("=");
                if (pieces.length != 2)
                    throw new IllegalArgumentException("Invalid property: " + prop);
                props.put(pieces[0], pieces[1]);
            }
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        KafkaProducer<byte[], byte[]> producer = new KafkaProducer<byte[], byte[]>(props);
        /* setup perf test */
        byte[] payload = null;
        Random random = new Random(0);
        if (recordSize != null) {
            payload = new byte[recordSize];
            for (int i = 0; i < payload.length; ++i) payload[i] = (byte) (random.nextInt(26) + 65);
        }
        ProducerRecord<byte[], byte[]> record;
        Stats stats = new Stats(numRecords, 5000);
        long startMs = System.currentTimeMillis();
        ThroughputThrottler throttler = new ThroughputThrottler(throughput, startMs);
        for (int i = 0; i < numRecords; i++) {
            if (payloadFilePath != null) {
                payload = payloadByteList.get(random.nextInt(payloadByteList.size()));
            }
            record = new ProducerRecord<>(topicName, payload);
            long sendStartMs = System.currentTimeMillis();
            Callback cb = stats.nextCompletion(sendStartMs, payload.length, stats);
            producer.send(record, cb);
            if (throttler.shouldThrottle(i, sendStartMs)) {
                throttler.throttle();
            }
        }
        /* print final results */
        producer.close();
        stats.printTotal();
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            Exit.exit(0);
        } else {
            parser.handleError(e);
            Exit.exit(1);
        }
    }
}
Also used : Path(java.nio.file.Path) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) ArrayList(java.util.ArrayList) Properties(java.util.Properties) ArgumentParser(net.sourceforge.argparse4j.inf.ArgumentParser) Namespace(net.sourceforge.argparse4j.inf.Namespace) Callback(org.apache.kafka.clients.producer.Callback) Random(java.util.Random) ArgumentParserException(net.sourceforge.argparse4j.inf.ArgumentParserException)
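
This example (and the two Kafka examples that follow) relies on an argParser() helper that is not shown on this page. As a point of reference, here is a minimal sketch of what such a parser could look like with argparse4j; the program name, option set, and help strings are illustrative assumptions rather than the actual Kafka definition, and the dest(...) values are simply chosen to line up with the res.getString("topic"), res.getLong("numRecords"), and res.getInt("throughput") lookups used in main above.

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import static net.sourceforge.argparse4j.impl.Arguments.store;

// Illustrative sketch only, not the real ProducerPerformance.argParser().
private static ArgumentParser argParser() {
    ArgumentParser parser = ArgumentParsers.newArgumentParser("producer-performance")
            .defaultHelp(true)
            .description("Measure producer throughput against a Kafka cluster.");
    parser.addArgument("--topic")
            .action(store()).required(true).type(String.class).dest("topic")
            .help("topic to produce to");
    parser.addArgument("--num-records")
            .action(store()).required(true).type(Long.class).dest("numRecords")
            .help("number of records to send");
    parser.addArgument("--throughput")
            .action(store()).required(true).type(Integer.class).dest("throughput")
            .help("records per second cap, or -1 for no throttling");
    return parser;
}

Parsing then proceeds exactly as in main above: parser.parseArgs(args) returns a Namespace, and any missing or unknown option raises an ArgumentParserException that the catch block turns into printHelp() or handleError().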

Example 7 with ArgumentParserException

use of net.sourceforge.argparse4j.inf.ArgumentParserException in project kafka by apache.

the class VerifiableConsumer method main.

public static void main(String[] args) {
    ArgumentParser parser = argParser();
    if (args.length == 0) {
        parser.printHelp();
        Exit.exit(0);
    }
    try {
        final VerifiableConsumer consumer = createFromArgs(parser, args);
        Runtime.getRuntime().addShutdownHook(new Thread() {

            @Override
            public void run() {
                consumer.close();
            }
        });
        consumer.run();
    } catch (ArgumentParserException e) {
        parser.handleError(e);
        Exit.exit(1);
    }
}
Also used : ArgumentParserException(net.sourceforge.argparse4j.inf.ArgumentParserException) ArgumentParser(net.sourceforge.argparse4j.inf.ArgumentParser)
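
A detail worth noting in this example: when --help is passed on the command line, argparse4j prints the help text and then throws HelpScreenException, a subclass of ArgumentParserException, so the single catch block above also covers the help path. A minimal, self-contained illustration of that behaviour follows; the parser name is arbitrary, and Exit is the same Kafka utility used in the examples above.

import net.sourceforge.argparse4j.ArgumentParsers;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;
import net.sourceforge.argparse4j.internal.HelpScreenException;
import org.apache.kafka.common.utils.Exit;

public static void main(String[] args) {
    ArgumentParser parser = ArgumentParsers.newArgumentParser("verifiable-consumer");
    try {
        // Passing --help makes parseArgs() print the usage text and throw HelpScreenException.
        parser.parseArgs(new String[] { "--help" });
    } catch (HelpScreenException e) {
        // Help was requested, not a parse error: exit cleanly.
        Exit.exit(0);
    } catch (ArgumentParserException e) {
        // A genuine parse error: report it and exit non-zero.
        parser.handleError(e);
        Exit.exit(1);
    }
}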

Example 8 with ArgumentParserException

use of net.sourceforge.argparse4j.inf.ArgumentParserException in project kafka by apache.

the class VerifiableProducer method createFromArgs.

/** Construct a VerifiableProducer object from command-line arguments. */
public static VerifiableProducer createFromArgs(String[] args) {
    ArgumentParser parser = argParser();
    VerifiableProducer producer = null;
    try {
        Namespace res;
        res = parser.parseArgs(args);
        int maxMessages = res.getInt("maxMessages");
        String topic = res.getString("topic");
        int throughput = res.getInt("throughput");
        String configFile = res.getString("producer.config");
        Integer valuePrefix = res.getInt("valuePrefix");
        Properties producerProps = new Properties();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, res.getString("brokerList"));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
        producerProps.put(ProducerConfig.ACKS_CONFIG, Integer.toString(res.getInt("acks")));
        // No producer retries
        producerProps.put("retries", "0");
        if (configFile != null) {
            try {
                producerProps.putAll(loadProps(configFile));
            } catch (IOException e) {
                throw new ArgumentParserException(e.getMessage(), parser);
            }
        }
        producer = new VerifiableProducer(producerProps, topic, throughput, maxMessages, valuePrefix);
    } catch (ArgumentParserException e) {
        if (args.length == 0) {
            parser.printHelp();
            Exit.exit(0);
        } else {
            parser.handleError(e);
            Exit.exit(1);
        }
    }
    return producer;
}
Also used : IOException(java.io.IOException) ArgumentParserException(net.sourceforge.argparse4j.inf.ArgumentParserException) Properties(java.util.Properties) ArgumentParser(net.sourceforge.argparse4j.inf.ArgumentParser) Namespace(net.sourceforge.argparse4j.inf.Namespace)
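
The pattern to note in this example is that the IOException thrown while loading the --producer.config file is rewrapped as an ArgumentParserException, so a bad or missing config file is reported through the same parser.handleError(...) / Exit path as an invalid flag. Below is a stripped-down sketch of just that rewrapping; the method name and file handling are illustrative, not part of the Kafka code.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Properties;
import net.sourceforge.argparse4j.inf.ArgumentParser;
import net.sourceforge.argparse4j.inf.ArgumentParserException;

// Hypothetical helper: load a properties file, converting I/O failures into
// ArgumentParserException so they surface like any other argument error.
public static Properties loadProducerConfig(String configFile, ArgumentParser parser) throws ArgumentParserException {
    Properties props = new Properties();
    try (InputStream in = Files.newInputStream(Paths.get(configFile))) {
        props.load(in);
    } catch (IOException e) {
        throw new ArgumentParserException(e.getMessage(), parser);
    }
    return props;
}

Callers can then handle the failure exactly as createFromArgs does: catch ArgumentParserException, call parser.handleError(e), and exit non-zero.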

Example 9 with ArgumentParserException

use of net.sourceforge.argparse4j.inf.ArgumentParserException in project helios by spotify.

the class CliParser method computeTargets.

private List<Target> computeTargets(final ArgumentParser parser, final List<String> explicitEndpoints, final List<String> domainsArguments, final String srvName) {
    if (explicitEndpoints != null && !explicitEndpoints.isEmpty()) {
        final List<Target> targets = Lists.newArrayListWithExpectedSize(explicitEndpoints.size());
        for (final String endpoint : explicitEndpoints) {
            targets.add(Target.from(URI.create(endpoint)));
        }
        return targets;
    } else if (domainsArguments != null && !domainsArguments.isEmpty()) {
        final Iterable<String> domains = parseDomains(domainsArguments);
        return Target.from(srvName, domains);
    } else if (!cliConfig.getMasterEndpoints().isEmpty()) {
        final List<URI> cliConfigMasterEndpoints = cliConfig.getMasterEndpoints();
        final List<Target> targets = Lists.newArrayListWithExpectedSize(cliConfigMasterEndpoints.size());
        for (final URI endpoint : cliConfigMasterEndpoints) {
            targets.add(Target.from(endpoint));
        }
        return targets;
    } else if (!cliConfig.getDomainsString().isEmpty()) {
        final Iterable<String> domains = parseDomainsString(cliConfig.getDomainsString());
        return Target.from(srvName, domains);
    }
    handleError(parser, new ArgumentParserException("no masters specified.  Use the -z or -d option to specify which helios " + "cluster/master to connect to", parser));
    return ImmutableList.of();
}
Also used : ArgumentParserException(net.sourceforge.argparse4j.inf.ArgumentParserException) URI(java.net.URI)

Aggregations

ArgumentParserException (net.sourceforge.argparse4j.inf.ArgumentParserException) : 9
Namespace (net.sourceforge.argparse4j.inf.Namespace) : 6
ArgumentParser (net.sourceforge.argparse4j.inf.ArgumentParser) : 5
Properties (java.util.Properties) : 4
IOException (java.io.IOException) : 3
AnnotatorService (edu.illinois.cs.cogcomp.annotation.AnnotatorService) : 1
URI (java.net.URI) : 1
Path (java.nio.file.Path) : 1
ArrayList (java.util.ArrayList) : 1
Random (java.util.Random) : 1
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean) : 1
AtomicReference (java.util.concurrent.atomic.AtomicReference) : 1
HelpScreenException (net.sourceforge.argparse4j.internal.HelpScreenException) : 1
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer) : 1
Callback (org.apache.kafka.clients.producer.Callback) : 1
KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer) : 1
StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer) : 1
Signal (sun.misc.Signal) : 1
SignalHandler (sun.misc.SignalHandler) : 1