Search in sources :

Example 1 with KafkaProducer

Use of org.apache.kafka.clients.producer.KafkaProducer in the Apache Kafka project.

The class SimpleBenchmark, method produce.

/**
     * Produce values to a topic.
     * @param topic Topic to produce to
     * @param valueSizeBytes Size of value in bytes
     * @param clientId String specifying client ID
     * @param numRecords Number of records to produce
     * @param sequential if true, then keys are produced sequentially from 0 to upperRange. In this case upperRange must be >= numRecords.
     *                   if false, then keys are produced randomly in range [0, upperRange)
     * @param upperRange exclusive upper bound for keys (see {@code sequential})
     * @param printStats if true, print stats on how long producing took. If false, don't print stats. False can be used
     *                   when this produce step is part of another benchmark that produces its own stats
     * @throws Exception if {@code sequential} is true and {@code upperRange < numRecords}
     */
private void produce(String topic, int valueSizeBytes, String clientId, int numRecords, boolean sequential, int upperRange, boolean printStats) throws Exception {
    processedRecords = 0;
    processedBytes = 0;
    if (sequential && upperRange < numRecords) {
        // Sequential keys run 0..numRecords-1, so the range must be large enough to hold them all.
        throw new Exception("UpperRange must be >= numRecords");
    }
    if (!sequential) {
        System.out.println("WARNING: You are using non-sequential keys. If your tests' exit logic expects to see a final key, random keys may not work.");
    }
    Properties props = setProduceConsumeProperties(clientId);
    Random rand = new Random();
    byte[] value = new byte[valueSizeBytes];
    // Fill the value with random bytes to increase entropy. Some devices
    // like SSDs do compression, and if the array were all zeros
    // the measured performance would be unrealistically good.
    rand.nextBytes(value);
    long startTime = System.currentTimeMillis();
    // First key: 0 in the sequential case, random otherwise.
    int key = sequential ? 0 : rand.nextInt(upperRange);
    // try-with-resources guarantees the producer is closed (and its buffered
    // sends flushed) even if a send throws mid-loop; the original leaked it on error.
    try (KafkaProducer<Integer, byte[]> producer = new KafkaProducer<>(props)) {
        for (int i = 0; i < numRecords; i++) {
            producer.send(new ProducerRecord<>(topic, key, value));
            key = sequential ? key + 1 : rand.nextInt(upperRange);
            processedRecords++;
            // Account for the serialized Integer key in BYTES. The original added
            // Integer.SIZE, which is the width in BITS (32), over-counting each
            // record's key by 28 bytes.
            processedBytes += value.length + Integer.SIZE / Byte.SIZE;
        }
    }
    // close() flushes outstanding sends, so timing after the try block captures the full produce cost.
    long endTime = System.currentTimeMillis();
    if (printStats) {
        printResults("Producer Performance [records/latency/rec-sec/MB-sec write]: ", endTime - startTime);
    }
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) Random(java.util.Random) Properties(java.util.Properties)

Example 2 with KafkaProducer

Use of org.apache.kafka.clients.producer.KafkaProducer in the Apache Kafka project.

The class SmokeTestDriver, method generate.

/**
 * Produces {@code maxRecordsPerKey} integer values for each of {@code numKeys} keys
 * to the "data" topic, interleaving the keys in random order, and returns every
 * value that was produced per key so a verifier can later check consumption.
 *
 * @param kafka bootstrap servers string for the producer
 * @param numKeys number of distinct keys to generate
 * @param maxRecordsPerKey number of records produced per key
 * @return unmodifiable map from key to the set of integer values produced for it
 * @throws Exception declared for callers; value generation itself does not throw checked exceptions
 */
public static Map<String, Set<Integer>> generate(String kafka, final int numKeys, final int maxRecordsPerKey) throws Exception {
    Properties props = new Properties();
    props.put(ProducerConfig.CLIENT_ID_CONFIG, "SmokeTest");
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
    int numRecordsProduced = 0;
    Map<String, Set<Integer>> allData = new HashMap<>();
    ValueList[] data = new ValueList[numKeys];
    for (int i = 0; i < numKeys; i++) {
        data[i] = new ValueList(i, i + maxRecordsPerKey - 1);
        allData.put(data[i].key, new HashSet<Integer>());
    }
    Random rand = new Random();
    int remaining = data.length;
    // try-with-resources closes (and flushes) the producer even if value
    // generation or a send throws; the original leaked it on error.
    try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(props)) {
        while (remaining > 0) {
            // Pick a random still-active key so records for different keys interleave.
            int index = rand.nextInt(remaining);
            String key = data[index].key;
            int value = data[index].next();
            if (value < 0) {
                // Key exhausted (next() signals this with a negative value):
                // swap-remove it from the active prefix of the array.
                remaining--;
                data[index] = data[remaining];
            } else {
                ProducerRecord<byte[], byte[]> record = new ProducerRecord<>("data", stringSerde.serializer().serialize("", key), intSerde.serializer().serialize("", value));
                producer.send(record, new Callback() {

                    @Override
                    public void onCompletion(final RecordMetadata metadata, final Exception exception) {
                        // Any asynchronous send failure aborts the whole test driver.
                        if (exception != null) {
                            exception.printStackTrace();
                            Exit.exit(1);
                        }
                    }
                });
                numRecordsProduced++;
                allData.get(key).add(value);
                if (numRecordsProduced % 100 == 0)
                    System.out.println(numRecordsProduced + " records produced");
                // Brief pause between sends — presumably to pace the smoke test; confirm before removing.
                Utils.sleep(2);
            }
        }
    }
    return Collections.unmodifiableMap(allData);
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) HashSet(java.util.HashSet) Set(java.util.Set) HashMap(java.util.HashMap) Properties(java.util.Properties) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) Callback(org.apache.kafka.clients.producer.Callback) Random(java.util.Random) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord)

Example 3 with KafkaProducer

Use of org.apache.kafka.clients.producer.KafkaProducer in the Apache Kafka project.

The class ClientCompatibilityTest, method testProduce.

/**
 * Sends two pre-defined test messages ({@code message1}, {@code message2}) to the
 * configured topic and blocks until both sends complete, rethrowing any send failure.
 *
 * @throws Exception if either send fails or the producer cannot be created
 */
public void testProduce() throws Exception {
    Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, testConfig.bootstrapServer);
    ByteArraySerializer serializer = new ByteArraySerializer();
    // try-with-resources closes the producer even when a send or get() throws;
    // the original leaked it on any failure.
    try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(producerProps, serializer, serializer)) {
        ProducerRecord<byte[], byte[]> record1 = new ProducerRecord<>(testConfig.topic, message1);
        Future<RecordMetadata> future1 = producer.send(record1);
        ProducerRecord<byte[], byte[]> record2 = new ProducerRecord<>(testConfig.topic, message2);
        Future<RecordMetadata> future2 = producer.send(record2);
        producer.flush();
        // Surface any asynchronous send error as an exception from this method.
        future1.get();
        future2.get();
    }
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) RecordMetadata(org.apache.kafka.clients.producer.RecordMetadata) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Properties(java.util.Properties) ByteArraySerializer(org.apache.kafka.common.serialization.ByteArraySerializer)

Example 4 with KafkaProducer

Use of org.apache.kafka.clients.producer.KafkaProducer in the Apache Kafka project.

The class ShutdownDeadlockTest, method start.

/**
 * Builds a streams topology whose foreach deliberately throws, seeds the source
 * topic with one record, starts the streams app, and then blocks forever; the
 * uncaught-exception handler and the shutdown hook's streams.close() are expected
 * not to deadlock each other.
 */
public void start() {
    final String topic = "source";
    final Properties props = new Properties();
    props.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "shouldNotDeadlock");
    props.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    final KStreamBuilder builder = new KStreamBuilder();
    final KStream<String, String> source = builder.stream(Serdes.String(), Serdes.String(), topic);
    source.foreach(new ForeachAction<String, String>() {

        @Override
        public void apply(final String key, final String value) {
            // Deliberate crash: the point of this test is to exercise shutdown on failure.
            throw new RuntimeException("KABOOM!");
        }
    });
    final KafkaStreams streams = new KafkaStreams(builder, props);
    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {

        @Override
        public void uncaughtException(final Thread t, final Throwable e) {
            Exit.exit(1);
        }
    });
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {

        @Override
        public void run() {
            streams.close(5, TimeUnit.SECONDS);
        }
    }));
    final Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, "SmokeTest");
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka);
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    // try-with-resources releases the producer's threads and buffers once the
    // seed record is flushed; the original never closed it at all.
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(producerProps)) {
        producer.send(new ProducerRecord<>(topic, "a", "a"));
        producer.flush();
    }
    streams.start();
    synchronized (this) {
        try {
            // Park this thread indefinitely; the process is expected to exit via
            // Exit.exit(1) in the uncaught-exception handler or the shutdown hook.
            wait();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}
Also used : KStreamBuilder(org.apache.kafka.streams.kstream.KStreamBuilder) KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) KafkaStreams(org.apache.kafka.streams.KafkaStreams) Properties(java.util.Properties)

Example 5 with KafkaProducer

Use of org.apache.kafka.clients.producer.KafkaProducer in the Apache Storm project.

The class KafkaUtilsTest, method createTopicAndSendMessage.

/**
 * Synchronously sends a single key/value record to the test topic, failing the
 * test (after logging) if the send does not complete.
 *
 * @param key record key, serialized as a String
 * @param value record value, serialized as a String
 */
private void createTopicAndSendMessage(String key, String value) {
    Properties p = new Properties();
    p.put("acks", "1");
    p.put("bootstrap.servers", broker.getBrokerConnectionString());
    p.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    p.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    p.put("metadata.fetch.timeout.ms", 1000);
    // try-with-resources replaces the explicit finally/close of the original.
    try (KafkaProducer<String, String> producer = new KafkaProducer<String, String>(p)) {
        // get() makes the send synchronous and surfaces any send failure here.
        producer.send(new ProducerRecord<String, String>(config.topic, key, value)).get();
    } catch (Exception e) {
        // Log BEFORE failing: Assert.fail throws AssertionError, so in the original
        // the LOG.error placed after it was unreachable and never executed.
        LOG.error("Failed to do synchronous sending due to " + e, e);
        Assert.fail(e.getMessage());
    }
}
Also used : KafkaProducer(org.apache.kafka.clients.producer.KafkaProducer) ProducerRecord(org.apache.kafka.clients.producer.ProducerRecord) Properties(java.util.Properties)

Aggregations

KafkaProducer (org.apache.kafka.clients.producer.KafkaProducer)19 Properties (java.util.Properties)12 ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)8 StringSerializer (org.apache.kafka.common.serialization.StringSerializer)4 ArrayList (java.util.ArrayList)3 Random (java.util.Random)3 Test (org.junit.Test)3 ISE (io.druid.java.util.common.ISE)2 IOException (java.io.IOException)2 InputStream (java.io.InputStream)2 HashMap (java.util.HashMap)2 Map (java.util.Map)2 TopicExistsException (kafka.common.TopicExistsException)2 ZkClient (org.I0Itec.zkclient.ZkClient)2 Callback (org.apache.kafka.clients.producer.Callback)2 RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata)2 KafkaStreams (org.apache.kafka.streams.KafkaStreams)2 DateTime (org.joda.time.DateTime)2 DateTimeZone (org.joda.time.DateTimeZone)2 DateTimeFormatter (org.joda.time.format.DateTimeFormatter)2