Example 26 with Consumer

Use of org.apache.kafka.clients.consumer.Consumer in project brave by openzipkin.

The class TracingConsumerTest, method should_create_only_one_consumer_span_per_topic:

@Test
public void should_create_only_one_consumer_span_per_topic() {
    Map<TopicPartition, Long> offsets = new HashMap<>();
    // 2 partitions in the same topic
    offsets.put(new TopicPartition(TEST_TOPIC, 0), 0L);
    offsets.put(new TopicPartition(TEST_TOPIC, 1), 0L);
    consumer.updateBeginningOffsets(offsets);
    consumer.assign(offsets.keySet());
    // create 500 messages
    for (int i = 0; i < 250; i++) {
        consumer.addRecord(new ConsumerRecord<>(TEST_TOPIC, 0, i, TEST_KEY, TEST_VALUE));
        consumer.addRecord(new ConsumerRecord<>(TEST_TOPIC, 1, i, TEST_KEY, TEST_VALUE));
    }
    Consumer<String, String> tracingConsumer = kafkaTracing.consumer(consumer);
    tracingConsumer.poll(10);
    // only one consumer span reported
    assertThat(spans).hasSize(1).flatExtracting(s -> s.tags().entrySet()).containsOnly(entry("kafka.topic", "myTopic"));
}
Also used : Consumer(org.apache.kafka.clients.consumer.Consumer) TopicPartition(org.apache.kafka.common.TopicPartition) MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) Span(zipkin2.Span) HashMap(java.util.HashMap) Test(org.junit.Test) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) OffsetResetStrategy(org.apache.kafka.clients.consumer.OffsetResetStrategy) Assertions.entry(org.assertj.core.api.Assertions.entry) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Map(java.util.Map) AssertionsForInterfaceTypes.assertThat(org.assertj.core.api.AssertionsForInterfaceTypes.assertThat) Before(org.junit.Before)
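
The test body above leans on fixtures built elsewhere in TracingConsumerTest: a MockConsumer, a list collecting reported spans, and the KafkaTracing instance. A minimal sketch of what that setup could look like, assuming the field names seen in the test and hypothetical key/value constants; the project's actual @Before may differ:

import brave.Tracing;
import brave.kafka.clients.KafkaTracing;
import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.consumer.MockConsumer;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.junit.After;
import org.junit.Before;
import zipkin2.Span;

public class TracingConsumerTestFixtureSketch {
    String TEST_TOPIC = "myTopic"; // matches the "kafka.topic" tag asserted above
    String TEST_KEY = "key";       // hypothetical payloads
    String TEST_VALUE = "value";

    List<Span> spans = new ArrayList<>(); // collects spans reported by Brave
    MockConsumer<String, String> consumer;
    Tracing tracing;
    KafkaTracing kafkaTracing;

    @Before
    public void setup() {
        consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST);
        tracing = Tracing.newBuilder()
            .spanReporter(spans::add) // report finished spans into the in-memory list
            .build();
        kafkaTracing = KafkaTracing.create(tracing);
    }

    @After
    public void close() {
        tracing.close();
    }
}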

Example 27 with Consumer

Use of org.apache.kafka.clients.consumer.Consumer in project incubator-gobblin by apache.

The class KafkaSimpleStreamingSource, method getKafkaConsumer:

public static Consumer getKafkaConsumer(Config config) {
    List<String> brokers = ConfigUtils.getStringList(config, ConfigurationKeys.KAFKA_BROKERS);
    Properties props = new Properties();
    props.put("bootstrap.servers", Joiner.on(",").join(brokers));
    props.put("group.id", ConfigUtils.getString(config, ConfigurationKeys.JOB_NAME_KEY, StringUtils.EMPTY));
    props.put("enable.auto.commit", "false");
    Preconditions.checkArgument(config.hasPath(TOPIC_KEY_DESERIALIZER));
    props.put("key.deserializer", config.getString(TOPIC_KEY_DESERIALIZER));
    Preconditions.checkArgument(config.hasPath(TOPIC_VALUE_DESERIALIZER));
    props.put("value.deserializer", config.getString(TOPIC_VALUE_DESERIALIZER));
    // pass along any config scoped under source.kafka.config
    // one use case of this is to pass SSL configuration
    Config scopedConfig = ConfigUtils.getConfigOrEmpty(config, KAFKA_CONSUMER_CONFIG_PREFIX);
    props.putAll(ConfigUtils.configToProperties(scopedConfig));
    Consumer consumer = null;
    try {
        consumer = new KafkaConsumer<>(props);
    } catch (Exception e) {
        LOG.error("Exception when creating Kafka consumer", e);
        throw Throwables.propagate(e);
    }
    return consumer;
}
Also used : Consumer(org.apache.kafka.clients.consumer.Consumer) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) Config(com.typesafe.config.Config) Properties(java.util.Properties) IOException(java.io.IOException)
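
For readers who do not know Gobblin's ConfigUtils, the properties assembled above correspond one-to-one to Kafka's own ConsumerConfig constants. A minimal sketch of the same consumer construction without the Gobblin layer (the class, method, and StringDeserializer choice are illustrative assumptions, not part of either project):

import java.util.Properties;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class PlainKafkaConsumerFactorySketch {

    public static Consumer<String, String> newConsumer(String brokers, String groupId) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers);  // "bootstrap.servers"
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);           // "group.id"
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); // offsets committed by the caller
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        return new KafkaConsumer<>(props);
    }
}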

Example 28 with Consumer

Use of org.apache.kafka.clients.consumer.Consumer in project apache-kafka-on-k8s by banzaicloud.

The class AbstractTaskTest, method shouldNotAttemptToLockIfNoStores:

@Test
public void shouldNotAttemptToLockIfNoStores() {
    final Consumer consumer = EasyMock.createNiceMock(Consumer.class);
    EasyMock.replay(stateDirectory);
    final AbstractTask task = createTask(consumer, Collections.<StateStore, String>emptyMap());
    task.registerStateStores();
    // should fail if lock is called
    EasyMock.verify(stateDirectory);
}
Also used : MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) Consumer(org.apache.kafka.clients.consumer.Consumer) Test(org.junit.Test)
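
The point of this test is the asymmetry between the two mocks: the Consumer is a nice mock, so incidental calls return defaults, while stateDirectory is replayed with no recorded expectations, so any interaction with it (such as a lock call) fails the test. A generic sketch of that EasyMock pattern, using a hypothetical stand-in interface rather than the real StateDirectory:

import org.easymock.EasyMock;
import org.junit.Test;

public class StrictMockPatternSketch {

    // Illustrative stand-in for the real dependency that must not be touched.
    public interface StateDirectory {
        boolean lock(String taskId);
    }

    @Test
    public void noInteractionExpected() {
        // A default EasyMock mock with no recorded expectations: any call made after
        // replay() fails immediately with "Unexpected method call", and verify()
        // confirms every recorded expectation (here: none) was satisfied.
        StateDirectory stateDirectory = EasyMock.createMock(StateDirectory.class);
        EasyMock.replay(stateDirectory);

        // ... exercise code that must NOT touch stateDirectory ...

        EasyMock.verify(stateDirectory);
    }
}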

Example 29 with Consumer

Use of org.apache.kafka.clients.consumer.Consumer in project apache-kafka-on-k8s by banzaicloud.

The class AbstractTaskTest, method shouldThrowProcessorStateExceptionOnInitializeOffsetsWhenAuthorizationException:

@Test(expected = ProcessorStateException.class)
public void shouldThrowProcessorStateExceptionOnInitializeOffsetsWhenAuthorizationException() {
    final Consumer consumer = mockConsumer(new AuthorizationException("blah"));
    final AbstractTask task = createTask(consumer, Collections.<StateStore, String>emptyMap());
    task.updateOffsetLimits();
}
Also used : MockConsumer(org.apache.kafka.clients.consumer.MockConsumer) Consumer(org.apache.kafka.clients.consumer.Consumer) AuthorizationException(org.apache.kafka.common.errors.AuthorizationException) Test(org.junit.Test)
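
The mockConsumer(...) helper is defined elsewhere in AbstractTaskTest. A plausible shape for it, written as an assumption rather than a copy of the real code: a MockConsumer whose committed-offset lookup throws the supplied exception, which updateOffsetLimits() is then expected to wrap in a ProcessorStateException:

// Assumed helper (uses MockConsumer, OffsetResetStrategy, OffsetAndMetadata and
// TopicPartition from the Kafka clients library): throws from committed(), the
// lookup updateOffsetLimits() performs for the task's partitions.
private Consumer mockConsumer(final RuntimeException toThrow) {
    return new MockConsumer(OffsetResetStrategy.EARLIEST) {
        @Override
        public OffsetAndMetadata committed(final TopicPartition partition) {
            throw toThrow;
        }
    };
}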

Example 30 with Consumer

Use of org.apache.kafka.clients.consumer.Consumer in project core-ng-project by neowu.

The class KafkaMessageListenerThread, method process:

private void process(Consumer<String, byte[]> consumer, ConsumerRecords<String, byte[]> kafkaRecords) {
    StopWatch watch = new StopWatch();
    int count = 0;
    int size = 0;
    try {
        Map<String, List<ConsumerRecord<String, byte[]>>> messages = Maps.newLinkedHashMap();
        for (ConsumerRecord<String, byte[]> record : kafkaRecords) {
            messages.computeIfAbsent(record.topic(), key -> Lists.newArrayList()).add(record);
            count++;
            size += record.value().length;
        }
        for (Map.Entry<String, List<ConsumerRecord<String, byte[]>>> entry : messages.entrySet()) {
            String topic = entry.getKey();
            List<ConsumerRecord<String, byte[]>> records = entry.getValue();
            KafkaMessageListener.BulkMessageHandlerHolder<?> bulkHandlerHolder = bulkHandlerHolders.get(topic);
            if (bulkHandlerHolder != null) {
                handle(topic, bulkHandlerHolder, records, longProcessThreshold(batchLongProcessThresholdInNano, records.size(), count));
            } else {
                KafkaMessageListener.MessageHandlerHolder<?> handlerHolder = handlerHolders.get(topic);
                if (handlerHolder != null) {
                    handle(topic, handlerHolder, records, longProcessThreshold(batchLongProcessThresholdInNano, 1, count));
                }
            }
        }
    } finally {
        consumer.commitAsync();
        logger.info("process kafka records, count={}, size={}, elapsedTime={}", count, size, watch.elapsedTime());
    }
}
Also used : Markers(core.framework.log.Markers) Consumer(org.apache.kafka.clients.consumer.Consumer) Logger(org.slf4j.Logger) Message(core.framework.kafka.Message) Headers(org.apache.kafka.common.header.Headers) LoggerFactory(org.slf4j.LoggerFactory) ActionLog(core.framework.impl.log.ActionLog) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Threads(core.framework.util.Threads) ConsumerRecords(org.apache.kafka.clients.consumer.ConsumerRecords) BytesParam(core.framework.impl.log.filter.BytesParam) StopWatch(core.framework.util.StopWatch) ArrayList(java.util.ArrayList) Lists(core.framework.util.Lists) List(java.util.List) Header(org.apache.kafka.common.header.Header) ConsumerRecord(org.apache.kafka.clients.consumer.ConsumerRecord) Duration(java.time.Duration) Map(java.util.Map) Maps(core.framework.util.Maps) LogManager(core.framework.impl.log.LogManager) Charsets(core.framework.util.Charsets)
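
process(...) is driven by the listener thread's poll loop, and the commitAsync() in the finally block means offsets are committed only after a batch has been handed to the handlers, i.e. at-least-once delivery. A simplified sketch of such a loop, with the stop flag and poll timeout as assumptions rather than core-ng internals:

private void pollAndProcess(Consumer<String, byte[]> consumer, AtomicBoolean stop) {
    while (!stop.get()) {
        ConsumerRecords<String, byte[]> records = consumer.poll(Duration.ofSeconds(30));
        if (records.isEmpty()) continue;
        // process() groups records by topic, dispatches them to the registered
        // handlers, and commits offsets asynchronously in its finally block.
        process(consumer, records);
    }
}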

Aggregations

Consumer (org.apache.kafka.clients.consumer.Consumer): 35
Test (org.junit.Test): 22
Map (java.util.Map): 20
TopicPartition (org.apache.kafka.common.TopicPartition): 20
ArrayList (java.util.ArrayList): 17
HashMap (java.util.HashMap): 17
List (java.util.List): 17
Collections (java.util.Collections): 16
Set (java.util.Set): 16
Properties (java.util.Properties): 15
Collection (java.util.Collection): 14
MockConsumer (org.apache.kafka.clients.consumer.MockConsumer): 14
Collectors (java.util.stream.Collectors): 13
KafkaException (org.apache.kafka.common.KafkaException): 11
HashSet (java.util.HashSet): 10
ConsumerRecord (org.apache.kafka.clients.consumer.ConsumerRecord): 10
StreamsConfig (org.apache.kafka.streams.StreamsConfig): 10
Logger (org.slf4j.Logger): 10
ConsumerRecords (org.apache.kafka.clients.consumer.ConsumerRecords): 9
PartitionInfo (org.apache.kafka.common.PartitionInfo): 9