Use of org.apache.kafka.clients.consumer.Consumer in project nakadi by zalando.
The class KafkaTopicRepositoryTest, method createKafkaFactory.
@SuppressWarnings("unchecked")
private KafkaFactory createKafkaFactory() {
    // Consumer
    final Consumer consumer = mock(Consumer.class);

    allTopics().forEach(topic -> when(consumer.partitionsFor(topic))
            .thenReturn(partitionsOfTopic(topic)));

    doAnswer(invocation -> {
        offsetMode = ConsumerOffsetMode.EARLIEST;
        return null;
    }).when(consumer).seekToBeginning(anyVararg());

    doAnswer(invocation -> {
        offsetMode = ConsumerOffsetMode.LATEST;
        return null;
    }).when(consumer).seekToEnd(anyVararg());

    when(consumer.position(any())).thenAnswer(invocation -> {
        final org.apache.kafka.common.TopicPartition tp =
                (org.apache.kafka.common.TopicPartition) invocation.getArguments()[0];
        return PARTITIONS.stream()
                .filter(ps -> ps.topic.equals(tp.topic()) && ps.partition == tp.partition())
                .findFirst()
                .map(ps -> offsetMode == ConsumerOffsetMode.LATEST ? ps.latestOffset : ps.earliestOffset)
                .orElseThrow(KafkaException::new);
    });

    // KafkaProducer
    when(kafkaProducer.send(EXPECTED_PRODUCER_RECORD)).thenReturn(mock(Future.class));

    // KafkaFactory
    final KafkaFactory kafkaFactory = mock(KafkaFactory.class);
    when(kafkaFactory.getConsumer(KAFKA_CLIENT_ID)).thenReturn(consumer);
    when(kafkaFactory.getConsumer()).thenReturn(consumer);
    when(kafkaFactory.takeProducer()).thenReturn(kafkaProducer);
    return kafkaFactory;
}
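The mock never tracks real offsets: seeking only flips a mode flag, and position() is answered from that flag plus the static PARTITIONS fixture. A minimal, self-contained sketch of the same pattern, using hypothetical offset values (0 and 100) and the Collection-based seek API of newer clients (the snippet above mocks the older varargs variant, hence anyVararg()):

import static org.mockito.Mockito.*;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.TopicPartition;

class ConsumerMockSketch {
    enum OffsetMode { EARLIEST, LATEST }
    private OffsetMode offsetMode = OffsetMode.EARLIEST;

    @SuppressWarnings("unchecked")
    Consumer<byte[], byte[]> createConsumer() {
        final Consumer<byte[], byte[]> consumer = mock(Consumer.class);
        // Seeking only flips the flag; no broker interaction happens.
        doAnswer(inv -> { offsetMode = OffsetMode.EARLIEST; return null; })
                .when(consumer).seekToBeginning(anyCollection());
        doAnswer(inv -> { offsetMode = OffsetMode.LATEST; return null; })
                .when(consumer).seekToEnd(anyCollection());
        // position() answers from the flag: hypothetical offsets, not real ones.
        when(consumer.position(any(TopicPartition.class)))
                .thenAnswer(inv -> offsetMode == OffsetMode.LATEST ? 100L : 0L);
        return consumer;
    }
}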
Use of org.apache.kafka.clients.consumer.Consumer in project nakadi by zalando.
The class KafkaTopicRepository, method loadTopicEndStatistics.
@Override
public List<PartitionEndStatistics> loadTopicEndStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final Map<TopicPartition, Timeline> backMap = new HashMap<>();
        for (final Timeline timeline : timelines) {
            consumer.partitionsFor(timeline.getTopic()).stream()
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .forEach(tp -> backMap.put(tp, timeline));
        }
        final List<TopicPartition> kafkaTPs = newArrayList(backMap.keySet());
        consumer.assign(kafkaTPs);
        consumer.seekToEnd(kafkaTPs);
        return backMap.entrySet().stream()
                .map(e -> {
                    final TopicPartition tp = e.getKey();
                    final Timeline timeline = e.getValue();
                    // position() after seekToEnd() is the next offset to be written,
                    // so the last published offset is position - 1.
                    return new KafkaPartitionEndStatistics(timeline, tp.partition(), consumer.position(tp) - 1);
                })
                .collect(toList());
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
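The end-offset lookup is the classic assign-then-seek pattern on a consumer with no subscription. The same idea in isolation, as a sketch assuming a bare Consumer<byte[], byte[]> and a hypothetical topic name:

import java.util.List;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.TopicPartition;

static List<Long> lastOffsets(final Consumer<byte[], byte[]> consumer, final String topic) {
    final List<TopicPartition> tps = consumer.partitionsFor(topic).stream()
            .map(p -> new TopicPartition(p.topic(), p.partition()))
            .collect(Collectors.toList());
    consumer.assign(tps);
    consumer.seekToEnd(tps);
    return tps.stream()
            .map(tp -> consumer.position(tp) - 1) // yields -1 for an empty partition
            .collect(Collectors.toList());
}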
Use of org.apache.kafka.clients.consumer.Consumer in project nakadi by zalando.
The class KafkaTopicRepository, method loadTopicStatistics.
@Override
public List<PartitionStatistics> loadTopicStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final Map<TopicPartition, Timeline> backMap = new HashMap<>();
        for (final Timeline timeline : timelines) {
            consumer.partitionsFor(timeline.getTopic()).stream()
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .forEach(tp -> backMap.put(tp, timeline));
        }
        final List<TopicPartition> kafkaTPs = new ArrayList<>(backMap.keySet());
        consumer.assign(kafkaTPs);
        // Materialize the begin positions before the second seek: seeks are lazy
        // and are only resolved by the position() calls.
        consumer.seekToBeginning(kafkaTPs);
        final long[] begins = kafkaTPs.stream().mapToLong(consumer::position).toArray();
        consumer.seekToEnd(kafkaTPs);
        final long[] ends = kafkaTPs.stream().mapToLong(consumer::position).toArray();
        return IntStream.range(0, kafkaTPs.size())
                .mapToObj(i -> new KafkaPartitionStatistics(
                        backMap.get(kafkaTPs.get(i)),
                        kafkaTPs.get(i).partition(),
                        begins[i],
                        ends[i] - 1))
                .collect(toList());
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
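This method needs both ends of every partition, so it seeks twice over the same assignment. On clients 0.10.1 and later, the same numbers can be read without assigning or seeking at all; a sketch of that alternative (not what this Nakadi version does, and tps stands in for the assigned partition list):

import java.util.Map;
import org.apache.kafka.common.TopicPartition;

final Map<TopicPartition, Long> begins = consumer.beginningOffsets(tps);
final Map<TopicPartition, Long> ends = consumer.endOffsets(tps);
// First available offset: begins.get(tp); last published offset: ends.get(tp) - 1.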
Use of org.apache.kafka.clients.consumer.Consumer in project nakadi by zalando.
The class KafkaTopicRepository, method loadPartitionStatistics.
@Override
public List<Optional<PartitionStatistics>> loadPartitionStatistics(final Collection<TimelinePartition> partitions)
        throws ServiceUnavailableException {
    final Map<String, Set<String>> topicToPartitions = partitions.stream().collect(
            Collectors.groupingBy(
                    tp -> tp.getTimeline().getTopic(),
                    Collectors.mapping(TimelinePartition::getPartition, Collectors.toSet())));
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final List<PartitionInfo> allKafkaPartitions = topicToPartitions.keySet().stream()
                .map(consumer::partitionsFor)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
        final List<TopicPartition> partitionsToQuery = allKafkaPartitions.stream()
                .filter(pi -> topicToPartitions.get(pi.topic())
                        .contains(KafkaCursor.toNakadiPartition(pi.partition())))
                .map(pi -> new TopicPartition(pi.topic(), pi.partition()))
                .collect(Collectors.toList());
        consumer.assign(partitionsToQuery);
        consumer.seekToBeginning(partitionsToQuery);
        final List<Long> begins = partitionsToQuery.stream().map(consumer::position).collect(toList());
        consumer.seekToEnd(partitionsToQuery);
        final List<Long> ends = partitionsToQuery.stream().map(consumer::position).collect(toList());
        final List<Optional<PartitionStatistics>> result = new ArrayList<>(partitions.size());
        for (final TimelinePartition tap : partitions) {
            // Now search for an index.
            final Optional<PartitionStatistics> itemResult = IntStream.range(0, partitionsToQuery.size())
                    .filter(i -> {
                        final TopicPartition info = partitionsToQuery.get(i);
                        return info.topic().equals(tap.getTimeline().getTopic())
                                && info.partition() == KafkaCursor.toKafkaPartition(tap.getPartition());
                    })
                    .mapToObj(indexFound -> (PartitionStatistics) new KafkaPartitionStatistics(
                            tap.getTimeline(),
                            partitionsToQuery.get(indexFound).partition(),
                            begins.get(indexFound),
                            ends.get(indexFound) - 1L))
                    .findAny();
            result.add(itemResult);
        }
        return result;
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
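Results come back in the same order as the requested partitions, with Optional.empty() marking a partition that Kafka metadata did not report. A caller-side sketch, with hypothetical variable names:

final List<Optional<PartitionStatistics>> stats = topicRepository.loadPartitionStatistics(requested);
for (int i = 0; i < stats.size(); i++) {
    if (!stats.get(i).isPresent()) {
        // The i-th requested TimelinePartition has no matching Kafka partition.
    }
}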
Use of org.apache.kafka.clients.consumer.Consumer in project LogHub by fbacchella.
The class Kafka, method configure.
@Override
public boolean configure(loghub.configuration.Properties properties) {
    Properties props = new Properties();
    // The "http" scheme is only a parsing aid; each broker resolves to a host:port pair.
    URL[] brokersUrl = Helpers.stringsToUrl(brokers, port, "http", logger);
    String resolvedBrokers = Arrays.stream(brokersUrl)
            .map(i -> i.getHost() + ":" + i.getPort())
            .collect(Collectors.joining(","));
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, resolvedBrokers);
    props.put(ConsumerConfig.GROUP_ID_CONFIG, group);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer);
    // Values are consumed as raw bytes and decoded downstream.
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class.getName());
    consumer = new KafkaConsumer<>(props);
    return super.configure(properties);
}
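A consumer configured this way still has to subscribe and poll before any records arrive. A minimal receive loop as a sketch, where topic and running are hypothetical fields standing in for LogHub's actual receiver loop:

import java.util.Collections;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;

consumer.subscribe(Collections.singletonList(topic));
while (running) {
    final ConsumerRecords<?, byte[]> records = consumer.poll(100);
    for (final ConsumerRecord<?, byte[]> record : records) {
        final byte[] payload = record.value(); // raw bytes, decoded downstream
    }
}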