Use of org.zalando.nakadi.exceptions.ServiceUnavailableException in the zalando/nakadi project.
Class KafkaTopicRepository, method loadTopicEndStatistics:
/**
 * Loads, for every partition of every given timeline's topic, the offset of the last
 * published event (end position minus one).
 *
 * @param timelines timelines whose topics' end offsets should be read
 * @return one {@link PartitionEndStatistics} per kafka partition found
 * @throws ServiceUnavailableException if any kafka interaction fails
 */
@Override
public List<PartitionEndStatistics> loadTopicEndStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        // Remember which timeline each kafka partition belongs to, so the
        // statistics objects can be attributed back after the offset lookup.
        final Map<TopicPartition, Timeline> backMap = new HashMap<>();
        for (final Timeline timeline : timelines) {
            consumer.partitionsFor(timeline.getTopic()).stream()
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .forEach(tp -> backMap.put(tp, timeline));
        }
        // Plain ArrayList copy for consistency with loadTopicStatistics
        // (previously used Guava's newArrayList for no added value).
        final List<TopicPartition> kafkaTPs = new ArrayList<>(backMap.keySet());
        consumer.assign(kafkaTPs);
        consumer.seekToEnd(kafkaTPs);
        return backMap.entrySet().stream()
                .map(e -> {
                    final TopicPartition tp = e.getKey();
                    final Timeline timeline = e.getValue();
                    // position() is the offset of the NEXT record, hence -1 for the last one.
                    return new KafkaPartitionEndStatistics(timeline, tp.partition(), consumer.position(tp) - 1);
                })
                .collect(toList());
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
Use of org.zalando.nakadi.exceptions.ServiceUnavailableException in the zalando/nakadi project.
Class KafkaTopicRepository, method loadTopicStatistics:
/**
 * Reads, for every partition of every given timeline's topic, both the first
 * available offset and the offset of the last published event.
 *
 * @param timelines timelines whose topics should be inspected
 * @return one {@link PartitionStatistics} per kafka partition found
 * @throws ServiceUnavailableException if any kafka interaction fails
 */
@Override
public List<PartitionStatistics> loadTopicStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        // Attribute every kafka partition to the timeline it came from.
        final Map<TopicPartition, Timeline> partitionToTimeline = new HashMap<>();
        for (final Timeline timeline : timelines) {
            for (final PartitionInfo info : consumer.partitionsFor(timeline.getTopic())) {
                partitionToTimeline.put(new TopicPartition(info.topic(), info.partition()), timeline);
            }
        }
        final List<TopicPartition> assigned = new ArrayList<>(partitionToTimeline.keySet());
        consumer.assign(assigned);
        // First pass: beginning offsets.
        consumer.seekToBeginning(assigned);
        final List<Long> firstOffsets = new ArrayList<>(assigned.size());
        for (final TopicPartition tp : assigned) {
            firstOffsets.add(consumer.position(tp));
        }
        // Second pass: end offsets; position() points past the last record, hence -1.
        consumer.seekToEnd(assigned);
        final List<PartitionStatistics> statistics = new ArrayList<>(assigned.size());
        for (int i = 0; i < assigned.size(); ++i) {
            final TopicPartition tp = assigned.get(i);
            statistics.add(new KafkaPartitionStatistics(
                    partitionToTimeline.get(tp), tp.partition(), firstOffsets.get(i), consumer.position(tp) - 1));
        }
        return statistics;
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
Use of org.zalando.nakadi.exceptions.ServiceUnavailableException in the zalando/nakadi project.
Class KafkaTopicRepository, method loadPartitionStatistics:
/**
 * Reads first/last offsets for a specific set of (timeline, partition) pairs.
 * Partitions that do not exist in kafka yield {@link Optional#empty()} at the
 * corresponding position of the result.
 *
 * @param partitions requested (timeline, partition) pairs
 * @return statistics in the same order as {@code partitions}; empty where not found
 * @throws ServiceUnavailableException if any kafka interaction fails
 */
@Override
public List<Optional<PartitionStatistics>> loadPartitionStatistics(final Collection<TimelinePartition> partitions)
        throws ServiceUnavailableException {
    // Group requested partitions by topic so each topic's metadata is fetched only once.
    final Map<String, Set<String>> topicToPartitions = partitions.stream()
            .collect(Collectors.groupingBy(
                    tp -> tp.getTimeline().getTopic(),
                    Collectors.mapping(TimelinePartition::getPartition, Collectors.toSet())));
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final List<PartitionInfo> allKafkaPartitions = topicToPartitions.keySet().stream()
                .map(consumer::partitionsFor)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
        // Keep only the kafka partitions that were actually requested.
        final List<TopicPartition> partitionsToQuery = allKafkaPartitions.stream()
                .filter(pi -> topicToPartitions.get(pi.topic())
                        .contains(KafkaCursor.toNakadiPartition(pi.partition())))
                .map(pi -> new TopicPartition(pi.topic(), pi.partition()))
                .collect(Collectors.toList());
        consumer.assign(partitionsToQuery);
        consumer.seekToBeginning(partitionsToQuery);
        final List<Long> begins = partitionsToQuery.stream().map(consumer::position).collect(toList());
        consumer.seekToEnd(partitionsToQuery);
        final List<Long> ends = partitionsToQuery.stream().map(consumer::position).collect(toList());
        // Index queried partitions once (O(n)) instead of the previous linear scan
        // of partitionsToQuery for every requested partition (O(n * m)).
        final Map<TopicPartition, Integer> indexByPartition = new HashMap<>();
        for (int i = 0; i < partitionsToQuery.size(); ++i) {
            indexByPartition.put(partitionsToQuery.get(i), i);
        }
        final List<Optional<PartitionStatistics>> result = new ArrayList<>(partitions.size());
        for (final TimelinePartition tap : partitions) {
            final TopicPartition key = new TopicPartition(
                    tap.getTimeline().getTopic(), KafkaCursor.toKafkaPartition(tap.getPartition()));
            // position() points past the last record, hence ends.get(idx) - 1.
            final Optional<PartitionStatistics> itemResult = Optional.ofNullable(indexByPartition.get(key))
                    .map(idx -> (PartitionStatistics) new KafkaPartitionStatistics(
                            tap.getTimeline(),
                            partitionsToQuery.get(idx).partition(),
                            begins.get(idx),
                            ends.get(idx) - 1L));
            result.add(itemResult);
        }
        return result;
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
Use of org.zalando.nakadi.exceptions.ServiceUnavailableException in the zalando/nakadi project.
Class TopicRepositoryHolder, method createStoragePosition:
/**
 * Builds a storage position snapshot for the given timeline from the last
 * offsets of its topic's partitions.
 *
 * @param timeline the timeline whose storage position should be captured
 * @return a storage-specific position built from the per-partition last offsets
 * @throws NakadiRuntimeException wrapping a ServiceUnavailableException if offsets cannot be read
 */
public Timeline.StoragePosition createStoragePosition(final Timeline timeline) {
    final Storage storage = timeline.getStorage();
    try {
        final List<NakadiCursor> lastOffsets = getTopicRepository(storage)
                .loadTopicStatistics(Collections.singleton(timeline))
                .stream()
                .map(PartitionStatistics::getLast)
                .collect(Collectors.toList());
        return getTopicRepositoryCreator(storage.getType()).createStoragePosition(lastOffsets);
    } catch (final ServiceUnavailableException e) {
        throw new NakadiRuntimeException(e);
    }
}
Use of org.zalando.nakadi.exceptions.ServiceUnavailableException in the zalando/nakadi project.
Class VersionZeroConverter, method convertBatched:
/**
 * Converts a batch of version-zero subscription cursors to {@link NakadiCursor}s,
 * batching the expensive partition-statistics lookups per topic repository.
 *
 * BEFORE_OLDEST_OFFSET cursors are resolved to the "before first" position of their
 * partition; all other offsets must be purely numeric or an InvalidCursorException
 * is thrown up front.
 *
 * NOTE(review): within this visible span, result[] is never assigned before the
 * null-check loop, so EVERY cursor (not only BEFORE_OLDEST_OFFSET ones) takes the
 * statistics-lookup path. Verify against the full class whether numeric cursors
 * are supposed to be converted in code elided from this excerpt.
 */
public List<NakadiCursor> convertBatched(final List<SubscriptionCursorWithoutToken> cursors) throws InvalidCursorException, InternalNakadiException, NoSuchEventTypeException, ServiceUnavailableException {
final NakadiCursor[] result = new NakadiCursor[cursors.size()];
// Pass 1: validate offsets; BEFORE_OLDEST_OFFSET entries are skipped here and
// resolved later via partition statistics.
for (int idx = 0; idx < cursors.size(); ++idx) {
final SubscriptionCursorWithoutToken cursor = cursors.get(idx);
if (Cursor.BEFORE_OLDEST_OFFSET.equalsIgnoreCase(cursor.getOffset())) {
// Preform begin checks afterwards to optimize calls
continue;
}
if (!NUMBERS_ONLY_PATTERN.matcher(cursor.getOffset()).matches()) {
throw new InvalidCursorException(CursorError.INVALID_OFFSET, cursor);
}
}
// now it is time for massive convert.
// LinkedHashMap: insertion order must match the order of null slots in result[],
// because the missingBegins iterator below consumes values positionally.
final LinkedHashMap<SubscriptionCursorWithoutToken, NakadiCursor> beginsToConvert = new LinkedHashMap<>();
final Map<SubscriptionCursorWithoutToken, Timeline> cursorTimelines = new HashMap<>();
final Map<TopicRepository, List<SubscriptionCursorWithoutToken>> repos = new HashMap<>();
for (int i = 0; i < result.length; ++i) {
if (null == result[i]) {
// cursor requires database hit
final SubscriptionCursorWithoutToken cursor = cursors.get(i);
// Oldest active timeline for the cursor's event type holds its begin position.
final Timeline timeline = timelineService.getActiveTimelinesOrdered(cursor.getEventType()).get(0);
final TopicRepository topicRepo = timelineService.getTopicRepository(timeline);
// Placeholder; real value is filled in after the batched statistics call.
beginsToConvert.put(cursor, null);
cursorTimelines.put(cursor, timeline);
repos.computeIfAbsent(topicRepo, k -> new ArrayList<>()).add(cursor);
}
}
// One batched loadPartitionStatistics call per repository.
for (final Map.Entry<TopicRepository, List<SubscriptionCursorWithoutToken>> entry : repos.entrySet()) {
final List<Optional<PartitionStatistics>> stats = entry.getKey().loadPartitionStatistics(entry.getValue().stream().map(scwt -> new TopicRepository.TimelinePartition(cursorTimelines.get(scwt), scwt.getPartition())).collect(Collectors.toList()));
for (int idx = 0; idx < entry.getValue().size(); ++idx) {
// Reinsert doesn't change the order
// (LinkedHashMap keeps the original slot for an existing key.)
beginsToConvert.put(entry.getValue().get(idx), stats.get(idx).orElseThrow(() -> new InvalidCursorException(PARTITION_NOT_FOUND)).getBeforeFirst());
}
}
// Merge: consume resolved begin positions, in order, into the null slots of result[].
// NOTE(review): if cursors contains duplicates (by equals/hashCode), beginsToConvert
// collapses them and missingBegins would under-run — confirm duplicates cannot occur.
final Iterator<NakadiCursor> missingBegins = beginsToConvert.values().iterator();
return Stream.of(result).map(it -> null == it ? missingBegins.next() : it).collect(Collectors.toList());
}
Aggregations