Use of org.zalando.nakadi.domain.Timeline in project nakadi by zalando.
Class KafkaTopicRepository, method loadTopicEndStatistics:
@Override
public List<PartitionEndStatistics> loadTopicEndStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final Map<TopicPartition, Timeline> backMap = new HashMap<>();
        for (final Timeline timeline : timelines) {
            consumer.partitionsFor(timeline.getTopic()).stream()
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .forEach(tp -> backMap.put(tp, timeline));
        }
        final List<TopicPartition> kafkaTPs = newArrayList(backMap.keySet());
        consumer.assign(kafkaTPs);
        consumer.seekToEnd(kafkaTPs);
        return backMap.entrySet().stream()
                .map(e -> {
                    final TopicPartition tp = e.getKey();
                    final Timeline timeline = e.getValue();
                    return new KafkaPartitionEndStatistics(timeline, tp.partition(), consumer.position(tp) - 1);
                })
                .collect(toList());
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
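loadTopicEndStatistics builds on the standard Kafka consumer sequence of assign, seekToEnd and position. A minimal standalone sketch of that sequence outside Nakadi; the broker address and topic name are placeholders, not values from the code above:

import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

public class EndOffsetSketch {
    public static void main(final String[] args) {
        final Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker
        try (KafkaConsumer<byte[], byte[]> consumer =
                new KafkaConsumer<>(props, new ByteArrayDeserializer(), new ByteArrayDeserializer())) {
            // Discover the partitions of the topic and address them as TopicPartition keys.
            final List<TopicPartition> tps = consumer.partitionsFor("example-topic").stream() // placeholder topic
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .collect(Collectors.toList());
            consumer.assign(tps);
            consumer.seekToEnd(tps);
            for (final TopicPartition tp : tps) {
                // position() returns the offset of the next record, so the last written offset is position - 1.
                System.out.println(tp + " last offset: " + (consumer.position(tp) - 1));
            }
        }
    }
}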
Class KafkaTopicRepository, method loadTopicStatistics:
@Override
public List<PartitionStatistics> loadTopicStatistics(final Collection<Timeline> timelines)
        throws ServiceUnavailableException {
    try (Consumer<byte[], byte[]> consumer = kafkaFactory.getConsumer()) {
        final Map<TopicPartition, Timeline> backMap = new HashMap<>();
        for (final Timeline timeline : timelines) {
            consumer.partitionsFor(timeline.getTopic()).stream()
                    .map(p -> new TopicPartition(p.topic(), p.partition()))
                    .forEach(tp -> backMap.put(tp, timeline));
        }
        final List<TopicPartition> kafkaTPs = new ArrayList<>(backMap.keySet());
        consumer.assign(kafkaTPs);
        consumer.seekToBeginning(kafkaTPs);
        final long[] begins = kafkaTPs.stream().mapToLong(consumer::position).toArray();
        consumer.seekToEnd(kafkaTPs);
        final long[] ends = kafkaTPs.stream().mapToLong(consumer::position).toArray();
        return IntStream.range(0, kafkaTPs.size())
                .mapToObj(i -> new KafkaPartitionStatistics(
                        backMap.get(kafkaTPs.get(i)),
                        kafkaTPs.get(i).partition(),
                        begins[i],
                        ends[i] - 1))
                .collect(toList());
    } catch (final Exception e) {
        throw new ServiceUnavailableException("Error occurred when fetching partitions offsets", e);
    }
}
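loadTopicStatistics extends the same idea with a second seek pass: seekToBeginning yields the first available offset per partition, seekToEnd the offset of the next record to be written, so the last stored offset is end - 1 and the number of stored events is end - begin. A small sketch of that two-pass pattern against a plain Kafka Consumer; the helper name and the printing are illustrative only, not part of Nakadi:

import java.util.List;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.TopicPartition;

final class OffsetRangeSketch {
    // Prints first/last offsets per partition; the number of stored events is end - begin.
    static void printOffsetRanges(final Consumer<byte[], byte[]> consumer, final List<TopicPartition> tps) {
        consumer.assign(tps);
        consumer.seekToBeginning(tps);
        final long[] begins = tps.stream().mapToLong(consumer::position).toArray();
        consumer.seekToEnd(tps);
        for (int i = 0; i < tps.size(); i++) {
            final long end = consumer.position(tps.get(i));
            System.out.printf("%s first=%d last=%d available=%d%n",
                    tps.get(i), begins[i], end - 1, end - begins[i]);
        }
    }
}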
Class VersionZeroConverter, method convert:
@Override
public NakadiCursor convert(final String eventTypeStr, final Cursor cursor)
        throws InternalNakadiException, NoSuchEventTypeException, ServiceUnavailableException,
        InvalidCursorException {
    final String offset = cursor.getOffset();
    if (Cursor.BEFORE_OLDEST_OFFSET.equalsIgnoreCase(offset)) {
        final Timeline timeline = timelineService.getActiveTimelinesOrdered(eventTypeStr).get(0);
        return timelineService.getTopicRepository(timeline)
                .loadPartitionStatistics(timeline, cursor.getPartition())
                .orElseThrow(() -> new InvalidCursorException(PARTITION_NOT_FOUND))
                .getBeforeFirst();
    } else if (!NUMBERS_ONLY_PATTERN.matcher(offset).matches()) {
        throw new InvalidCursorException(CursorError.INVALID_OFFSET, cursor);
    }
    final Timeline timeline = timelineService.getAllTimelinesOrdered(eventTypeStr).get(0);
    if (offset.startsWith("-")) {
        return NakadiCursor.of(timeline, cursor.getPartition(), cursor.getOffset());
    } else {
        return NakadiCursor.of(timeline, cursor.getPartition(),
                StringUtils.leftPad(cursor.getOffset(), VERSION_ZERO_MIN_OFFSET_LENGTH, '0'));
    }
}
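For numeric offsets the converter normalizes the string by left-padding it with zeros, giving version-zero offsets a fixed width so they also compare consistently as strings. A tiny illustration of that normalization, assuming Apache Commons Lang's StringUtils (as in the snippet) and an illustrative width of 18 in place of the real VERSION_ZERO_MIN_OFFSET_LENGTH constant:

import org.apache.commons.lang3.StringUtils;

final class OffsetPaddingSketch {
    public static void main(final String[] args) {
        final int assumedWidth = 18; // illustrative stand-in for VERSION_ZERO_MIN_OFFSET_LENGTH
        // Prints 000000000000000042: fixed-width offsets keep string comparison aligned with numeric order.
        System.out.println(StringUtils.leftPad("42", assumedWidth, '0'));
    }
}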
Class VersionZeroConverter, method convertBatched:
public List<NakadiCursor> convertBatched(final List<SubscriptionCursorWithoutToken> cursors)
        throws InvalidCursorException, InternalNakadiException, NoSuchEventTypeException,
        ServiceUnavailableException {
    final NakadiCursor[] result = new NakadiCursor[cursors.size()];
    for (int idx = 0; idx < cursors.size(); ++idx) {
        final SubscriptionCursorWithoutToken cursor = cursors.get(idx);
        if (Cursor.BEFORE_OLDEST_OFFSET.equalsIgnoreCase(cursor.getOffset())) {
            // Perform the begin checks afterwards to optimize the number of calls
            continue;
        }
        if (!NUMBERS_ONLY_PATTERN.matcher(cursor.getOffset()).matches()) {
            throw new InvalidCursorException(CursorError.INVALID_OFFSET, cursor);
        }
    }
    // Now it is time for the bulk conversion.
    final LinkedHashMap<SubscriptionCursorWithoutToken, NakadiCursor> beginsToConvert = new LinkedHashMap<>();
    final Map<SubscriptionCursorWithoutToken, Timeline> cursorTimelines = new HashMap<>();
    final Map<TopicRepository, List<SubscriptionCursorWithoutToken>> repos = new HashMap<>();
    for (int i = 0; i < result.length; ++i) {
        if (null == result[i]) {
            // This cursor requires a database hit
            final SubscriptionCursorWithoutToken cursor = cursors.get(i);
            final Timeline timeline = timelineService.getActiveTimelinesOrdered(cursor.getEventType()).get(0);
            final TopicRepository topicRepo = timelineService.getTopicRepository(timeline);
            beginsToConvert.put(cursor, null);
            cursorTimelines.put(cursor, timeline);
            repos.computeIfAbsent(topicRepo, k -> new ArrayList<>()).add(cursor);
        }
    }
    for (final Map.Entry<TopicRepository, List<SubscriptionCursorWithoutToken>> entry : repos.entrySet()) {
        final List<Optional<PartitionStatistics>> stats = entry.getKey().loadPartitionStatistics(
                entry.getValue().stream()
                        .map(scwt -> new TopicRepository.TimelinePartition(
                                cursorTimelines.get(scwt), scwt.getPartition()))
                        .collect(Collectors.toList()));
        for (int idx = 0; idx < entry.getValue().size(); ++idx) {
            // Reinsertion does not change the order
            beginsToConvert.put(entry.getValue().get(idx),
                    stats.get(idx).orElseThrow(() -> new InvalidCursorException(PARTITION_NOT_FOUND))
                            .getBeforeFirst());
        }
    }
    final Iterator<NakadiCursor> missingBegins = beginsToConvert.values().iterator();
    return Stream.of(result).map(it -> null == it ? missingBegins.next() : it).collect(Collectors.toList());
}
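The batched variant avoids one statistics lookup per cursor: cursors that need a "begin" resolution are grouped per TopicRepository with computeIfAbsent, resolved in a single loadPartitionStatistics call per repository, and reinserted into a LinkedHashMap so the original request order survives. A generic sketch of that group-and-merge pattern with placeholder types; it is not the Nakadi API and assumes each input item appears only once:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
import java.util.function.Function;

final class GroupAndMergeSketch {
    // Groups items by a key, resolves every group in one batch call, and returns results in input order.
    static <K, V, R> List<R> resolveInOrder(final List<V> items,
                                            final Function<V, K> keyOf,
                                            final BiFunction<K, List<V>, List<R>> batchLookup) {
        final Map<K, List<V>> groups = new HashMap<>();
        for (final V item : items) {
            groups.computeIfAbsent(keyOf.apply(item), k -> new ArrayList<>()).add(item);
        }
        final LinkedHashMap<V, R> ordered = new LinkedHashMap<>();
        items.forEach(i -> ordered.put(i, null)); // reserve a slot per item, in input order
        for (final Map.Entry<K, List<V>> e : groups.entrySet()) {
            final List<R> results = batchLookup.apply(e.getKey(), e.getValue());
            for (int i = 0; i < e.getValue().size(); i++) {
                ordered.put(e.getValue().get(i), results.get(i)); // reinsertion keeps the original order
            }
        }
        return new ArrayList<>(ordered.values());
    }
}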
Class VersionOneConverter, method findCorrectTimelinedCursor:
private NakadiCursor findCorrectTimelinedCursor(
        final String eventType, final int order, final String partition, final String offset)
        throws InternalNakadiException, NoSuchEventTypeException, InvalidCursorException {
    final List<Timeline> timelines = eventTypeCache.getTimelinesOrdered(eventType);
    final Iterator<Timeline> timelineIterator = timelines.iterator();
    Timeline timeline = null;
    while (timelineIterator.hasNext()) {
        final Timeline t = timelineIterator.next();
        if (t.getOrder() == order) {
            timeline = t;
            break;
        }
    }
    if (null == timeline) {
        throw new InvalidCursorException(CursorError.UNAVAILABLE);
    }
    NakadiCursor cursor = NakadiCursor.of(timeline, partition, offset);
    while (cursor.isLast()) {
        // No hasNext() check is needed here: the latest offset is never set for the last timeline,
        // so the roll-over stops before the iterator is exhausted
        timeline = timelineIterator.next();
        cursor = NakadiCursor.of(timeline, partition, StaticStorageWorkerFactory.get(timeline).getBeforeFirstOffset());
    }
    return cursor;
}
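findCorrectTimelinedCursor first locates the timeline with the requested order and then reuses the same iterator to roll the cursor over to the next timeline whenever it points at the last offset of a closed timeline. A generic sketch of that find-then-roll-over pattern with placeholder types and predicates; it is not the Nakadi API:

import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.function.Predicate;

final class RollOverSketch {
    // Finds the first element matching "wanted", then keeps advancing with the same iterator while
    // "rollOver" holds; the caller must guarantee the roll-over stops before the list is exhausted.
    static <T> T findThenRollOver(final List<T> ordered, final Predicate<T> wanted, final Predicate<T> rollOver) {
        final Iterator<T> it = ordered.iterator();
        T current = null;
        while (it.hasNext()) {
            final T candidate = it.next();
            if (wanted.test(candidate)) {
                current = candidate;
                break;
            }
        }
        if (current == null) {
            throw new NoSuchElementException("no element with the requested order");
        }
        while (rollOver.test(current)) {
            current = it.next();
        }
        return current;
    }
}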