Use of org.agrona.collections.Long2LongHashMap in project agrona by real-logic.
The class DeadlineTimerWheelTest, method shouldBeAbleToIterateOverTimers:
@Test
void shouldBeAbleToIterateOverTimers()
{
    final long controlTimestamp = 0;
    final DeadlineTimerWheel wheel = new DeadlineTimerWheel(TIME_UNIT, controlTimestamp, RESOLUTION, 8);

    final long deadline1 = controlTimestamp + (15 * wheel.tickResolution());
    final long deadline2 = controlTimestamp + ((15 + 7) * wheel.tickResolution());
    final long id1 = wheel.scheduleTimer(deadline1);
    final long id2 = wheel.scheduleTimer(deadline2);

    // forEach passes (deadline, timerId) pairs, so the method reference fills the map
    // keyed by deadline; Long.MIN_VALUE is the map's missing-value sentinel
    final Long2LongHashMap timerIdByDeadlineMap = new Long2LongHashMap(Long.MIN_VALUE);
    wheel.forEach(timerIdByDeadlineMap::put);

    assertEquals(2, timerIdByDeadlineMap.size());
    assertEquals(id1, timerIdByDeadlineMap.get(deadline1));
    assertEquals(id2, timerIdByDeadlineMap.get(deadline2));
}
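The Long.MIN_VALUE passed to the constructor above is the map's missing value: get returns it for absent keys, so it must be a value that is never stored as a real mapping. A minimal standalone sketch of that contract (illustrative, not taken from either project):

    // the missing-value sentinel is returned by get() for absent keys, without boxing
    final Long2LongHashMap map = new Long2LongHashMap(Long.MIN_VALUE);
    map.put(42L, 7L);

    assert map.get(42L) == 7L;               // present key returns its value
    assert map.get(99L) == Long.MIN_VALUE;   // absent key returns the sentinel
    assert map.missingValue() == Long.MIN_VALUE;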
Use of org.agrona.collections.Long2LongHashMap in project zilla by aklivity.
The class KafkaMergedFactory, method newStream:
@Override
public MessageConsumer newStream(int msgTypeId, DirectBuffer buffer, int index, int length, MessageConsumer sender)
{
    final BeginFW begin = beginRO.wrap(buffer, index, index + length);
    final long routeId = begin.routeId();
    final long initialId = begin.streamId();
    final long authorization = begin.authorization();
    final long affinity = begin.affinity();

    // an initial (client-opened) stream id must have the low bit set
    assert (initialId & 0x0000_0000_0000_0001L) != 0L;

    final OctetsFW extension = begin.extension();
    final ExtensionFW beginEx = extensionRO.tryWrap(extension.buffer(), extension.offset(), extension.limit());
    final KafkaBeginExFW kafkaBeginEx = beginEx != null && beginEx.typeId() == kafkaTypeId
        ? kafkaBeginExRO.tryWrap(extension.buffer(), extension.offset(), extension.limit())
        : null;

    assert kafkaBeginEx != null;
    assert kafkaBeginEx.kind() == KafkaBeginExFW.KIND_MERGED;
    final KafkaMergedBeginExFW kafkaMergedBeginEx = kafkaBeginEx.merged();

    final KafkaCapabilities capabilities = kafkaMergedBeginEx.capabilities().get();
    final String16FW beginTopic = kafkaMergedBeginEx.topic();
    final String topicName = beginTopic.asString();
    final KafkaDeltaType deltaType = kafkaMergedBeginEx.deltaType().get();

    MessageConsumer newStream = null;

    final KafkaBindingConfig binding = supplyBinding.apply(routeId);
    if (binding != null && binding.merged(topicName))
    {
        final long resolvedId = routeId;
        final ArrayFW<KafkaOffsetFW> partitions = kafkaMergedBeginEx.partitions();

        // the entry with partitionId == -1 carries the default offset for unlisted partitions
        final KafkaOffsetFW partition = partitions.matchFirst(p -> p.partitionId() == -1L);
        final long defaultOffset = partition != null ? partition.partitionOffset() : HISTORICAL.value();

        // per-partition initial offsets keyed by partition id; -3 is the missing-value sentinel
        final Long2LongHashMap initialOffsetsById = new Long2LongHashMap(-3L);
        partitions.forEach(p ->
        {
            final long partitionId = p.partitionId();
            if (partitionId >= 0L)
            {
                final long partitionOffset = p.partitionOffset();
                initialOffsetsById.put(partitionId, partitionOffset);
            }
        });

        newStream = new KafkaMergedStream(
            sender, routeId, initialId, affinity, authorization, topicName,
            resolvedId, capabilities, initialOffsetsById, defaultOffset, deltaType)::onMergedMessage;
    }

    return newStream;
}
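Downstream, a merged stream would resolve a starting offset per partition from this map. A hypothetical consumer-side sketch (the helper initialOffsetFor and its fallback are assumptions, not zilla code), using only Long2LongHashMap.get and missingValue:

    // hypothetical: resolve a partition's starting offset, falling back to the
    // defaultOffset when the partition was not listed explicitly in the BEGIN extension
    long initialOffsetFor(final long partitionId)
    {
        final long offset = initialOffsetsById.get(partitionId);
        return offset != initialOffsetsById.missingValue() ? offset : defaultOffset;
    }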
Use of org.agrona.collections.Long2LongHashMap in project agrona by real-logic.
The class DeadlineTimerWheelTest, method shouldExpandTickAllocation:
@Test
void shouldExpandTickAllocation()
{
    final int tickAllocation = 4;
    final int ticksPerWheel = 8;
    final DeadlineTimerWheel wheel = new DeadlineTimerWheel(TIME_UNIT, 0, RESOLUTION, ticksPerWheel, tickAllocation);

    // schedule one more timer than the initial per-tick allocation to force the wheel to expand
    final int timerCount = tickAllocation + 1;
    final long[] timerIds = new long[timerCount];
    for (int i = 0; i < timerCount; i++)
    {
        timerIds[i] = wheel.scheduleTimer(i + 1L);
    }

    for (int i = 0; i < timerCount; i++)
    {
        assertEquals(i + 1L, wheel.deadline(timerIds[i]));
    }

    // note: the handler receives the poll time as now, not each timer's original deadline
    final Long2LongHashMap deadlineByTimerId = new Long2LongHashMap(Long.MIN_VALUE);
    final int expiredCount = wheel.poll(
        timerCount + 1L,
        (timeUnit, now, timerId) ->
        {
            deadlineByTimerId.put(timerId, now);
            return true;
        },
        timerCount);

    assertEquals(timerCount, expiredCount);
    assertEquals(timerCount, deadlineByTimerId.size());
}
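The poll call above expires up to expiryLimit timers whose deadlines have passed; a handler may also return false to leave the timer scheduled and abort the poll. A minimal sketch of those semantics, with illustrative values that are not from the test:

    // illustrative wheel: millisecond units, start time 0, 1 ms resolution, 8 ticks
    final DeadlineTimerWheel wheel = new DeadlineTimerWheel(TimeUnit.MILLISECONDS, 0, 1, 8);
    final long timerId = wheel.scheduleTimer(5);

    // handler declines: nothing is counted as expired and the timer stays scheduled
    int expired = wheel.poll(10, (timeUnit, now, id) -> false, Integer.MAX_VALUE);
    assert expired == 0 && wheel.timerCount() == 1;

    // handler accepts: the timer expires and is removed from the wheel
    expired = wheel.poll(10, (timeUnit, now, id) -> true, Integer.MAX_VALUE);
    assert expired == 1 && wheel.timerCount() == 0;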