
Example 6 with PositionImpl

Use of io.pravega.client.stream.impl.PositionImpl in project pravega by pravega.

From class ReadTest, method testEventPositions.

/**
 * This test performs concurrent writes, reads and position checks on a Stream. Readers and checkers exercise the
 * lazy construction of PositionImpl objects while the internal segmentOffsetUpdates list in EventStreamReaderImpl is
 * being updated with newly read events. The test generates enough events for the segmentOffsetUpdates list in
 * EventStreamReaderImpl to be filled and cleaned at least once. It verifies the thread safety of the new
 * optimization in EventStreamReaderImpl that avoids generating segmentOffset maps on every event read, and it also
 * checks the correctness of the segment offsets returned by PositionImpl.
 */
@Test(timeout = 60000)
public void testEventPositions() {
    String endpoint = "localhost";
    String streamName = "eventPositions";
    String readerGroup = "groupPositions";
    String scope = "scopePositions";
    // Generate enough events so that the internal segment offset update buffer in EventStreamReaderImpl is
    // emptied and filled again.
    int eventsToWrite = 2000;
    BlockingQueue<Entry<Integer, PositionImpl>> readEventsPositions = new ArrayBlockingQueue<>(eventsToWrite);
    @Cleanup("shutdown") ScheduledExecutorService readersWritersAndCheckers = ExecutorServiceHelpers.newScheduledThreadPool(4, "readers-writers-checkers");
    AtomicInteger finishedProcesses = new AtomicInteger(0);
    int port = TestUtils.getAvailableListenPort();
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, NoOpScheduledExecutor.get());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(scope, endpoint, port);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().groupRefreshTimeMillis(1000).stream(Stream.of(scope, streamName)).build();
    streamManager.createScope(scope);
    // Create a Stream with 2 segments.
    streamManager.createStream(scope, streamName, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(2)).build());
    streamManager.createReaderGroup(readerGroup, groupConfig);
    JavaSerializer<String> serializer = new JavaSerializer<>();
    @Cleanup EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("reader1", readerGroup, serializer, ReaderConfig.builder().build());
    @Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("reader2", readerGroup, serializer, ReaderConfig.builder().build());
    // Leave some time for readers to re-balance the segments and acquire one each.
    Exceptions.handleInterrupted(() -> Thread.sleep(2000));
    // Start writers and readers in parallel.
    CompletableFuture<Void> reader1Future = CompletableFuture.runAsync(() -> {
        readAndQueueEvents(reader1, eventsToWrite, readEventsPositions);
        finishedProcesses.incrementAndGet();
    }, readersWritersAndCheckers);
    CompletableFuture<Void> reader2Future = CompletableFuture.runAsync(() -> {
        readAndQueueEvents(reader2, eventsToWrite, readEventsPositions);
        finishedProcesses.incrementAndGet();
    }, readersWritersAndCheckers);
    CompletableFuture<Void> writerFuture = CompletableFuture.runAsync(() -> {
        for (int i = 0; i < eventsToWrite; i++) {
            producer.writeEvent("segment1", "a");
            producer.writeEvent("segment2", "b");
            Exceptions.handleInterrupted(() -> Thread.sleep(1));
        }
        finishedProcesses.incrementAndGet();
    }, readersWritersAndCheckers);
    // This process accesses the positions read by the reader threads, which means that this thread is concurrently
    // reading from the shared segmentOffsetUpdates list while the readers are appending data to it.
    CompletableFuture<Void> checkOffsets = CompletableFuture.runAsync(() -> {
        // Each event payload is a 1-char string, assumed to occupy sizeOfEvent bytes on the segment once serialized.
        int sizeOfEvent = 16;
        while (finishedProcesses.get() < 2) {
            Entry<Integer, PositionImpl> element;
            try {
                element = readEventsPositions.poll(1, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            int numberOfSegments = element.getValue().getOwnedSegmentsWithOffsets().size();
            assertEquals("Reader owning too many segments.", 1, numberOfSegments);
            // The segment position should increase by sizeOfEvent every event.
            long segmentPositionOffset = element.getValue().getOwnedSegmentsWithOffsets().values().iterator().next();
            assertEquals("Wrong event position", sizeOfEvent * element.getKey(), segmentPositionOffset);
        }
        finishedProcesses.incrementAndGet();
    }, readersWritersAndCheckers);
    // Wait for all futures to complete.
    CompletableFuture.allOf(writerFuture, reader1Future, reader2Future, checkOffsets).join();
    // Any failure while reading, writing or checking positions will make this assertion fail.
    assertEquals(4, finishedProcesses.get());
    ExecutorServiceHelpers.shutdown(readersWritersAndCheckers);
}
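
The readAndQueueEvents helper is referenced above but not shown in this excerpt. Below is a minimal sketch of what such a helper could look like, assuming it pairs a per-reader event counter with the PositionImpl obtained from each EventRead; the actual implementation in ReadTest may differ. In addition to the imports listed under "Also used", it would need java.util.AbstractMap and io.pravega.client.stream.EventRead.

// Hypothetical sketch of the readAndQueueEvents helper; not necessarily the actual ReadTest implementation.
private void readAndQueueEvents(EventStreamReader<String> reader, int eventsToWrite,
                                BlockingQueue<Entry<Integer, PositionImpl>> readEventsPositions) {
    int eventCount = 1;
    for (int i = 0; i < eventsToWrite; i++) {
        // Block for up to 1 second waiting for the next event; getEvent() returns null if none arrived in time.
        EventRead<String> event = reader.readNextEvent(1000);
        if (event.getEvent() != null) {
            // Pair the per-reader event count with the position after this read, so the checker thread can
            // verify that the offset of the single owned segment equals eventCount * sizeOfEvent.
            readEventsPositions.offer(new AbstractMap.SimpleEntry<>(eventCount++, (PositionImpl) event.getPosition()));
        }
    }
}

Queuing the position alongside the running event count is what lets the checker thread assert a deterministic offset (count multiplied by the serialized event size) without coordinating with the readers beyond the shared blocking queue.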
Also used : PositionImpl(io.pravega.client.stream.impl.PositionImpl) Cleanup(lombok.Cleanup) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) JavaSerializer(io.pravega.client.stream.impl.JavaSerializer) MockClientFactory(io.pravega.client.stream.mock.MockClientFactory) Entry(java.util.Map.Entry) ReadResultEntry(io.pravega.segmentstore.contracts.ReadResultEntry) CompletableFuture(java.util.concurrent.CompletableFuture) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) MockStreamManager(io.pravega.client.stream.mock.MockStreamManager) Test(org.junit.Test)

Aggregations

PositionImpl (io.pravega.client.stream.impl.PositionImpl)6 Test (org.junit.Test)6 Position (io.pravega.client.stream.Position)4 CheckpointStoreException (io.pravega.controller.store.checkpoint.CheckpointStoreException)4 CheckpointStore (io.pravega.controller.store.checkpoint.CheckpointStore)3 SegmentWithRange (io.pravega.client.stream.impl.SegmentWithRange)2 CheckpointStoreFactory (io.pravega.controller.store.checkpoint.CheckpointStoreFactory)2 AssertExtensions (io.pravega.test.common.AssertExtensions)2 IOException (java.io.IOException)2 Collections (java.util.Collections)2 UUID (java.util.UUID)2 CompletableFuture (java.util.concurrent.CompletableFuture)2 Predicate (java.util.function.Predicate)2 Cleanup (lombok.Cleanup)2 CuratorFramework (org.apache.curator.framework.CuratorFramework)2 CuratorFrameworkFactory (org.apache.curator.framework.CuratorFrameworkFactory)2 ZKHelper (io.pravega.cli.admin.utils.ZKHelper)1 EventStreamClientFactory (io.pravega.client.EventStreamClientFactory)1 ConnectionPool (io.pravega.client.connection.impl.ConnectionPool)1 Segment (io.pravega.client.segment.impl.Segment)1