
Example 6 with ClientFactoryImpl

Use of io.pravega.client.stream.impl.ClientFactoryImpl in project pravega by pravega.

Class RevisionedStreamClientTest, method testTimeoutWithStreamIterator:

@Test
public void testTimeoutWithStreamIterator() throws Exception {
    String scope = "scope";
    String stream = "stream";
    // Setup Environment
    PravegaNodeUri endpoint = new PravegaNodeUri("localhost", SERVICE_PORT);
    JavaSerializer<String> serializer = new JavaSerializer<>();
    @Cleanup MockConnectionFactoryImpl connectionFactory = new MockConnectionFactoryImpl();
    @Cleanup MockController controller = new MockController(endpoint.getEndpoint(), endpoint.getPort(), connectionFactory, false);
    createScopeAndStream(scope, stream, controller);
    MockSegmentStreamFactory streamFactory = new MockSegmentStreamFactory();
    // Setup mock
    SegmentOutputStreamFactory outFactory = mock(SegmentOutputStreamFactory.class);
    SegmentOutputStream out = mock(SegmentOutputStream.class);
    Segment segment = new Segment(scope, stream, 0);
    when(outFactory.createOutputStreamForSegment(eq(segment), any(), any(), any(DelegationTokenProvider.class))).thenReturn(out);
    SegmentInputStreamFactory inFactory = mock(SegmentInputStreamFactory.class);
    EventSegmentReader in = mock(EventSegmentReader.class);
    when(inFactory.createEventReaderForSegment(eq(segment), anyInt())).thenReturn(in);
    when(in.read(anyLong())).thenReturn(null).thenReturn(serializer.serialize("testData"));
    SegmentMetadataClientFactory metaFactory = mock(SegmentMetadataClientFactory.class);
    SegmentMetadataClient metaClient = mock(SegmentMetadataClient.class);
    when(metaFactory.createSegmentMetadataClient(any(Segment.class), any(DelegationTokenProvider.class))).thenReturn(metaClient);
    when(metaClient.getSegmentInfo()).thenReturn(CompletableFuture.completedFuture(new SegmentInfo(segment, 0, 30, false, System.currentTimeMillis())));
    @Cleanup SynchronizerClientFactory clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory, inFactory, streamFactory, streamFactory, metaFactory);
    @Cleanup RevisionedStreamClient<String> client = clientFactory.createRevisionedStreamClient(stream, serializer, SynchronizerConfig.builder().build());
    Iterator<Entry<Revision, String>> iterator = client.readFrom(new RevisionImpl(segment, 15, 1));
    assertTrue("True is expected since offset is less than end offset", iterator.hasNext());
    assertNotNull("Verify the entry is not null", iterator.next());
    verify(in, times(2)).read(anyLong());
}
Also used : MockSegmentStreamFactory(io.pravega.client.stream.mock.MockSegmentStreamFactory) JavaSerializer(io.pravega.client.stream.impl.JavaSerializer) Cleanup(lombok.Cleanup) Segment(io.pravega.client.segment.impl.Segment) SegmentMetadataClient(io.pravega.client.segment.impl.SegmentMetadataClient) SynchronizerClientFactory(io.pravega.client.SynchronizerClientFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) Entry(java.util.Map.Entry) PravegaNodeUri(io.pravega.shared.protocol.netty.PravegaNodeUri) SegmentOutputStreamFactory(io.pravega.client.segment.impl.SegmentOutputStreamFactory) SegmentOutputStream(io.pravega.client.segment.impl.SegmentOutputStream) EventSegmentReader(io.pravega.client.segment.impl.EventSegmentReader) SegmentMetadataClientFactory(io.pravega.client.segment.impl.SegmentMetadataClientFactory) MockConnectionFactoryImpl(io.pravega.client.stream.mock.MockConnectionFactoryImpl) MockController(io.pravega.client.stream.mock.MockController) SegmentInfo(io.pravega.client.segment.impl.SegmentInfo) SegmentInputStreamFactory(io.pravega.client.segment.impl.SegmentInputStreamFactory) DelegationTokenProvider(io.pravega.client.security.auth.DelegationTokenProvider) Test(org.junit.Test)
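
For orientation, the iterator this test drives through mocks is normally obtained through the public SynchronizerClientFactory API rather than by constructing ClientFactoryImpl directly. Below is a minimal, hypothetical usage sketch: the controller URI, scope, and stream names are assumptions, and the scope and stream are assumed to already exist.

import io.pravega.client.ClientConfig;
import io.pravega.client.SynchronizerClientFactory;
import io.pravega.client.state.Revision;
import io.pravega.client.state.RevisionedStreamClient;
import io.pravega.client.state.SynchronizerConfig;
import io.pravega.client.stream.impl.JavaSerializer;
import java.net.URI;
import java.util.Iterator;
import java.util.Map.Entry;

public class RevisionedStreamClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical controller endpoint; "scope" and "stream" must already exist.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        try (SynchronizerClientFactory factory =
                     SynchronizerClientFactory.withScope("scope", clientConfig);
             RevisionedStreamClient<String> client = factory.createRevisionedStreamClient(
                     "stream", new JavaSerializer<String>(), SynchronizerConfig.builder().build())) {
            // Append one entry, then iterate everything from the oldest known revision.
            client.writeUnconditionally("testData");
            Revision start = client.fetchOldestRevision();
            Iterator<Entry<Revision, String>> iterator = client.readFrom(start);
            while (iterator.hasNext()) {
                Entry<Revision, String> entry = iterator.next();
                System.out.println(entry.getKey() + " -> " + entry.getValue());
            }
        }
    }
}

As the test's assertion message notes, hasNext() is expected to return true whenever the iterator's offset is still below the segment's end offset.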

Example 7 with ClientFactoryImpl

Use of io.pravega.client.stream.impl.ClientFactoryImpl in project pravega by pravega.

Class SynchronizerTest, method testCreateStateSynchronizerError:

@Test(timeout = 20000)
public void testCreateStateSynchronizerError() {
    String streamName = "streamName";
    String scope = "scope";
    Controller controller = mock(Controller.class);
    @Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, ClientConfig.builder().build());
    // Simulate a sealed stream.
    CompletableFuture<StreamSegments> result = new CompletableFuture<>();
    result.complete(new StreamSegments(new TreeMap<>()));
    when(controller.getCurrentSegments(scope, streamName)).thenReturn(result);
    AssertExtensions.assertThrows(InvalidStreamException.class, () -> clientFactory.createStateSynchronizer(streamName, new JavaSerializer<>(), new JavaSerializer<>(), SynchronizerConfig.builder().build()));
    result = new CompletableFuture<>();
    result.completeExceptionally(new RuntimeException("Controller exception"));
    when(controller.getCurrentSegments(scope, streamName)).thenReturn(result);
    AssertExtensions.assertThrows(InvalidStreamException.class, () -> clientFactory.createStateSynchronizer(streamName, new JavaSerializer<>(), new JavaSerializer<>(), SynchronizerConfig.builder().build()));
}
Also used : ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) CompletableFuture(java.util.concurrent.CompletableFuture) Controller(io.pravega.client.control.impl.Controller) TreeMap(java.util.TreeMap) Cleanup(lombok.Cleanup) JavaSerializer(io.pravega.client.stream.impl.JavaSerializer) StreamSegments(io.pravega.client.stream.impl.StreamSegments) Test(org.junit.Test)
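
Both stubbed controller responses above (an empty StreamSegments for a sealed stream, and an exceptionally completed future for a controller failure) surface to the caller as InvalidStreamException. As a hedged illustration of how calling code might react to that, here is a small hypothetical helper; the class, method name, and rethrown exception are assumptions for illustration, not part of Pravega.

import io.pravega.client.SynchronizerClientFactory;
import io.pravega.client.state.InitialUpdate;
import io.pravega.client.state.Revisioned;
import io.pravega.client.state.StateSynchronizer;
import io.pravega.client.state.SynchronizerConfig;
import io.pravega.client.state.Update;
import io.pravega.client.stream.InvalidStreamException;
import io.pravega.client.stream.Serializer;

public final class StateSynchronizerGuard {

    private StateSynchronizerGuard() {
    }

    // Hypothetical wrapper: creates a StateSynchronizer and converts InvalidStreamException
    // (sealed or missing stream, or a failed controller call, as asserted in the test above)
    // into an error with a clearer message for the application.
    public static <S extends Revisioned> StateSynchronizer<S> createOrFail(
            SynchronizerClientFactory factory,
            String streamName,
            Serializer<Update<S>> updateSerializer,
            Serializer<InitialUpdate<S>> initialSerializer) {
        try {
            return factory.createStateSynchronizer(streamName, updateSerializer,
                    initialSerializer, SynchronizerConfig.builder().build());
        } catch (InvalidStreamException e) {
            throw new IllegalStateException(
                    "Stream " + streamName + " cannot back a StateSynchronizer", e);
        }
    }
}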

Example 8 with ClientFactoryImpl

Use of io.pravega.client.stream.impl.ClientFactoryImpl in project pravega by pravega.

Class ReadWriteTest, method readWriteTest:

@Test(timeout = 60000)
public void readWriteTest() throws InterruptedException, ExecutionException {
    String scope = "testMultiReaderWriterScope";
    String readerGroupName = "testMultiReaderWriterReaderGroup";
    // 20 readers -> 20 stream segments (to have max read parallelism)
    ScalingPolicy scalingPolicy = ScalingPolicy.fixed(20);
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(scalingPolicy).build();
    eventsReadFromPravega = new ConcurrentLinkedQueue<>();
    // data used by each of the writers.
    eventData = new AtomicLong();
    // used by readers to maintain a count of events.
    eventReadCount = new AtomicLong();
    stopReadFlag = new AtomicBoolean(false);
    ClientConfig clientConfig = ClientConfig.builder().build();
    try (ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
        StreamManager streamManager = new StreamManagerImpl(controller, cp)) {
        // create a scope
        Boolean createScopeStatus = streamManager.createScope(scope);
        log.info("Create scope status {}", createScopeStatus);
        // create a stream
        Boolean createStreamStatus = streamManager.createStream(scope, STREAM_NAME, config);
        log.info("Create stream status {}", createStreamStatus);
    }
    try (ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, controller, clientFactory)) {
        // start writing events to the stream
        log.info("Creating {} writers", NUM_WRITERS);
        List<CompletableFuture<Void>> writerList = new ArrayList<>();
        for (int i = 0; i < NUM_WRITERS; i++) {
            log.info("Starting writer{}", i);
            writerList.add(startNewWriter(eventData, clientFactory));
        }
        // create a reader group
        log.info("Creating Reader group : {}", readerGroupName);
        readerGroupManager.createReaderGroup(readerGroupName, ReaderGroupConfig.builder().stream(Stream.of(scope, STREAM_NAME)).build());
        log.info("Reader group name {} ", readerGroupManager.getReaderGroup(readerGroupName).getGroupName());
        log.info("Reader group scope {}", readerGroupManager.getReaderGroup(readerGroupName).getScope());
        // create readers
        log.info("Creating {} readers", NUM_READERS);
        List<CompletableFuture<Void>> readerList = new ArrayList<>();
        String readerName = "reader" + RandomFactory.create().nextInt(Integer.MAX_VALUE);
        // start reading events
        for (int i = 0; i < NUM_READERS; i++) {
            log.info("Starting reader{}", i);
            readerList.add(startNewReader(readerName + i, clientFactory, readerGroupName, eventsReadFromPravega, eventData, eventReadCount, stopReadFlag));
        }
        // wait for writers completion
        Futures.allOf(writerList).get();
        ExecutorServiceHelpers.shutdown(writerPool);
        // set stop read flag to true
        stopReadFlag.set(true);
        // wait for readers completion
        Futures.allOf(readerList).get();
        ExecutorServiceHelpers.shutdown(readerPool);
        // delete readergroup
        log.info("Deleting readergroup {}", readerGroupName);
        readerGroupManager.deleteReaderGroup(readerGroupName);
    }
    log.info("All writers have stopped. Setting Stop_Read_Flag. Event Written Count:{}, Event Read " + "Count: {}", eventData.get(), eventsReadFromPravega.size());
    assertEquals(TOTAL_NUM_EVENTS, eventsReadFromPravega.size());
    // check unique events.
    assertEquals(TOTAL_NUM_EVENTS, new TreeSet<>(eventsReadFromPravega).size());
    // seal the stream
    CompletableFuture<Boolean> sealStreamStatus = controller.sealStream(scope, STREAM_NAME);
    log.info("Sealing stream {}", STREAM_NAME);
    assertTrue(sealStreamStatus.get());
    // delete the stream
    CompletableFuture<Boolean> deleteStreamStatus = controller.deleteStream(scope, STREAM_NAME);
    log.info("Deleting stream {}", STREAM_NAME);
    assertTrue(deleteStreamStatus.get());
    // delete the scope
    CompletableFuture<Boolean> deleteScopeStatus = controller.deleteScope(scope);
    log.info("Deleting scope {}", scope);
    assertTrue(deleteScopeStatus.get());
    log.info("Read write test succeeds");
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) ArrayList(java.util.ArrayList) StreamManagerImpl(io.pravega.client.admin.impl.StreamManagerImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) AtomicLong(java.util.concurrent.atomic.AtomicLong) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) CompletableFuture(java.util.concurrent.CompletableFuture) StreamManager(io.pravega.client.admin.StreamManager) TreeSet(java.util.TreeSet) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ClientConfig(io.pravega.client.ClientConfig) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Test(org.junit.Test)
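
readWriteTest wires SocketConnectionFactoryImpl, ClientFactoryImpl, and ReaderGroupManagerImpl by hand because the test needs to pass in its own controller. Outside a test, the same read/write cycle is normally built from the public factory methods. The following condensed sketch shows that public-API path; the controller URI and all names are hypothetical, and error handling is omitted.

import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.admin.ReaderGroupManager;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.stream.EventRead;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.ReaderConfig;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.impl.JavaSerializer;
import java.net.URI;

public class ReadWriteSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical controller endpoint and names; adjust for the target deployment.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        String scope = "demoScope";
        String stream = "demoStream";
        String readerGroup = "demoReaderGroup";

        // Create the scope and a single-segment stream.
        try (StreamManager streamManager = StreamManager.create(clientConfig)) {
            streamManager.createScope(scope);
            streamManager.createStream(scope, stream,
                    StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
        }
        // Create a reader group covering that stream.
        try (ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, clientConfig)) {
            readerGroupManager.createReaderGroup(readerGroup,
                    ReaderGroupConfig.builder().stream(Stream.of(scope, stream)).build());
        }
        // Write one event and read it back.
        try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
             EventStreamWriter<String> writer = clientFactory.createEventWriter(stream,
                     new JavaSerializer<String>(), EventWriterConfig.builder().build());
             EventStreamReader<String> reader = clientFactory.createReader("reader-0", readerGroup,
                     new JavaSerializer<String>(), ReaderConfig.builder().build())) {
            writer.writeEvent("routingKey", "hello pravega").join();
            EventRead<String> event = reader.readNextEvent(10000);
            System.out.println("Read back: " + event.getEvent());
        }
    }
}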

Example 9 with ClientFactoryImpl

Use of io.pravega.client.stream.impl.ClientFactoryImpl in project pravega by pravega.

Class LargeEventTest, method testReadWriteWithConnectionReconnect:

@Test(timeout = 60000)
public void testReadWriteWithConnectionReconnect() throws ExecutionException, InterruptedException, TimeoutException {
    String readerGroupName = "testLargeEventReconnectReaderGroup";
    String streamName = "ConnectionReconnect";
    StreamConfiguration config = getStreamConfiguration(NUM_READERS);
    createScopeStream(SCOPE_NAME, streamName, config);
    int numEvents = 1;
    Map<Integer, List<ByteBuffer>> data = generateEventData(NUM_WRITERS, 0, numEvents, LARGE_EVENT_SIZE);
    merge(eventsWrittenToPravega, data);
    Queue<ByteBuffer> reads = new ConcurrentLinkedQueue<>();
    // Close the connection after two `send` calls.
    AtomicInteger sendCount = new AtomicInteger(0);
    Supplier<Boolean> predicate = () -> sendCount.getAndIncrement() == CLOSE_WRITE_COUNT;
    AtomicReference<Boolean> latch = new AtomicReference<>(true);
    try (ConnectionExporter connectionFactory = new ConnectionExporter(ClientConfig.builder().build(), latch, null, predicate);
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(SCOPE_NAME, controller, connectionFactory);
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(SCOPE_NAME, controller, clientFactory)) {
        // Start writing events to the stream.
        val writers = createEventWriters(streamName, NUM_WRITERS, clientFactory, eventsWrittenToPravega);
        // Create a ReaderGroup.
        createReaderGroup(readerGroupName, readerGroupManager, streamName);
        // Create Readers.
        val readers = createEventReaders(NUM_READERS, clientFactory, readerGroupName, reads);
        Futures.allOf(writers).get();
        stopReadFlag.set(true);
        Futures.allOf(readers).get();
        readerGroupManager.deleteReaderGroup(readerGroupName);
    }
    validateCleanUp(streamName);
    validateEventReads(reads, eventsWrittenToPravega);
}
Also used : lombok.val(lombok.val) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) AtomicReference(java.util.concurrent.atomic.AtomicReference) ByteBuffer(java.nio.ByteBuffer) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) List(java.util.List) ArrayList(java.util.ArrayList) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Test(org.junit.Test)
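
The connection drop in this test is driven entirely by the Supplier<Boolean> predicate: ConnectionExporter (a helper class local to LargeEventTest, not a public Pravega type) evaluates it on each send and closes the connection the first time it returns true. A standalone sketch of just that counting predicate follows; the class name and the stand-in value for CLOSE_WRITE_COUNT are hypothetical.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

public class FaultPredicateSketch {
    public static void main(String[] args) {
        final int closeWriteCount = 2; // hypothetical stand-in for CLOSE_WRITE_COUNT
        AtomicInteger sendCount = new AtomicInteger(0);
        // Returns true exactly once: on the call whose zero-based index equals closeWriteCount,
        // i.e. after closeWriteCount sends have already gone through.
        Supplier<Boolean> dropConnection = () -> sendCount.getAndIncrement() == closeWriteCount;
        for (int send = 0; send < 5; send++) {
            System.out.println("send #" + send + " -> drop connection: " + dropConnection.get());
        }
    }
}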

Example 10 with ClientFactoryImpl

Use of io.pravega.client.stream.impl.ClientFactoryImpl in project pravega by pravega.

Class LargeEventTest, method readWriteCycle:

private ConcurrentLinkedQueue<ByteBuffer> readWriteCycle(String streamName, String readerGroupName, Map<Integer, List<ByteBuffer>> writes) throws ExecutionException, InterruptedException {
    // Each read-cycle should clear these objects.
    stopReadFlag.set(false);
    // Reads should be clear, but not writes.
    eventReadCount.set(0);
    // The reads should return all events written, and not just the to be written events (writes).
    ConcurrentLinkedQueue<ByteBuffer> reads = new ConcurrentLinkedQueue<>();
    try (ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(SCOPE_NAME, controller, connectionFactory);
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(SCOPE_NAME, controller, clientFactory)) {
        // Start writing events to the stream.
        val writers = createEventWriters(streamName, writes.size(), clientFactory, writes);
        // Create a ReaderGroup.
        createReaderGroup(readerGroupName, readerGroupManager, streamName);
        // Create Readers.
        val readers = createEventReaders(NUM_READERS, clientFactory, readerGroupName, reads);
        Futures.allOf(writers).get();
        stopReadFlag.set(true);
        Futures.allOf(readers).get();
        log.info("Deleting ReaderGroup: {}", readerGroupName);
        readerGroupManager.deleteReaderGroup(readerGroupName);
    }
    return reads;
}
Also used : lombok.val(lombok.val) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) ConcurrentLinkedQueue(java.util.concurrent.ConcurrentLinkedQueue) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) ByteBuffer(java.nio.ByteBuffer) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl)
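
The coordination pattern shared by readWriteCycle and the read/write tests above is: wait for every writer future, flip an AtomicBoolean that the reader loops poll, then wait for every reader future. Below is a self-contained sketch of that pattern using plain CompletableFuture and an in-memory queue standing in for the stream; Pravega's Futures.allOf is essentially a convenience over the same composition, and every name here is hypothetical.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;

public class StopFlagSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        ConcurrentLinkedQueue<Integer> source = new ConcurrentLinkedQueue<>(); // stand-in for the stream
        ConcurrentLinkedQueue<Integer> reads = new ConcurrentLinkedQueue<>();
        AtomicBoolean stopReadFlag = new AtomicBoolean(false);

        // Writers complete after producing a fixed number of events each.
        List<CompletableFuture<Void>> writers = new ArrayList<>();
        for (int w = 0; w < 2; w++) {
            final int writerId = w;
            writers.add(CompletableFuture.runAsync(() -> {
                for (int i = 0; i < 100; i++) {
                    source.add(writerId * 100 + i);
                }
            }, pool));
        }
        // Readers keep draining until the stop flag is set and the source is empty.
        List<CompletableFuture<Void>> readers = new ArrayList<>();
        for (int r = 0; r < 2; r++) {
            readers.add(CompletableFuture.runAsync(() -> {
                while (!stopReadFlag.get() || !source.isEmpty()) {
                    Integer event = source.poll();
                    if (event != null) {
                        reads.add(event);
                    }
                }
            }, pool));
        }
        CompletableFuture.allOf(writers.toArray(new CompletableFuture[0])).get(); // all writes done
        stopReadFlag.set(true);                                                   // signal the readers
        CompletableFuture.allOf(readers.toArray(new CompletableFuture[0])).get(); // readers drained
        pool.shutdown();
        System.out.println("Read " + reads.size() + " of 200 events");
    }
}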

Aggregations

Types most frequently used alongside ClientFactoryImpl across the indexed examples, with usage counts:

ClientFactoryImpl (io.pravega.client.stream.impl.ClientFactoryImpl): 80
Test (org.junit.Test): 63
Cleanup (lombok.Cleanup): 59
SocketConnectionFactoryImpl (io.pravega.client.connection.impl.SocketConnectionFactoryImpl): 54
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 50
ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager): 44
ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory): 40
ReaderGroupManagerImpl (io.pravega.client.admin.impl.ReaderGroupManagerImpl): 39
ClientConfig (io.pravega.client.ClientConfig): 37
Stream (io.pravega.client.stream.Stream): 27
Controller (io.pravega.client.control.impl.Controller): 25
StreamImpl (io.pravega.client.stream.impl.StreamImpl): 24
HashMap (java.util.HashMap): 23
ReaderGroup (io.pravega.client.stream.ReaderGroup): 19
ScalingPolicy (io.pravega.client.stream.ScalingPolicy): 19
JavaSerializer (io.pravega.client.stream.impl.JavaSerializer): 19
EventWriterConfig (io.pravega.client.stream.EventWriterConfig): 18
URI (java.net.URI): 18
CompletableFuture (java.util.concurrent.CompletableFuture): 18
ControllerImpl (io.pravega.client.control.impl.ControllerImpl): 17