
Example 86 with StreamConfiguration

Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.

From the class StreamsAndScopesManagementTest, method testCreateUpdateDeleteStreamTag.

private void testCreateUpdateDeleteStreamTag(String scope) {
    final ImmutableSet<String> tagSet1 = ImmutableSet.of("t1", "t2", "t3");
    final ImmutableSet<String> tagSet2 = ImmutableSet.of("t3", "t4", "t5");
    // Create and Update Streams
    for (int j = 1; j <= TEST_MAX_STREAMS; j++) {
        StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(j)).build();
        final String stream = "stream" + j;
        log.info("creating a new stream in scope {}/{}", stream, scope);
        streamManager.createStream(scope, stream, config);
        log.info("updating the stream in scope {}/{}", stream, scope);
        streamManager.updateStream(scope, stream, config.toBuilder().tags(tagSet1).build());
        assertEquals(tagSet1, streamManager.getStreamTags(scope, stream));
    }
    // Check the size of streams with tagName t1
    assertEquals(TEST_MAX_STREAMS, newArrayList(streamManager.listStreams(scope, "t1")).size());
    // Check if the lists of tag t3 and t1 are equal
    assertEquals(newArrayList(streamManager.listStreams(scope, "t3")), newArrayList(streamManager.listStreams(scope, "t1")));
    // Update the streams with new tagSet
    List<CompletableFuture<Void>> futures = new ArrayList<>();
    for (int j = 1; j <= TEST_MAX_STREAMS; j++) {
        StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(j)).build();
        final String stream = "stream" + j;
        log.info("updating the stream tag scope {}/{}", stream, scope);
        futures.add(CompletableFuture.runAsync(() -> streamManager.updateStream(scope, stream, config.toBuilder().clearTags().tags(tagSet2).build())));
    }
    assertEquals(TEST_MAX_STREAMS, futures.size());
    CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
    // Check if the update was successfully done
    assertTrue(newArrayList(streamManager.listStreams(scope, "t1")).isEmpty());
    assertEquals(TEST_MAX_STREAMS, newArrayList(streamManager.listStreams(scope, "t4")).size());
    final int tagT3Size = newArrayList(streamManager.listStreams(scope, "t3")).size();
    final int tagT4Size = newArrayList(streamManager.listStreams(scope, "t4")).size();
    log.info("list size of t3 tags and t4 are {}/{}", tagT3Size, tagT4Size);
    assertEquals(tagT3Size, tagT4Size);
    // seal and delete stream
    for (int j = 1; j <= TEST_MAX_STREAMS; j++) {
        final String stream = "stream" + j;
        streamManager.sealStream(scope, stream);
        log.info("deleting the stream in scope {}/{}", stream, scope);
        streamManager.deleteStream(scope, stream);
    }
    // Check if list streams is updated.
    assertTrue(newArrayList(streamManager.listStreams(scope)).isEmpty());
}
Also used : CompletableFuture(java.util.concurrent.CompletableFuture) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ArrayList(java.util.ArrayList) Lists.newArrayList(com.google.common.collect.Lists.newArrayList)
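
For readers who want the tag workflow above outside of the test harness, here is a minimal standalone sketch using only calls that appear in the example (StreamManager.create, createStream, updateStream via the builder, getStreamTags, listStreams). The controller URI, scope, and stream names are hypothetical placeholders, not values from the test.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.StreamConfiguration;
import java.net.URI;

public class StreamTagSketch {
    public static void main(String[] args) {
        // Hypothetical controller endpoint; replace with a real one.
        URI controller = URI.create("tcp://localhost:9090");
        try (StreamManager streamManager = StreamManager.create(controller)) {
            streamManager.createScope("demoScope");
            // Build a configuration with one segment and two tags.
            StreamConfiguration config = StreamConfiguration.builder()
                    .scalingPolicy(ScalingPolicy.fixed(1))
                    .tags(ImmutableSet.of("t1", "t2"))
                    .build();
            streamManager.createStream("demoScope", "demoStream", config);
            // Read the tags back, then list every stream in the scope carrying tag "t1".
            System.out.println(streamManager.getStreamTags("demoScope", "demoStream"));
            System.out.println(Lists.newArrayList(streamManager.listStreams("demoScope", "t1")));
        }
    }
}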

Example 87 with StreamConfiguration

Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.

From the class ControllerFailoverTest, method createStream.

private void createStream(Controller controller, String scope, String stream, ScalingPolicy scalingPolicy) {
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(scalingPolicy).build();
    controller.createStream(scope, stream, config).join();
}
Also used : StreamConfiguration(io.pravega.client.stream.StreamConfiguration)
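
The helper simply wraps the asynchronous Controller.createStream call and blocks on join() to keep the test sequential. As a hedged illustration of the scalingPolicy parameter it accepts, the sketch below contrasts a fixed policy (what these tests use) with an event-rate policy; the byEventRate arguments (target rate, scale factor, minimum segments) come from the client API in general and are not taken from this test.

import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.StreamConfiguration;

public class ScalingPolicySketch {
    public static void main(String[] args) {
        // Fixed number of segments, as used by createStream above.
        StreamConfiguration fixed = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.fixed(3))
                .build();
        // Auto-scaling by event rate: target events/second, scale factor, minimum segments.
        StreamConfiguration autoScaled = StreamConfiguration.builder()
                .scalingPolicy(ScalingPolicy.byEventRate(100, 2, 1))
                .build();
        System.out.println(fixed.getScalingPolicy());
        System.out.println(autoScaled.getScalingPolicy());
    }
}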

Example 88 with StreamConfiguration

Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.

From the class ReaderGroupStreamCutUpdateTest, method testStreamcutsUpdateInReaderGroup.

@Test(timeout = 60000)
public void testStreamcutsUpdateInReaderGroup() throws Exception {
    final String scope = "testStreamcutsUpdateInReaderGroup";
    final String stream = "myStream";
    final String readerGroupName = "testStreamcutsUpdateInReaderGroupRG";
    final int checkpointingIntervalMs = 2000;
    final int readerSleepInterval = 250;
    final int numEvents = 100;
    // First, create the stream.
    @Cleanup StreamManager streamManager = StreamManager.create(controllerURI);
    Assert.assertTrue(streamManager.createScope(scope));
    StreamConfiguration streamConfiguration = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(2)).build();
    streamManager.createStream(scope, stream, streamConfiguration);
    // Write some events in the stream.
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, ClientConfig.builder().controllerURI(controllerURI).build());
    writeEvents(clientFactory, stream, numEvents);
    // Read the events and test that positions are getting updated.
    ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, stream)).automaticCheckpointIntervalMillis(checkpointingIntervalMs).build();
    @Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, controllerURI);
    readerGroupManager.createReaderGroup(readerGroupName, readerGroupConfig);
    ReaderGroup readerGroup = readerGroupManager.getReaderGroup(readerGroupName);
    @Cleanup EventStreamReader<Double> reader = clientFactory.createReader("myReader", readerGroupName, new JavaSerializer<>(), ReaderConfig.builder().build());
    Map<Stream, StreamCut> currentStreamcuts = readerGroup.getStreamCuts();
    EventRead<Double> eventRead;
    int lastIteration = 0, iteration = 0;
    int assertionFrequency = checkpointingIntervalMs / readerSleepInterval;
    do {
        eventRead = reader.readNextEvent(5000);
        // Check that the streamcuts are being updated periodically via automatic reader group checkpoints.
        if (iteration != lastIteration && iteration % assertionFrequency == 0) {
            log.info("Comparing streamcuts: {} / {} in iteration {}.", currentStreamcuts, readerGroup.getStreamCuts(), iteration);
            Assert.assertNotEquals(currentStreamcuts, readerGroup.getStreamCuts());
            currentStreamcuts = readerGroup.getStreamCuts();
            lastIteration = iteration;
        }
        Thread.sleep(readerSleepInterval);
        if (!eventRead.isCheckpoint()) {
            iteration++;
        }
    } while ((eventRead.isCheckpoint() || eventRead.getEvent() != null) && iteration < numEvents);
}
Also used : ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamCut(io.pravega.client.stream.StreamCut) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) StreamManager(io.pravega.client.admin.StreamManager) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Stream(io.pravega.client.stream.Stream) Test(org.junit.Test)
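
Stripped of its assertions, the pattern this test exercises is: enable automatic checkpoints on the reader group and poll ReaderGroup.getStreamCuts() while a reader keeps consuming. A minimal sketch of that pattern follows; the URI, names, and loop bounds are placeholders, and only calls already shown in the test are assumed.

import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.admin.ReaderGroupManager;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.ReaderConfig;
import io.pravega.client.stream.ReaderGroup;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.impl.JavaSerializer;
import java.net.URI;

public class StreamCutPollingSketch {
    public static void main(String[] args) throws InterruptedException {
        // Hypothetical controller endpoint, scope, stream, and group names.
        URI controller = URI.create("tcp://localhost:9090");
        String scope = "demoScope", stream = "demoStream", group = "demoGroup";
        ClientConfig clientConfig = ClientConfig.builder().controllerURI(controller).build();
        ReaderGroupConfig groupConfig = ReaderGroupConfig.builder()
                .stream(Stream.of(scope, stream))
                .automaticCheckpointIntervalMillis(2000)   // checkpoints drive stream-cut updates
                .build();
        try (ReaderGroupManager groupManager = ReaderGroupManager.withScope(scope, controller);
             EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig)) {
            groupManager.createReaderGroup(group, groupConfig);
            ReaderGroup readerGroup = groupManager.getReaderGroup(group);
            try (EventStreamReader<Double> reader = clientFactory.createReader(
                    "reader-1", group, new JavaSerializer<>(), ReaderConfig.builder().build())) {
                for (int i = 0; i < 10; i++) {
                    reader.readNextEvent(1000);            // keep reading so checkpoints can complete
                    System.out.println(readerGroup.getStreamCuts());
                    Thread.sleep(500);
                }
            }
        }
    }
}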

Example 89 with StreamConfiguration

Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.

From the class RestoreBackUpDataRecoveryTest, method testDurableDataLogFailRecoveryWatermarking.

/**
 * Tests the data recovery scenario with watermarking events.
 *  What the test does, step by step:
 *  1. Starts Pravega locally with just 4 segment containers.
 *  2. Writes {@link #TOTAL_NUM_EVENTS} events to a segment with watermarks.
 *  3. Waits for all created segments to be flushed to Long Term Storage.
 *  4. Shuts down the controller, segment store and BookKeeper/ZooKeeper.
 *  5. Creates a backup of the container metadata segments and their attribute segments before deleting them from Long Term Storage.
 *  6. Starts 4 debug segment containers using a new BookKeeper/ZooKeeper and the Long Term Storage.
 *  7. Re-creates the container metadata segments in DurableLog and lets them be flushed to Long Term Storage.
 *  8. Starts the segment store and controller.
 *  9. Reads all events and verifies that all events are within the watermark bounds.
 * @throws Exception If an exception occurs during execution.
 */
@Test(timeout = 180000)
public void testDurableDataLogFailRecoveryWatermarking() throws Exception {
    int instanceId = 0;
    int bookieCount = 1;
    int containerCount = 4;
    String readerGroup = "rgTx";
    // Creating a long term storage only once here.
    this.storageFactory = new InMemoryStorageFactory(executorService());
    log.info("Created a long term storage.");
    // Start a new BK & ZK, segment store and controller
    @Cleanup PravegaRunner pravegaRunner = new PravegaRunner(instanceId++, bookieCount, containerCount, this.storageFactory);
    // Create a scope and a stream
    createScopeStream(pravegaRunner.controllerRunner.controller, SCOPE, STREAM1);
    // Create a client to write events.
    @Cleanup ClientRunner clientRunner = new ClientRunner(pravegaRunner.controllerRunner);
    // Create a writer
    @Cleanup TransactionalEventStreamWriter<Long> writer = clientRunner.clientFactory.createTransactionalEventWriter("writer1", STREAM1, new JavaSerializer<>(), EventWriterConfig.builder().transactionTimeoutTime(TRANSACTION_TIMEOUT.toMillis()).build());
    AtomicBoolean stopFlag = new AtomicBoolean(false);
    // write events
    CompletableFuture<Void> writerFuture = writeTxEvents(writer, stopFlag);
    // scale the stream several times so that we get complex positions
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(5)).build();
    Stream streamObj = Stream.of(SCOPE, STREAM1);
    scale(pravegaRunner.controllerRunner.controller, streamObj, config);
    // get watermarks
    LinkedBlockingQueue<Watermark> watermarks = getWatermarks(pravegaRunner, stopFlag, writerFuture);
    // Shut down the controller
    pravegaRunner.controllerRunner.close();
    // Flush DurableLog to Long Term Storage
    flushToStorage(pravegaRunner.segmentStoreRunner.serviceBuilder);
    // Shutdown SegmentStore
    pravegaRunner.segmentStoreRunner.close();
    // Shutdown BookKeeper & ZooKeeper
    pravegaRunner.bookKeeperRunner.close();
    log.info("SegmentStore, BookKeeper & ZooKeeper shutdown");
    // Get the long term storage from the running pravega instance
    @Cleanup Storage storage = new AsyncStorageWrapper(new RollingStorage(this.storageFactory.createSyncStorage(), new SegmentRollingPolicy(DEFAULT_ROLLING_SIZE)), executorService());
    Map<Integer, String> backUpMetadataSegments = ContainerRecoveryUtils.createBackUpMetadataSegments(storage, containerCount, executorService(), TIMEOUT).join();
    // start a new BookKeeper and ZooKeeper.
    pravegaRunner.bookKeeperRunner = new BookKeeperRunner(instanceId++, bookieCount);
    createBookKeeperLogFactory();
    log.info("Started a new BookKeeper and ZooKeeper.");
    // Recover segments
    runRecovery(containerCount, storage, backUpMetadataSegments);
    // Start a new segment store and controller
    pravegaRunner.restartControllerAndSegmentStore(this.storageFactory, this.dataLogFactory);
    log.info("Started segment store and controller again.");
    // Create the client with new controller.
    @Cleanup ClientRunner newClientRunner = new ClientRunner(pravegaRunner.controllerRunner);
    // read events and verify
    readVerifyEventsWithWatermarks(readerGroup, newClientRunner, streamObj, watermarks);
}
Also used : SegmentRollingPolicy(io.pravega.segmentstore.storage.SegmentRollingPolicy) RollingStorage(io.pravega.segmentstore.storage.rolling.RollingStorage) Cleanup(lombok.Cleanup) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Storage(io.pravega.segmentstore.storage.Storage) RollingStorage(io.pravega.segmentstore.storage.rolling.RollingStorage) AsyncStorageWrapper(io.pravega.segmentstore.storage.AsyncStorageWrapper) AtomicLong(java.util.concurrent.atomic.AtomicLong) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) InMemoryStorageFactory(io.pravega.segmentstore.storage.mocks.InMemoryStorageFactory) Stream(io.pravega.client.stream.Stream) Watermark(io.pravega.shared.watermarks.Watermark) Test(org.junit.Test)
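
The writeTxEvents helper invoked above is not part of this excerpt. A hedged sketch of the transactional write pattern it presumably wraps, using the TransactionalEventStreamWriter the test constructs (beginTxn, writeEvent, commit), is shown below; the writer id, batch size, and timeout are illustrative assumptions rather than the test's actual values.

import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.Transaction;
import io.pravega.client.stream.TransactionalEventStreamWriter;
import io.pravega.client.stream.TxnFailedException;
import io.pravega.client.stream.impl.JavaSerializer;

public class TxWriteSketch {
    // Writes a small batch of Long events inside a single Pravega transaction.
    static void writeBatch(EventStreamClientFactory clientFactory, String stream) throws TxnFailedException {
        EventWriterConfig writerConfig = EventWriterConfig.builder()
                .transactionTimeoutTime(30_000)                     // illustrative timeout in ms
                .build();
        try (TransactionalEventStreamWriter<Long> writer = clientFactory.createTransactionalEventWriter(
                "writer-sketch", stream, new JavaSerializer<>(), writerConfig)) {
            Transaction<Long> txn = writer.beginTxn();
            for (long i = 0; i < 10; i++) {
                txn.writeEvent(i);                                  // buffered until commit
            }
            txn.commit();                                           // events become atomically visible to readers
        }
    }
}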

Example 90 with StreamConfiguration

Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.

From the class WatermarkingTest, method watermarkTest.

@Test(timeout = 120000)
public void watermarkTest() throws Exception {
    Controller controller = PRAVEGA.getLocalController();
    String scope = "scope";
    String stream = "watermarkTest";
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(5)).build();
    ClientConfig clientConfig = ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build();
    @Cleanup StreamManager streamManager = StreamManager.create(clientConfig);
    streamManager.createScope(scope);
    streamManager.createStream(scope, stream, config);
    Stream streamObj = Stream.of(scope, stream);
    // create 2 writers
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
    JavaSerializer<Long> javaSerializer = new JavaSerializer<>();
    @Cleanup EventStreamWriter<Long> writer1 = clientFactory.createEventWriter(stream, javaSerializer, EventWriterConfig.builder().build());
    @Cleanup EventStreamWriter<Long> writer2 = clientFactory.createEventWriter(stream, javaSerializer, EventWriterConfig.builder().build());
    AtomicBoolean stopFlag = new AtomicBoolean(false);
    // write events
    CompletableFuture<Void> writer1Future = writeEvents(writer1, stopFlag);
    CompletableFuture<Void> writer2Future = writeEvents(writer2, stopFlag);
    // scale the stream several times so that we get complex positions
    scale(controller, streamObj, config);
    @Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(clientConfig);
    @Cleanup ClientFactoryImpl syncClientFactory = new ClientFactoryImpl(scope, new ControllerImpl(ControllerImplConfig.builder().clientConfig(clientConfig).build(), connectionFactory.getInternalExecutor()), connectionFactory);
    String markStream = NameUtils.getMarkStreamForStream(stream);
    @Cleanup RevisionedStreamClient<Watermark> watermarkReader = syncClientFactory.createRevisionedStreamClient(markStream, new WatermarkSerializer(), SynchronizerConfig.builder().build());
    LinkedBlockingQueue<Watermark> watermarks = new LinkedBlockingQueue<>();
    fetchWatermarks(watermarkReader, watermarks, stopFlag);
    AssertExtensions.assertEventuallyEquals(true, () -> watermarks.size() >= 2, 100000);
    stopFlag.set(true);
    writer1Future.join();
    writer2Future.join();
    // read events from the stream
    @Cleanup ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, controller, syncClientFactory);
    Watermark watermark0 = watermarks.take();
    Watermark watermark1 = watermarks.take();
    assertTrue(watermark0.getLowerTimeBound() <= watermark0.getUpperTimeBound());
    assertTrue(watermark1.getLowerTimeBound() <= watermark1.getUpperTimeBound());
    assertTrue(watermark0.getLowerTimeBound() < watermark1.getLowerTimeBound());
    Map<Segment, Long> positionMap0 = watermark0.getStreamCut().entrySet().stream().collect(Collectors.toMap(x -> new Segment(scope, stream, x.getKey().getSegmentId()), Map.Entry::getValue));
    Map<Segment, Long> positionMap1 = watermark1.getStreamCut().entrySet().stream().collect(Collectors.toMap(x -> new Segment(scope, stream, x.getKey().getSegmentId()), Map.Entry::getValue));
    StreamCut streamCutFirst = new StreamCutImpl(streamObj, positionMap0);
    StreamCut streamCutSecond = new StreamCutImpl(streamObj, positionMap1);
    Map<Stream, StreamCut> firstMarkStreamCut = Collections.singletonMap(streamObj, streamCutFirst);
    Map<Stream, StreamCut> secondMarkStreamCut = Collections.singletonMap(streamObj, streamCutSecond);
    // read from stream cut of first watermark
    String readerGroup = "watermarkTest-group";
    readerGroupManager.createReaderGroup(readerGroup, ReaderGroupConfig.builder().stream(streamObj).startingStreamCuts(firstMarkStreamCut).endingStreamCuts(secondMarkStreamCut).disableAutomaticCheckpoints().build());
    @Cleanup final EventStreamReader<Long> reader = clientFactory.createReader("myreader", readerGroup, javaSerializer, ReaderConfig.builder().build());
    EventRead<Long> event = reader.readNextEvent(10000L);
    TimeWindow currentTimeWindow = reader.getCurrentTimeWindow(streamObj);
    while (event.getEvent() != null && currentTimeWindow.getLowerTimeBound() == null && currentTimeWindow.getUpperTimeBound() == null) {
        event = reader.readNextEvent(10000L);
        currentTimeWindow = reader.getCurrentTimeWindow(streamObj);
    }
    assertNotNull(currentTimeWindow.getUpperTimeBound());
    // read all events and verify that all events are below the bounds
    while (event.getEvent() != null) {
        Long time = event.getEvent();
        log.info("timewindow = {} event = {}", currentTimeWindow, time);
        assertTrue(currentTimeWindow.getLowerTimeBound() == null || time >= currentTimeWindow.getLowerTimeBound());
        assertTrue(currentTimeWindow.getUpperTimeBound() == null || time <= currentTimeWindow.getUpperTimeBound());
        TimeWindow nextTimeWindow = reader.getCurrentTimeWindow(streamObj);
        assertTrue(currentTimeWindow.getLowerTimeBound() == null || nextTimeWindow.getLowerTimeBound() >= currentTimeWindow.getLowerTimeBound());
        assertTrue(currentTimeWindow.getUpperTimeBound() == null || nextTimeWindow.getUpperTimeBound() >= currentTimeWindow.getUpperTimeBound());
        currentTimeWindow = nextTimeWindow;
        event = reader.readNextEvent(10000L);
        if (event.isCheckpoint()) {
            event = reader.readNextEvent(10000L);
        }
    }
    assertNotNull(currentTimeWindow.getLowerTimeBound());
}
Also used : StreamCut(io.pravega.client.stream.StreamCut) EventStreamWriter(io.pravega.client.stream.EventStreamWriter) AssertExtensions(io.pravega.test.common.AssertExtensions) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Cleanup(lombok.Cleanup) JavaSerializer(io.pravega.client.stream.impl.JavaSerializer) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) EventRead(io.pravega.client.stream.EventRead) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) Stream(io.pravega.client.stream.Stream) Duration(java.time.Duration) Map(java.util.Map) TimeWindow(io.pravega.client.stream.TimeWindow) ClassRule(org.junit.ClassRule) Transaction(io.pravega.client.stream.Transaction) ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) WatermarkSerializer(io.pravega.client.watermark.WatermarkSerializer) CompletionException(java.util.concurrent.CompletionException) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Collectors(java.util.stream.Collectors) ControllerImplConfig(io.pravega.client.control.impl.ControllerImplConfig) Slf4j(lombok.extern.slf4j.Slf4j) ThreadPooledTestSuite(io.pravega.test.common.ThreadPooledTestSuite) Controller(io.pravega.client.control.impl.Controller) Futures(io.pravega.common.concurrent.Futures) Segment(io.pravega.client.segment.impl.Segment) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) StreamManager(io.pravega.client.admin.StreamManager) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) AtomicReference(java.util.concurrent.atomic.AtomicReference) StreamCutImpl(io.pravega.client.stream.impl.StreamCutImpl) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) RevisionedStreamClient(io.pravega.client.state.RevisionedStreamClient) SynchronizerConfig(io.pravega.client.state.SynchronizerConfig) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) TransactionalEventStreamWriter(io.pravega.client.stream.TransactionalEventStreamWriter) EventWriterConfig(io.pravega.client.stream.EventWriterConfig) NameUtils(io.pravega.shared.NameUtils) Iterator(java.util.Iterator) Assert.assertNotNull(org.junit.Assert.assertNotNull) Assert.assertTrue(org.junit.Assert.assertTrue) EventStreamReader(io.pravega.client.stream.EventStreamReader) Test(org.junit.Test) Watermark(io.pravega.shared.watermarks.Watermark) TimeUnit(java.util.concurrent.TimeUnit) AtomicLong(java.util.concurrent.atomic.AtomicLong) Assert.assertNull(org.junit.Assert.assertNull) SynchronizerClientFactory(io.pravega.client.SynchronizerClientFactory) ControllerImpl(io.pravega.client.control.impl.ControllerImpl) Revision(io.pravega.client.state.Revision) ReaderConfig(io.pravega.client.stream.ReaderConfig) TxnFailedException(io.pravega.client.stream.TxnFailedException) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Assert.assertEquals(org.junit.Assert.assertEquals) ClientConfig(io.pravega.client.ClientConfig)
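
The fetchWatermarks helper is also outside this excerpt. Under the assumption that it drains the mark stream through the RevisionedStreamClient created above, a minimal sketch could look like the following; it uses the generic RevisionedStreamClient read API (fetchOldestRevision, readFrom) rather than the test's actual helper, so treat the calls as an assumption about that API, not as the test's code.

import io.pravega.client.state.Revision;
import io.pravega.client.state.RevisionedStreamClient;
import io.pravega.shared.watermarks.Watermark;
import java.util.Iterator;
import java.util.Map;

public class WatermarkPollSketch {
    // Prints the time bounds of every watermark currently readable on the mark stream.
    static void printWatermarks(RevisionedStreamClient<Watermark> watermarkReader) {
        Revision oldest = watermarkReader.fetchOldestRevision();
        Iterator<Map.Entry<Revision, Watermark>> entries = watermarkReader.readFrom(oldest);
        while (entries.hasNext()) {
            Watermark mark = entries.next().getValue();
            System.out.println("lowerTimeBound=" + mark.getLowerTimeBound()
                    + " upperTimeBound=" + mark.getUpperTimeBound());
        }
    }
}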

Aggregations

StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 251
Test (org.junit.Test): 207
ScalingPolicy (io.pravega.client.stream.ScalingPolicy): 112
Cleanup (lombok.Cleanup): 86
HashMap (java.util.HashMap): 64
Stream (io.pravega.client.stream.Stream): 63
ArrayList (java.util.ArrayList): 60
CompletableFuture (java.util.concurrent.CompletableFuture): 60
SocketConnectionFactoryImpl (io.pravega.client.connection.impl.SocketConnectionFactoryImpl): 54
List (java.util.List): 52
ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager): 51
ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory): 49
UUID (java.util.UUID): 48
ClientFactoryImpl (io.pravega.client.stream.impl.ClientFactoryImpl): 47
Map (java.util.Map): 46
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 45
ReaderGroupConfig (io.pravega.client.stream.ReaderGroupConfig): 44
ClientConfig (io.pravega.client.ClientConfig): 42
ReaderGroupManagerImpl (io.pravega.client.admin.impl.ReaderGroupManagerImpl): 41
Controller (io.pravega.controller.stream.api.grpc.v1.Controller): 40