
Example 61 with Controller

Use of io.pravega.client.control.impl.Controller in project pravega by pravega.

From the class EndToEndAutoScaleUpWithTxnTest, method main.

public static void main(String[] args) throws Exception {
    try {
        @Cleanup TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port);
        Controller controller = controllerWrapper.getController();
        controllerWrapper.getControllerService().createScope(NameUtils.INTERNAL_SCOPE_NAME, 0L).get();
        @Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        @Cleanup ClientFactoryImpl internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller, connectionFactory);
        @Cleanup("shutdownNow") val executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "test");
        @Cleanup ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        @Cleanup AutoScaleMonitor autoScaleMonitor = new AutoScaleMonitor(store, internalCF, AutoScalerConfig.builder().with(AutoScalerConfig.MUTE_IN_SECONDS, 0).with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0).build());
        @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, false, "localhost", 12345, store, tableStore, autoScaleMonitor.getStatsRecorder(), autoScaleMonitor.getTableSegmentStatsRecorder(), null, null, null, true, serviceBuilder.getLowPriorityExecutor(), Config.TLS_PROTOCOL_VERSION.toArray(new String[Config.TLS_PROTOCOL_VERSION.size()]));
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test", 0L).get();
        controller.createStream("test", "test", CONFIG).get();
        @Cleanup MockClientFactory clientFactory = new MockClientFactory("test", controller, internalCF.getConnectionPool());
        // Mocking pravega service by putting scale up and scale down requests for the stream
        EventWriterConfig writerConfig = EventWriterConfig.builder().transactionTimeoutTime(30000).build();
        TransactionalEventStreamWriter<String> test = clientFactory.createTransactionalEventWriter("writer", "test", new UTF8StringSerializer(), writerConfig);
        // region Successful commit tests
        Transaction<String> txn1 = test.beginTxn();
        txn1.writeEvent("1");
        txn1.flush();
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 1.0 / 3.0);
        map.put(1.0 / 3.0, 2.0 / 3.0);
        map.put(2.0 / 3.0, 1.0);
        Stream stream = new StreamImpl("test", "test");
        controller.startScale(stream, Collections.singletonList(0L), map).get();
        Transaction<String> txn2 = test.beginTxn();
        txn2.writeEvent("2");
        txn2.flush();
        txn2.commit();
        txn1.commit();
        Thread.sleep(1000);
        @Cleanup ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl("test", controller, clientFactory);
        readerGroupManager.createReaderGroup("readergrp", ReaderGroupConfig.builder().stream("test/test").build());
        final EventStreamReader<String> reader = clientFactory.createReader("1", "readergrp", new JavaSerializer<>(), ReaderConfig.builder().build());
        String event1 = reader.readNextEvent(SECONDS.toMillis(60)).getEvent();
        String event2 = reader.readNextEvent(SECONDS.toMillis(60)).getEvent();
        assert event1.equals("1");
        assert event2.equals("2");
        final AtomicBoolean done = new AtomicBoolean(false);
        startWriter(test, done);
        Retry.withExpBackoff(10, 10, 100, 10000).retryingOn(NotDoneException.class).throwingOn(RuntimeException.class).runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
            if (streamSegments.getSegments().stream().anyMatch(x -> NameUtils.getEpoch(x.getSegmentId()) > 5)) {
                System.err.println("Success");
                log.info("Success");
                System.exit(0);
            } else {
                throw new NotDoneException();
            }
        }), executor).exceptionally(e -> {
            System.err.println("Failure");
            log.error("Failure");
            System.exit(1);
            return null;
        }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        log.error("Test failed with exception: {}", e);
        System.exit(-1);
    }
    System.exit(0);
}
Also used : ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) StreamImpl(io.pravega.client.stream.impl.StreamImpl) Retry(io.pravega.common.util.Retry) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) AutoScaleMonitor(io.pravega.segmentstore.server.host.stat.AutoScaleMonitor) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Cleanup(lombok.Cleanup) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) JavaSerializer(io.pravega.client.stream.impl.JavaSerializer) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ServiceBuilderConfig(io.pravega.segmentstore.server.store.ServiceBuilderConfig) ServiceBuilder(io.pravega.segmentstore.server.store.ServiceBuilder) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) TestingServerStarter(io.pravega.test.common.TestingServerStarter) Stream(io.pravega.client.stream.Stream) Map(java.util.Map) TestingServer(org.apache.curator.test.TestingServer) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) Transaction(io.pravega.client.stream.Transaction) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) TransactionalEventStreamWriter(io.pravega.client.stream.TransactionalEventStreamWriter) ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) EventWriterConfig(io.pravega.client.stream.EventWriterConfig) AutoScalerConfig(io.pravega.segmentstore.server.host.stat.AutoScalerConfig) NameUtils(io.pravega.shared.NameUtils) lombok.val(lombok.val) EventStreamReader(io.pravega.client.stream.EventStreamReader) Slf4j(lombok.extern.slf4j.Slf4j) Config(io.pravega.controller.util.Config) ReaderConfig(io.pravega.client.stream.ReaderConfig) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Collections(java.util.Collections) Controller(io.pravega.client.control.impl.Controller) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) UTF8StringSerializer(io.pravega.client.stream.impl.UTF8StringSerializer) MockClientFactory(io.pravega.client.stream.mock.MockClientFactory) SECONDS(java.util.concurrent.TimeUnit.SECONDS) ClientConfig(io.pravega.client.ClientConfig)
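
The test above mixes the public client API with an in-process controller and segment store. The transactional write pattern it exercises can be distilled into a shorter sketch against the public client API alone; this is a minimal illustration, and the controller URI, scope and stream names below are placeholders rather than values taken from the test:

import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.Transaction;
import io.pravega.client.stream.TransactionalEventStreamWriter;
import io.pravega.client.stream.impl.UTF8StringSerializer;
import java.net.URI;

public class TxnWriteSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder connection details; the test wires these through ControllerWrapper instead.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        try (EventStreamClientFactory factory = EventStreamClientFactory.withScope("test", clientConfig);
             TransactionalEventStreamWriter<String> writer = factory.createTransactionalEventWriter(
                     "writer", "test", new UTF8StringSerializer(),
                     EventWriterConfig.builder().transactionTimeoutTime(30000).build())) {
            Transaction<String> txn = writer.beginTxn();  // open a transaction
            txn.writeEvent("1");                          // buffer an event inside the transaction
            txn.flush();                                  // push buffered events to the segment store
            txn.commit();                                 // atomically make the events visible to readers
        }
    }
}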

Example 62 with Controller

Use of io.pravega.client.control.impl.Controller in project pravega by pravega.

From the class ControllerWatermarkingTest, method watermarkTest.

@Test(timeout = 60000)
public void watermarkTest() throws Exception {
    Controller controller = controllerWrapper.getController();
    String scope = "scope";
    String stream = "stream";
    controller.createScope(scope).join();
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    controller.createStream(scope, stream, config).join();
    String markStream = NameUtils.getMarkStreamForStream(stream);
    Stream streamObj = new StreamImpl(scope, stream);
    WriterPosition pos1 = WriterPosition.builder().segments(Collections.singletonMap(new Segment(scope, stream, 0L), 10L)).build();
    WriterPosition pos2 = WriterPosition.builder().segments(Collections.singletonMap(new Segment(scope, stream, 0L), 20L)).build();
    controller.noteTimestampFromWriter("1", streamObj, 1L, pos1).join();
    controller.noteTimestampFromWriter("2", streamObj, 2L, pos2).join();
    @Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
    @Cleanup RevisionedStreamClient<Watermark> reader = clientFactory.createRevisionedStreamClient(markStream, new WatermarkSerializer(), SynchronizerConfig.builder().build());
    AssertExtensions.assertEventuallyEquals(true, () -> {
        Iterator<Entry<Revision, Watermark>> watermarks = reader.readFrom(reader.fetchOldestRevision());
        return watermarks.hasNext();
    }, 30000);
    Iterator<Entry<Revision, Watermark>> watermarks = reader.readFrom(reader.fetchOldestRevision());
    Watermark watermark = watermarks.next().getValue();
    assertEquals(watermark.getLowerTimeBound(), 1L);
    assertTrue(watermark.getStreamCut().entrySet().stream().anyMatch(x -> x.getKey().getSegmentId() == 0L && x.getValue() == 20L));
    controller.sealStream(scope, stream).join();
    controller.deleteStream(scope, stream).join();
    AssertExtensions.assertFutureThrows("Mark Stream should not exist", controller.getCurrentSegments(scope, markStream), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
}
Also used : Segment(io.pravega.client.segment.impl.Segment) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) StreamImpl(io.pravega.client.stream.impl.StreamImpl) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) AssertExtensions(io.pravega.test.common.AssertExtensions) Exceptions(io.pravega.common.Exceptions) Cleanup(lombok.Cleanup) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ServiceBuilderConfig(io.pravega.segmentstore.server.store.ServiceBuilderConfig) ServiceBuilder(io.pravega.segmentstore.server.store.ServiceBuilder) StoreException(io.pravega.controller.store.stream.StoreException) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) TestingServerStarter(io.pravega.test.common.TestingServerStarter) Stream(io.pravega.client.stream.Stream) After(org.junit.After) TestingServer(org.apache.curator.test.TestingServer) RevisionedStreamClient(io.pravega.client.state.RevisionedStreamClient) SynchronizerConfig(io.pravega.client.state.SynchronizerConfig) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) Before(org.junit.Before) NameUtils(io.pravega.shared.NameUtils) WatermarkSerializer(io.pravega.client.watermark.WatermarkSerializer) Iterator(java.util.Iterator) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) WriterPosition(io.pravega.client.stream.impl.WriterPosition) Watermark(io.pravega.shared.watermarks.Watermark) Entry(java.util.Map.Entry) Revision(io.pravega.client.state.Revision) TestUtils(io.pravega.test.common.TestUtils) ControllerWrapper(io.pravega.test.integration.demo.ControllerWrapper) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Controller(io.pravega.client.control.impl.Controller) Assert.assertEquals(org.junit.Assert.assertEquals) ClientConfig(io.pravega.client.ClientConfig)
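
The watermark-reading part of this test can also be expressed against the public client API. The sketch below is illustrative only: the controller URI, scope and stream names are placeholders, and it assumes the stream already has writers noting timestamps so that watermarks exist on the mark stream:

import io.pravega.client.ClientConfig;
import io.pravega.client.SynchronizerClientFactory;
import io.pravega.client.state.Revision;
import io.pravega.client.state.RevisionedStreamClient;
import io.pravega.client.state.SynchronizerConfig;
import io.pravega.client.watermark.WatermarkSerializer;
import io.pravega.shared.NameUtils;
import io.pravega.shared.watermarks.Watermark;
import java.net.URI;
import java.util.Iterator;
import java.util.Map.Entry;

public class WatermarkReadSketch {
    public static void main(String[] args) {
        // Placeholder connection details; the test runs an in-process controller instead.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        String scope = "scope";
        String stream = "stream";
        // Watermarks are published to an internal "mark" stream derived from the stream name.
        String markStream = NameUtils.getMarkStreamForStream(stream);
        try (SynchronizerClientFactory factory = SynchronizerClientFactory.withScope(scope, clientConfig);
             RevisionedStreamClient<Watermark> client = factory.createRevisionedStreamClient(
                     markStream, new WatermarkSerializer(), SynchronizerConfig.builder().build())) {
            // Read every watermark written so far, oldest first.
            Iterator<Entry<Revision, Watermark>> watermarks = client.readFrom(client.fetchOldestRevision());
            while (watermarks.hasNext()) {
                Watermark w = watermarks.next().getValue();
                System.out.println("lower time bound = " + w.getLowerTimeBound()
                        + ", stream cut = " + w.getStreamCut());
            }
        }
    }
}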

Example 63 with Controller

Use of io.pravega.client.control.impl.Controller in project pravega by pravega.

From the class StreamCutsTest, method testReaderGroupCuts.

@Test(timeout = 40000)
public void testReaderGroupCuts() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope("test", 0L).get();
    controller.createStream("test", "test", config).get();
    @Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().build());
    writer.writeEvent("0", "fpj was here").get();
    writer.writeEvent("0", "fpj was here again").get();
    @Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory);
    groupManager.createReaderGroup("cuts", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").groupRefreshTimeMillis(0).build());
    @Cleanup ReaderGroup readerGroup = groupManager.getReaderGroup("cuts");
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "cuts", new JavaSerializer<>(), ReaderConfig.builder().initialAllocationDelay(0).build());
    EventRead<String> firstEvent = reader.readNextEvent(5000);
    assertNotNull(firstEvent.getEvent());
    assertEquals("fpj was here", firstEvent.getEvent());
    readerGroup.initiateCheckpoint("cp1", executor);
    EventRead<String> cpEvent = reader.readNextEvent(5000);
    assertEquals("cp1", cpEvent.getCheckpointName());
    EventRead<String> secondEvent = reader.readNextEvent(5000);
    assertNotNull(secondEvent.getEvent());
    assertEquals("fpj was here again", secondEvent.getEvent());
    Map<Stream, StreamCut> cuts = readerGroup.getStreamCuts();
    validateCuts(readerGroup, cuts, Collections.singleton(getQualifiedStreamSegmentName("test", "test", 0L)));
    // Scale the stream to verify that we get more segments in the cut.
    Stream stream = Stream.of("test", "test");
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.5);
    map.put(0.5, 1.0);
    Boolean result = controller.scaleStream(stream, Collections.singletonList(0L), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 1st scaling");
    writer.writeEvent("0", "fpj was here again0").get();
    writer.writeEvent("1", "fpj was here again1").get();
    EventRead<String> eosEvent = reader.readNextEvent(100);
    // Reader does not yet see the data because there has been no CP
    assertNull(eosEvent.getEvent());
    CompletableFuture<Checkpoint> checkpoint = readerGroup.initiateCheckpoint("cp2", executor);
    cpEvent = reader.readNextEvent(100);
    EventRead<String> event0 = reader.readNextEvent(100);
    EventRead<String> event1 = reader.readNextEvent(100);
    cuts = checkpoint.get(5, TimeUnit.SECONDS).asImpl().getPositions();
    // Validate the reader did not release the segments before the checkpoint.
    // This is important because it means that once the checkpoint is initiated no segments change readers.
    Set<String> segmentNames = ImmutableSet.of(getQualifiedStreamSegmentName("test", "test", computeSegmentId(0, 0)));
    validateCuts(readerGroup, cuts, segmentNames);
    CompletableFuture<Map<Stream, StreamCut>> futureCuts = readerGroup.generateStreamCuts(executor);
    EventRead<String> emptyEvent = reader.readNextEvent(100);
    cuts = futureCuts.get();
    segmentNames = ImmutableSet.of(getQualifiedStreamSegmentName("test", "test", computeSegmentId(1, 1)), getQualifiedStreamSegmentName("test", "test", computeSegmentId(2, 1)));
    validateCuts(readerGroup, cuts, segmentNames);
    // Scale down to verify that the number drops back.
    map = new HashMap<>();
    map.put(0.0, 1.0);
    ArrayList<Long> toSeal = new ArrayList<>();
    toSeal.add(computeSegmentId(1, 1));
    toSeal.add(computeSegmentId(2, 1));
    result = controller.scaleStream(stream, Collections.unmodifiableList(toSeal), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 2nd scaling");
    writer.writeEvent("0", "fpj was here again2").get();
    // Reader sees the segment is empty
    emptyEvent = reader.readNextEvent(100);
    assertNull(emptyEvent.getEvent());
    checkpoint = readerGroup.initiateCheckpoint("cp3", executor);
    cpEvent = reader.readNextEvent(100);
    assertEquals("cp3", cpEvent.getCheckpointName());
    // Reader releases segments here
    event0 = reader.readNextEvent(5000);
    assertTrue(event0.getEvent().endsWith("2"));
    cuts = readerGroup.getStreamCuts();
    long three = computeSegmentId(3, 2);
    validateCuts(readerGroup, cuts, Collections.singleton(getQualifiedStreamSegmentName("test", "test", three)));
    // Scale up to 4 segments again.
    map = new HashMap<>();
    map.put(0.0, 0.25);
    map.put(0.25, 0.5);
    map.put(0.5, 0.75);
    map.put(0.75, 1.0);
    result = controller.scaleStream(stream, Collections.singletonList(three), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 3rd scaling");
    writer.writeEvent("0", "fpj was here again3").get();
    // Reader sees the segment is empty
    emptyEvent = reader.readNextEvent(100);
    assertNull(emptyEvent.getEvent());
    readerGroup.initiateCheckpoint("cp4", executor);
    cpEvent = reader.readNextEvent(1000);
    assertEquals("cp4", cpEvent.getCheckpointName());
    // Reader releases segments here
    event0 = reader.readNextEvent(5000);
    assertNotNull(event0.getEvent());
    cuts = readerGroup.getStreamCuts();
    segmentNames = new HashSet<>();
    long four = computeSegmentId(4, 3);
    long five = computeSegmentId(5, 3);
    long six = computeSegmentId(6, 3);
    long seven = computeSegmentId(7, 3);
    segmentNames.add(getQualifiedStreamSegmentName("test", "test", four));
    segmentNames.add(getQualifiedStreamSegmentName("test", "test", five));
    segmentNames.add(getQualifiedStreamSegmentName("test", "test", six));
    segmentNames.add(getQualifiedStreamSegmentName("test", "test", seven));
    validateCuts(readerGroup, cuts, Collections.unmodifiableSet(segmentNames));
}
Also used : HashMap(java.util.HashMap) ReaderGroup(io.pravega.client.stream.ReaderGroup) ArrayList(java.util.ArrayList) Cleanup(lombok.Cleanup) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Stream(io.pravega.client.stream.Stream) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamCut(io.pravega.client.stream.StreamCut) Controller(io.pravega.client.control.impl.Controller) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) Checkpoint(io.pravega.client.stream.Checkpoint) Map(java.util.Map) Test(org.junit.Test)
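
Outside of a test, the same stream-cut information is obtained through the public ReaderGroupManager API rather than ReaderGroupManagerImpl and a raw Controller. The following is a minimal sketch under that assumption; the controller URI, scope, stream and group names are placeholders, not values from the test:

import io.pravega.client.ClientConfig;
import io.pravega.client.admin.ReaderGroupManager;
import io.pravega.client.stream.ReaderGroup;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamCut;
import io.pravega.common.concurrent.ExecutorServiceHelpers;
import java.net.URI;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;

public class StreamCutSketch {
    public static void main(String[] args) {
        // Placeholder connection details and names.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        ScheduledExecutorService executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "cuts");
        try (ReaderGroupManager groupManager = ReaderGroupManager.withScope("test", clientConfig)) {
            groupManager.createReaderGroup("cuts",
                    ReaderGroupConfig.builder().stream("test/test").build());
            ReaderGroup readerGroup = groupManager.getReaderGroup("cuts");
            // Positions of the group as of its last checkpoint, one StreamCut per stream it reads.
            Map<Stream, StreamCut> checkpointed = readerGroup.getStreamCuts();
            System.out.println("checkpointed cuts: " + checkpointed);
            // Ask the group's readers to publish fresh positions; the future completes only once
            // active readers check in, so attach a callback instead of blocking on it here.
            readerGroup.generateStreamCuts(executor)
                    .thenAccept(cuts -> System.out.println("generated cuts: " + cuts));
            readerGroup.close();
        } finally {
            ExecutorServiceHelpers.shutdown(executor);
        }
    }
}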

Example 64 with Controller

Use of io.pravega.client.control.impl.Controller in project pravega by pravega.

From the class ControllerServiceTest, method testControllerService.

@Test(timeout = 80000)
public void testControllerService() throws Exception {
    final String scope1 = "scope1";
    final String scope2 = "scope2";
    controllerWrapper.getControllerService().createScope("scope1", 0L).get();
    controllerWrapper.getControllerService().createScope("scope2", 0L).get();
    Controller controller = controllerWrapper.getController();
    final String streamName1 = "stream1";
    final String streamName2 = "stream2";
    final String streamName3 = "stream3";
    final ScalingPolicy scalingPolicy = ScalingPolicy.fixed(2);
    final StreamConfiguration config1 = StreamConfiguration.builder().scalingPolicy(scalingPolicy).build();
    final StreamConfiguration config2 = StreamConfiguration.builder().scalingPolicy(scalingPolicy).build();
    final StreamConfiguration config3 = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(3)).build();
    createAStream(scope1, streamName1, controller, config1);
    // Same name in different scope
    createAStream(scope2, streamName1, controller, config2);
    // Different name in same scope
    createAStream(scope1, streamName2, controller, config3);
    createAStream(scope1, streamName3, controller, config3);
    final String kvtName1 = "kvtable1";
    final String kvtName2 = "kvtable2";
    final KeyValueTableConfiguration kvtConfig1 = KeyValueTableConfiguration.builder().partitionCount(2).primaryKeyLength(4).secondaryKeyLength(4).build();
    createAKeyValueTable(scope1, kvtName1, controller, kvtConfig1);
    // Same name in different scope
    createAKeyValueTable(scope2, kvtName1, controller, kvtConfig1);
    // Different name in different scope
    createAKeyValueTable(scope2, kvtName2, controller, kvtConfig1);
    final String scopeSeal = "scopeSeal";
    final String streamNameSeal = "streamSeal";
    sealAStream(controllerWrapper, controller, scalingPolicy, scopeSeal, streamNameSeal);
    sealASealedStream(controller, scopeSeal, streamNameSeal);
    sealNonExistantStream(controller, scopeSeal);
    streamDuplicationNotAllowed(scope1, streamName1, controller, config1);
    // update stream config section
    updateStreamName(controller, scope1, scalingPolicy);
    updateScalingPolicy(controller, scope1, streamName1);
    updateTargetRate(controller, scope1, streamName1);
    updateScaleFactor(controller, scope1, streamName1);
    updataMinSegmentes(controller, scope1, streamName1);
    updateConfigOfNonExistantStream(controller);
    // get currently active segments
    getActiveSegments(controller, scope1, streamName1);
    getActiveSegmentsForNonExistentStream(controller);
    // get positions at a given time stamp
    getSegmentsAtTime(controller, scope1, streamName1);
    getSegmentsAtTime(controller, scope1, streamName2);
    getSegmentsForNonExistentStream(controller);
    getSegmentsBeforeCreation(controller, scope1, streamName1);
    getSegmentsAfterCreation(controller, scope1, streamName1);
    readerGroupsTest(controller, scope1, streamName1, streamName2, streamName3);
    updateSubscriberStreamCutTest(controller, scope2, streamName1);
}
Also used : ScalingPolicy(io.pravega.client.stream.ScalingPolicy) KeyValueTableConfiguration(io.pravega.client.tables.KeyValueTableConfiguration) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Controller(io.pravega.client.control.impl.Controller) Test(org.junit.Test)
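
The test drives its create/seal/update operations through the internal Controller and test helpers. For comparison, applications would normally create the same scope, stream and key-value table through the public admin clients; the sketch below is an assumption-laden illustration (the scope, stream and table names mirror the test, but the controller URI is a placeholder):

import io.pravega.client.ClientConfig;
import io.pravega.client.admin.KeyValueTableManager;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.tables.KeyValueTableConfiguration;
import java.net.URI;

public class AdminSketch {
    public static void main(String[] args) {
        // Placeholder controller URI; scope/stream/table names follow the test above.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        try (StreamManager streamManager = StreamManager.create(clientConfig);
             KeyValueTableManager kvtManager = KeyValueTableManager.create(clientConfig)) {
            streamManager.createScope("scope1");
            streamManager.createStream("scope1", "stream1",
                    StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(2)).build());
            kvtManager.createKeyValueTable("scope1", "kvtable1",
                    KeyValueTableConfiguration.builder()
                            .partitionCount(2).primaryKeyLength(4).secondaryKeyLength(4).build());
        }
    }
}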

Example 65 with Controller

Use of io.pravega.client.control.impl.Controller in project pravega by pravega.

From the class AppendTest, method appendThroughConditionalClient.

@Test(timeout = 10000)
public void appendThroughConditionalClient() throws Exception {
    String endpoint = "localhost";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "scope";
    String stream = "appendThroughConditionalClient";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
    @Cleanup Controller controller = new MockController(endpoint, port, connectionPool, true);
    controller.createScope(scope);
    controller.createStream(scope, stream, StreamConfiguration.builder().build());
    ConditionalOutputStreamFactoryImpl segmentClient = new ConditionalOutputStreamFactoryImpl(controller, connectionPool);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup ConditionalOutputStream out = segmentClient.createConditionalOutputStream(segment, DelegationTokenProviderFactory.createWithEmptyToken(), EventWriterConfig.builder().build());
    assertTrue(out.write(ByteBuffer.wrap(testString.getBytes()), 0));
}
Also used : ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) ConditionalOutputStream(io.pravega.client.segment.impl.ConditionalOutputStream) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) MockController(io.pravega.client.stream.mock.MockController) Controller(io.pravega.client.control.impl.Controller) Cleanup(lombok.Cleanup) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) CreateSegment(io.pravega.shared.protocol.netty.WireCommands.CreateSegment) Segment(io.pravega.client.segment.impl.Segment) NoSuchSegment(io.pravega.shared.protocol.netty.WireCommands.NoSuchSegment) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) ConditionalOutputStreamFactoryImpl(io.pravega.client.segment.impl.ConditionalOutputStreamFactoryImpl) Test(org.junit.Test)
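
The boolean returned by ConditionalOutputStream.write indicates whether the append was accepted with the segment still ending at the expected offset. A small helper can make that compare-and-append semantics explicit; the method name tryAppend and its parameters below are illustrative and not part of the original test:

import io.pravega.client.segment.impl.ConditionalOutputStream;
import io.pravega.client.segment.impl.SegmentSealedException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class ConditionalAppendSketch {

    /**
     * Attempts a single compare-and-append: the bytes are appended only if the
     * segment still ends exactly at expectedOffset. Returns false if another
     * writer appended first, in which case the caller should re-read the segment
     * state before deciding whether and where to retry.
     */
    static boolean tryAppend(ConditionalOutputStream out, long expectedOffset, String event)
            throws SegmentSealedException {
        ByteBuffer payload = ByteBuffer.wrap(event.getBytes(StandardCharsets.UTF_8));
        return out.write(payload, expectedOffset);
    }
}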

Aggregations

Controller (io.pravega.client.control.impl.Controller): 120
Test (org.junit.Test): 95
Cleanup (lombok.Cleanup): 81
Segment (io.pravega.client.segment.impl.Segment): 53
EventWriterConfig (io.pravega.client.stream.EventWriterConfig): 50
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 47
Stream (io.pravega.client.stream.Stream): 37
CompletableFuture (java.util.concurrent.CompletableFuture): 35
SegmentOutputStreamFactory (io.pravega.client.segment.impl.SegmentOutputStreamFactory): 34
SocketConnectionFactoryImpl (io.pravega.client.connection.impl.SocketConnectionFactoryImpl): 33
HashMap (java.util.HashMap): 29
ClientConfig (io.pravega.client.ClientConfig): 28
ClientFactoryImpl (io.pravega.client.stream.impl.ClientFactoryImpl): 28
StreamImpl (io.pravega.client.stream.impl.StreamImpl): 25
ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager): 24
Slf4j (lombok.extern.slf4j.Slf4j): 24
Before (org.junit.Before): 23
ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory): 22
ConnectionPoolImpl (io.pravega.client.connection.impl.ConnectionPoolImpl): 21
ScalingPolicy (io.pravega.client.stream.ScalingPolicy): 21