
Example 11 with ConnectionPoolImpl

Use of io.pravega.client.connection.impl.ConnectionPoolImpl in project pravega by pravega.

The class IntermittentCnxnFailureTest, method setup:

@Before
public void setup() throws Exception {
    MetricsConfig metricsConfig = MetricsConfig.builder().with(MetricsConfig.ENABLE_STATISTICS, true).with(MetricsConfig.ENABLE_STATSD_REPORTER, false).build();
    metricsConfig.setDynamicCacheEvictionDuration(Duration.ofSeconds(60));
    MetricsProvider.initialize(metricsConfig);
    statsProvider = MetricsProvider.getMetricsProvider();
    statsProvider.startWithoutExporting();
    streamStore = spy(StreamStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor));
    bucketStore = StreamStoreFactory.createZKBucketStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), new SocketConnectionFactoryImpl(ClientConfig.builder().build()));
    segmentHelperMock = spy(new SegmentHelper(connectionPool, hostStore, executor));
    doReturn(Controller.NodeUri.newBuilder().setEndpoint("localhost").setPort(Config.SERVICE_PORT).build()).when(segmentHelperMock).getSegmentUri(anyString(), anyString(), anyInt());
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelperMock, executor, "host", GrpcAuthHelper.getDisabledAuthHelper());
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelperMock, executor, "host", GrpcAuthHelper.getDisabledAuthHelper());
    controllerService = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
    StreamMetrics.initialize();
    controllerService.createScope(SCOPE, 0L).get();
}
Also used : TaskMetadataStore(io.pravega.controller.store.task.TaskMetadataStore) HostControllerStore(io.pravega.controller.store.host.HostControllerStore) ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) SegmentHelper(io.pravega.controller.server.SegmentHelper) ControllerService(io.pravega.controller.server.ControllerService) MetricsConfig(io.pravega.shared.metrics.MetricsConfig) Before(org.junit.Before)
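
For reference, here is the connection wiring the setup above relies on, reduced to a minimal sketch without the controller test scaffolding. hostStore and executor stand for the same collaborators created in the setup, and the types are the ones listed under "Also used"; this is an illustration of the pattern, not the project's exact code.

ClientConfig clientConfig = ClientConfig.builder().build();
try (SocketConnectionFactoryImpl connectionFactory = new SocketConnectionFactoryImpl(clientConfig);
     ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(clientConfig, connectionFactory)) {
    // The pool multiplexes client connections to segment store nodes;
    // SegmentHelper issues all of its segment operations through it.
    SegmentHelper segmentHelper = new SegmentHelper(connectionPool, hostStore, executor);
    // ... exercise segmentHelper ...
}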

Example 12 with ConnectionPoolImpl

Use of io.pravega.client.connection.impl.ConnectionPoolImpl in project pravega by pravega.

The class ReadTest, method readThroughSegmentClient:

@Test(timeout = 10000)
public void readThroughSegmentClient() throws SegmentSealedException, EndOfSegmentException, SegmentTruncatedException {
    String endpoint = "localhost";
    String scope = "scope";
    String stream = "readThroughSegmentClient";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
    @Cleanup Controller controller = new MockController(endpoint, port, connectionPool, true);
    controller.createScope(scope);
    controller.createStream(scope, stream, StreamConfiguration.builder().build());
    SegmentOutputStreamFactoryImpl segmentproducerClient = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
    SegmentInputStreamFactoryImpl segmentConsumerClient = new SegmentInputStreamFactoryImpl(controller, connectionPool);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup SegmentOutputStream out = segmentproducerClient.createOutputStreamForSegment(segment, segmentSealedCallback, EventWriterConfig.builder().build(), DelegationTokenProviderFactory.createWithEmptyToken());
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), new CompletableFuture<>()));
    out.flush();
    @Cleanup EventSegmentReader in = segmentConsumerClient.createEventReaderForSegment(segment);
    ByteBuffer result = in.read();
    assertEquals(ByteBuffer.wrap(testString.getBytes()), result);
    // Test large write followed by read
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[150000]), new CompletableFuture<>()));
    assertEquals(15, in.read().capacity());
    assertEquals(15, in.read().capacity());
    assertEquals(150000, in.read().capacity());
}
Also used : ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) MockController(io.pravega.client.stream.mock.MockController) Controller(io.pravega.client.control.impl.Controller) Cleanup(lombok.Cleanup) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) ByteBuffer(java.nio.ByteBuffer) Segment(io.pravega.client.segment.impl.Segment) ReadSegment(io.pravega.shared.protocol.netty.WireCommands.ReadSegment) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) CompletableFuture(java.util.concurrent.CompletableFuture) SegmentOutputStreamFactoryImpl(io.pravega.client.segment.impl.SegmentOutputStreamFactoryImpl) SegmentOutputStream(io.pravega.client.segment.impl.SegmentOutputStream) EventSegmentReader(io.pravega.client.segment.impl.EventSegmentReader) MockController(io.pravega.client.stream.mock.MockController) SegmentInputStreamFactoryImpl(io.pravega.client.segment.impl.SegmentInputStreamFactoryImpl) Test(org.junit.Test)
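
The detail worth noting in this example is that one ConnectionPoolImpl backs the mock controller, the segment writer factory, and the segment reader factory, so writes and reads reuse the same underlying connections. A stripped-down sketch of that sharing, using the same classes as above (port stands for the listener port obtained from TestUtils in the test):

ClientConfig clientConfig = ClientConfig.builder().build();
@Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(clientConfig);
@Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(clientConfig, clientCF);
// A single pool is handed to the controller stub and to both segment factories.
@Cleanup Controller controller = new MockController("localhost", port, connectionPool, true);
SegmentOutputStreamFactoryImpl writerFactory = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
SegmentInputStreamFactoryImpl readerFactory = new SegmentInputStreamFactoryImpl(controller, connectionPool);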

Example 13 with ConnectionPoolImpl

Use of io.pravega.client.connection.impl.ConnectionPoolImpl in project pravega by pravega.

The class ReadWriteTest, method readWriteTest:

@Test(timeout = 60000)
public void readWriteTest() throws InterruptedException, ExecutionException {
    String scope = "testMultiReaderWriterScope";
    String readerGroupName = "testMultiReaderWriterReaderGroup";
    // 20 readers -> 20 stream segments (to have max read parallelism)
    ScalingPolicy scalingPolicy = ScalingPolicy.fixed(20);
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(scalingPolicy).build();
    eventsReadFromPravega = new ConcurrentLinkedQueue<>();
    // data used by each of the writers.
    eventData = new AtomicLong();
    // used by readers to maintain a count of events.
    eventReadCount = new AtomicLong();
    stopReadFlag = new AtomicBoolean(false);
    ClientConfig clientConfig = ClientConfig.builder().build();
    try (ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
        StreamManager streamManager = new StreamManagerImpl(controller, cp)) {
        // create a scope
        Boolean createScopeStatus = streamManager.createScope(scope);
        log.info("Create scope status {}", createScopeStatus);
        // create a stream
        Boolean createStreamStatus = streamManager.createStream(scope, STREAM_NAME, config);
        log.info("Create stream status {}", createStreamStatus);
    }
    try (ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, controller, clientFactory)) {
        // start writing events to the stream
        log.info("Creating {} writers", NUM_WRITERS);
        List<CompletableFuture<Void>> writerList = new ArrayList<>();
        for (int i = 0; i < NUM_WRITERS; i++) {
            log.info("Starting writer{}", i);
            writerList.add(startNewWriter(eventData, clientFactory));
        }
        // create a reader group
        log.info("Creating Reader group : {}", readerGroupName);
        readerGroupManager.createReaderGroup(readerGroupName, ReaderGroupConfig.builder().stream(Stream.of(scope, STREAM_NAME)).build());
        log.info("Reader group name {} ", readerGroupManager.getReaderGroup(readerGroupName).getGroupName());
        log.info("Reader group scope {}", readerGroupManager.getReaderGroup(readerGroupName).getScope());
        // create readers
        log.info("Creating {} readers", NUM_READERS);
        List<CompletableFuture<Void>> readerList = new ArrayList<>();
        String readerName = "reader" + RandomFactory.create().nextInt(Integer.MAX_VALUE);
        // start reading events
        for (int i = 0; i < NUM_READERS; i++) {
            log.info("Starting reader{}", i);
            readerList.add(startNewReader(readerName + i, clientFactory, readerGroupName, eventsReadFromPravega, eventData, eventReadCount, stopReadFlag));
        }
        // wait for writers completion
        Futures.allOf(writerList).get();
        ExecutorServiceHelpers.shutdown(writerPool);
        // set stop read flag to true
        stopReadFlag.set(true);
        // wait for readers completion
        Futures.allOf(readerList).get();
        ExecutorServiceHelpers.shutdown(readerPool);
        // delete readergroup
        log.info("Deleting readergroup {}", readerGroupName);
        readerGroupManager.deleteReaderGroup(readerGroupName);
    }
    log.info("All writers have stopped. Setting Stop_Read_Flag. Event Written Count:{}, Event Read " + "Count: {}", eventData.get(), eventsReadFromPravega.size());
    assertEquals(TOTAL_NUM_EVENTS, eventsReadFromPravega.size());
    // check unique events.
    assertEquals(TOTAL_NUM_EVENTS, new TreeSet<>(eventsReadFromPravega).size());
    // seal the stream
    CompletableFuture<Boolean> sealStreamStatus = controller.sealStream(scope, STREAM_NAME);
    log.info("Sealing stream {}", STREAM_NAME);
    assertTrue(sealStreamStatus.get());
    // delete the stream
    CompletableFuture<Boolean> deleteStreamStatus = controller.deleteStream(scope, STREAM_NAME);
    log.info("Deleting stream {}", STREAM_NAME);
    assertTrue(deleteStreamStatus.get());
    // delete the  scope
    CompletableFuture<Boolean> deleteScopeStatus = controller.deleteScope(scope);
    log.info("Deleting scope {}", scope);
    assertTrue(deleteScopeStatus.get());
    log.info("Read write test succeeds");
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) ArrayList(java.util.ArrayList) StreamManagerImpl(io.pravega.client.admin.impl.StreamManagerImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) AtomicLong(java.util.concurrent.atomic.AtomicLong) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) CompletableFuture(java.util.concurrent.CompletableFuture) StreamManager(io.pravega.client.admin.StreamManager) TreeSet(java.util.TreeSet) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ClientConfig(io.pravega.client.ClientConfig) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Test(org.junit.Test)
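
The first try-with-resources block above is the short-lived-pool pattern: the ConnectionPoolImpl exists only to drive the StreamManagerImpl, and both are closed as soon as the scope and stream have been created. Reduced to its essentials (controller is the test's Controller instance; the scope and stream names here are placeholders):

ClientConfig clientConfig = ClientConfig.builder().build();
try (ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
     StreamManager streamManager = new StreamManagerImpl(controller, cp)) {
    // Both resources are AutoCloseable; try-with-resources releases the
    // pooled connections once the metadata operations complete.
    streamManager.createScope("myScope");
    streamManager.createStream("myScope", "myStream",
            StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
}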

Example 14 with ConnectionPoolImpl

Use of io.pravega.client.connection.impl.ConnectionPoolImpl in project pravega by pravega.

The class AppendTest, method appendThroughSegmentClient:

@Test(timeout = 10000)
public void appendThroughSegmentClient() throws Exception {
    String endpoint = "localhost";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "scope";
    String stream = "appendThroughSegmentClient";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
    @Cleanup Controller controller = new MockController(endpoint, port, connectionPool, true);
    controller.createScope(scope);
    controller.createStream(scope, stream, StreamConfiguration.builder().build());
    SegmentOutputStreamFactoryImpl segmentClient = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup SegmentOutputStream out = segmentClient.createOutputStreamForSegment(segment, segmentSealedCallback, EventWriterConfig.builder().build(), DelegationTokenProviderFactory.createWithEmptyToken());
    CompletableFuture<Void> ack = new CompletableFuture<>();
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), ack));
    ack.get(5, TimeUnit.SECONDS);
}
Also used : ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) MockController(io.pravega.client.stream.mock.MockController) Controller(io.pravega.client.control.impl.Controller) Cleanup(lombok.Cleanup) PravegaConnectionListener(io.pravega.segmentstore.server.host.handler.PravegaConnectionListener) CreateSegment(io.pravega.shared.protocol.netty.WireCommands.CreateSegment) Segment(io.pravega.client.segment.impl.Segment) NoSuchSegment(io.pravega.shared.protocol.netty.WireCommands.NoSuchSegment) TableStore(io.pravega.segmentstore.contracts.tables.TableStore) StreamSegmentStore(io.pravega.segmentstore.contracts.StreamSegmentStore) CompletableFuture(java.util.concurrent.CompletableFuture) SegmentOutputStreamFactoryImpl(io.pravega.client.segment.impl.SegmentOutputStreamFactoryImpl) SegmentOutputStream(io.pravega.client.segment.impl.SegmentOutputStream) MockController(io.pravega.client.stream.mock.MockController) Test(org.junit.Test)
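
The acknowledgement handling is the part to take away here: the CompletableFuture passed to PendingEvent.withHeader is completed by the segment writer once the segment store acknowledges the append, so waiting on it is how the test confirms the write landed. The same lines, isolated and commented (out and testString are the variables from the test above):

CompletableFuture<Void> ack = new CompletableFuture<>();
// The writer completes this future when the append is acknowledged by the segment store.
out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), ack));
// Bound the wait so a lost acknowledgement fails the test instead of hanging it.
ack.get(5, TimeUnit.SECONDS);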

Example 15 with ConnectionPoolImpl

Use of io.pravega.client.connection.impl.ConnectionPoolImpl in project pravega by pravega.

The class MetricsTest, method metricsTimeBasedCacheEvictionTest:

@Test(timeout = 120000)
public void metricsTimeBasedCacheEvictionTest() throws Exception {
    ClientConfig clientConfig = ClientConfig.builder().build();
    try (ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
        StreamManager streamManager = new StreamManagerImpl(controller, cp)) {
        boolean createScopeStatus = streamManager.createScope(scope);
        log.info("Create scope status {}", createScopeStatus);
        boolean createStreamStatus = streamManager.createStream(scope, STREAM_NAME, config);
        log.info("Create stream status {}", createStreamStatus);
    }
    try (ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, controller, clientFactory)) {
        @Cleanup EventStreamWriter<String> writer1 = clientFactory.createEventWriter(STREAM_NAME, new UTF8StringSerializer(), EventWriterConfig.builder().build());
        String event = "12345";
        long bytesWritten = TOTAL_NUM_EVENTS * (8 + event.length());
        writeEvents(event, writer1);
        String readerGroupName1 = readerGroupName + "1";
        log.info("Creating Reader group : {}", readerGroupName1);
        readerGroupManager.createReaderGroup(readerGroupName1, ReaderGroupConfig.builder().stream(Stream.of(scope, STREAM_NAME)).automaticCheckpointIntervalMillis(2000).build());
        EventStreamReader<String> reader1 = clientFactory.createReader(readerName, readerGroupName1, new UTF8StringSerializer(), ReaderConfig.builder().build());
        readAllEvents(reader1);
        final String[] streamTags = segmentTags(scope + "/" + STREAM_NAME + "/0.#epoch.0");
        assertEquals(bytesWritten, (long) MetricRegistryUtils.getCounter(SEGMENT_READ_BYTES, streamTags).count());
        // Wait for cache eviction to happen
        Thread.sleep(5000);
        String readerGroupName2 = readerGroupName + "2";
        log.info("Creating Reader group : {}", readerGroupName2);
        readerGroupManager.createReaderGroup(readerGroupName2, ReaderGroupConfig.builder().stream(Stream.of(scope, STREAM_NAME)).automaticCheckpointIntervalMillis(2000).build());
        EventStreamReader<String> reader2 = clientFactory.createReader(readerName, readerGroupName2, new UTF8StringSerializer(), ReaderConfig.builder().build());
        readAllEvents(reader2);
        // The metric is evicted from the cache after the cache eviction duration elapses.
        // The count restarts from 0 rather than adding to the previously read bytes, because the cache entry was evicted.
        assertEquals(bytesWritten, (long) MetricRegistryUtils.getCounter(SEGMENT_READ_BYTES, streamTags).count());
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 1.0);
        // Seal segment 0, create segment 1
        CompletableFuture<Boolean> scaleStatus = controller.scaleStream(new StreamImpl(scope, STREAM_NAME), Collections.singletonList(0L), map, executorService()).getFuture();
        Assert.assertTrue(scaleStatus.get());
        @Cleanup EventStreamWriter<String> writer2 = clientFactory.createEventWriter(STREAM_NAME, new UTF8StringSerializer(), EventWriterConfig.builder().build());
        writeEvents(event, writer2);
        readAllEvents(reader1);
        final String[] streamTags2nd = segmentTags(scope + "/" + STREAM_NAME + "/1.#epoch.1");
        assertEquals(bytesWritten, (long) MetricRegistryUtils.getCounter(SEGMENT_READ_BYTES, streamTags2nd).count());
        readerGroupManager.deleteReaderGroup(readerGroupName1);
        readerGroupManager.deleteReaderGroup(readerGroupName2);
        CompletableFuture<Boolean> sealStreamStatus = controller.sealStream(scope, STREAM_NAME);
        log.info("Sealing stream {}", STREAM_NAME);
        assertTrue(sealStreamStatus.get());
        CompletableFuture<Boolean> deleteStreamStatus = controller.deleteStream(scope, STREAM_NAME);
        log.info("Deleting stream {}", STREAM_NAME);
        assertTrue(deleteStreamStatus.get());
        CompletableFuture<Boolean> deleteScopeStatus = controller.deleteScope(scope);
        log.info("Deleting scope {}", scope);
        assertTrue(deleteScopeStatus.get());
    }
    log.info("Metrics Time based Cache Eviction test succeeds");
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) HashMap(java.util.HashMap) ConnectionPoolImpl(io.pravega.client.connection.impl.ConnectionPoolImpl) StreamManagerImpl(io.pravega.client.admin.impl.StreamManagerImpl) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) Cleanup(lombok.Cleanup) ConnectionFactory(io.pravega.client.connection.impl.ConnectionFactory) ClientFactoryImpl(io.pravega.client.stream.impl.ClientFactoryImpl) StreamManager(io.pravega.client.admin.StreamManager) StreamImpl(io.pravega.client.stream.impl.StreamImpl) UTF8StringSerializer(io.pravega.client.stream.impl.UTF8StringSerializer) ClientConfig(io.pravega.client.ClientConfig) ReaderGroupManagerImpl(io.pravega.client.admin.impl.ReaderGroupManagerImpl) Test(org.junit.Test)
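
The metric assertion pattern the test repeats is compact enough to isolate: segmentTags is the test helper that builds the tag array for a qualified segment name, and SEGMENT_READ_BYTES is the metric key the test imports. A sketch of the lookup, assuming those same names:

// Look up the per-segment read-bytes counter by metric name and segment tags,
// then compare it against the number of bytes the writer produced.
String[] tags = segmentTags(scope + "/" + STREAM_NAME + "/0.#epoch.0");
long readBytes = (long) MetricRegistryUtils.getCounter(SEGMENT_READ_BYTES, tags).count();
assertEquals(bytesWritten, readBytes);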

Aggregations

ConnectionPoolImpl (io.pravega.client.connection.impl.ConnectionPoolImpl): 45 usages
SocketConnectionFactoryImpl (io.pravega.client.connection.impl.SocketConnectionFactoryImpl): 34 usages
Test (org.junit.Test): 34 usages
Cleanup (lombok.Cleanup): 33 usages
ClientConfig (io.pravega.client.ClientConfig): 31 usages
StreamManager (io.pravega.client.admin.StreamManager): 20 usages
Controller (io.pravega.client.control.impl.Controller): 20 usages
ConnectionPool (io.pravega.client.connection.impl.ConnectionPool): 19 usages
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 17 usages
StreamManagerImpl (io.pravega.client.admin.impl.StreamManagerImpl): 14 usages
MockController (io.pravega.client.stream.mock.MockController): 14 usages
ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager): 12 usages
ClientFactoryImpl (io.pravega.client.stream.impl.ClientFactoryImpl): 12 usages
ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory): 11 usages
Stream (io.pravega.client.stream.Stream): 11 usages
ReaderGroupManagerImpl (io.pravega.client.admin.impl.ReaderGroupManagerImpl): 10 usages
ScalingPolicy (io.pravega.client.stream.ScalingPolicy): 9 usages
StreamImpl (io.pravega.client.stream.impl.StreamImpl): 9 usages
Segment (io.pravega.client.segment.impl.Segment): 8 usages
PravegaConnectionListener (io.pravega.segmentstore.server.host.handler.PravegaConnectionListener): 8 usages