Example 6 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in project pravega by pravega.

From the class BoundedStreamReaderTest, method testBoundedStreamWithTruncationTest.

@Test(timeout = 60000)
public void testBoundedStreamWithTruncationTest() throws Exception {
    createScope(SCOPE);
    createStream(STREAM3);
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, ClientConfig.builder().controllerURI(controllerUri).build());
    @Cleanup EventStreamWriter<String> writer1 = clientFactory.createEventWriter(STREAM3, serializer, EventWriterConfig.builder().build());
    // Prep the stream with data.
    // 1. Write four events, each with an event size of 30 bytes.
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(1)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(2)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(3)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(4)).get();
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, controllerUri);
    // StreamCuts pointing to offset 30 (start of event 2) and offset 60 (end of event 2) in segment 0.
    StreamCut offset30SC = getStreamCut(STREAM3, 30L, 0);
    StreamCut offset60SC = getStreamCut(STREAM3, 60L, 0);
    groupManager.createReaderGroup("group", ReaderGroupConfig.builder().disableAutomaticCheckpoints()
            // startStreamCut points to the second event in the stream; endStreamCut points to the offset after two events (2 * 30 (event size) = 60).
            .stream(Stream.of(SCOPE, STREAM3), offset30SC, offset60SC)
            .build());
    final ReaderGroup rg = groupManager.getReaderGroup("group");
    // Create a reader
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "group", serializer, ReaderConfig.builder().build());
    // 2. Verify that the startStreamCut/endStreamCut configuration is enforced.
    readAndVerify(reader, 2);
    // The following read should not return events 3, 4 due to the endStreamCut configuration.
    Assert.assertNull("Null is expected", reader.readNextEvent(2000).getEvent());
    truncateStream(STREAM3, offset60SC);
    // Truncation should not affect the reader, as it is already past the truncation point.
    Assert.assertNull("Null is expected", reader.readNextEvent(2000).getEvent());
    // Reset the reader group with a startStreamCut that has already been truncated.
    rg.resetReaderGroup(ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, STREAM3), offset30SC, StreamCut.UNBOUNDED).build());
    verifyReinitializationRequiredException(reader);
    // Create a reader
    @Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("readerId2", "group", serializer, ReaderConfig.builder().build());
    assertThrows(TruncatedDataException.class, () -> reader2.readNextEvent(10000));
    // A subsequent read returns the data present after the truncation point: event 3 is returned here, since the stream was truncated at offset 60 (2 * 30).
    readAndVerify(reader2, 3);
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamCut(io.pravega.client.stream.StreamCut) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) Test(org.junit.Test)
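
Note: the getStreamCut(...) helper used throughout these bounded-reader tests is not shown in the snippets. A minimal sketch of what it plausibly does follows, assuming it builds a StreamCut that pins each listed segment to the given byte offset via the impl-package StreamCutImpl; the class name StreamCutHelper and the explicit scope parameter are illustrative, not part of the original test.

import io.pravega.client.segment.impl.Segment;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamCut;
import io.pravega.client.stream.impl.StreamCutImpl;

import java.util.HashMap;
import java.util.Map;

final class StreamCutHelper {

    // Builds a StreamCut that maps each listed segment of scope/streamName to the same byte offset.
    // In the tests above the scope is a class-level constant, so the real helper likely takes only
    // (streamName, offset, segmentNumbers); the extra scope parameter keeps this sketch self-contained.
    static StreamCut getStreamCut(String scope, String streamName, long offset, int... segmentNumbers) {
        Map<Segment, Long> positions = new HashMap<>();
        for (int segmentNumber : segmentNumbers) {
            positions.put(new Segment(scope, streamName, segmentNumber), offset);
        }
        return new StreamCutImpl(Stream.of(scope, streamName), positions);
    }
}

With 30-byte events, a cut such as getStreamCut(STREAM3, 60L, 0) marks the position right after the second event in segment 0, which is exactly where the endStreamCut in Example 6 stops the reader.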

Example 7 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in project pravega by pravega.

From the class BoundedStreamReaderTest, method testReaderGroupWithSameBounds.

@Test(timeout = 60000)
public void testReaderGroupWithSameBounds() throws Exception {
    createScope(SCOPE);
    createStream(STREAM1);
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, ClientConfig.builder().controllerURI(controllerUri).build());
    @Cleanup EventStreamWriter<String> writer1 = clientFactory.createEventWriter(STREAM1, serializer, EventWriterConfig.builder().build());
    // 1. Prep the stream with data.
    // Write two events with an event size of 30 bytes.
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(1)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(2)).get();
    // 2. Create a StreamCut pointing to offset 30.
    StreamCut streamCut = getStreamCut(STREAM1, 30L, 0);
    // 3. Create a ReaderGroup where the lower and upper bound are the same.
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, controllerUri);
    groupManager.createReaderGroup("group", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, STREAM1), streamCut, streamCut).build());
    // 4. Create a reader
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "group", serializer, ReaderConfig.builder().build());
    // 5. Verify that the configuration is enforced: with identical lower and upper bounds, no events are readable.
    Assert.assertNull("Null is expected", reader.readNextEvent(1000).getEvent());
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamCut(io.pravega.client.stream.StreamCut) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) Test(org.junit.Test)
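
The serializer, keyGenerator and getEventData fields that these BoundedStreamReaderTest snippets rely on are defined elsewhere in the test class. A plausible setup is sketched below; the exact payload format and field initialization are assumptions, only the types follow from how the fields are used above.

import io.pravega.client.stream.impl.JavaSerializer;

import java.util.Random;
import java.util.function.Function;
import java.util.function.Supplier;

class BoundedStreamReaderTestFixtures {
    // String events are (de)serialized with the stock Java serializer.
    final JavaSerializer<String> serializer = new JavaSerializer<>();
    private final Random random = new Random();
    // Random routing keys spread events across segments, which matters once the stream is scaled.
    final Supplier<String> keyGenerator = () -> String.valueOf(random.nextInt());
    // Each event embeds its number; in the real tests the payload is sized so that one serialized
    // event occupies 30 bytes, hence the 30/60/90 offsets used when building StreamCuts.
    final Function<Integer, String> getEventData = eventNumber -> eventNumber + ":constant data";
}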

Example 8 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in project pravega by pravega.

From the class BoundedStreamReaderTest, method testBoundedStreamWithScaleTest.

@Test(timeout = 60000)
public void testBoundedStreamWithScaleTest() throws Exception {
    createScope(SCOPE);
    createStream(STREAM1);
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, ClientConfig.builder().controllerURI(controllerUri).build());
    @Cleanup EventStreamWriter<String> writer1 = clientFactory.createEventWriter(STREAM1, serializer, EventWriterConfig.builder().build());
    // Prep the stream with data.
    // 1. Write two events with an event size of 30 bytes.
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(1)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(2)).get();
    // 2. Scale the stream into three segments.
    Map<Double, Double> newKeyRanges = new HashMap<>();
    newKeyRanges.put(0.0, 0.33);
    newKeyRanges.put(0.33, 0.66);
    newKeyRanges.put(0.66, 1.0);
    scaleStream(STREAM1, newKeyRanges);
    // 3. Write three more events with an event size of 30 bytes.
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(3)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(4)).get();
    writer1.writeEvent(keyGenerator.get(), getEventData.apply(5)).get();
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, controllerUri);
    ReaderGroupConfig readerGroupCfg1 = ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(0)
            // startStreamCut points to the current HEAD of the stream; endStreamCut points to the offset after two events (2 * 30 (event size) = 60).
            .stream(Stream.of(SCOPE, STREAM1), StreamCut.UNBOUNDED, getStreamCut(STREAM1, 60L, 0))
            .build();
    groupManager.createReaderGroup("group", readerGroupCfg1);
    ReaderGroup readerGroup = groupManager.getReaderGroup("group");
    // Create a reader
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId1", "group", serializer, ReaderConfig.builder().build());
    // 4. Verify that the endStreamCut configuration is enforced.
    readAndVerify(reader1, 1, 2);
    // The following read should not return events 3, 4 and 5 due to the endStreamCut configuration.
    Assert.assertNull("Null is expected", reader1.readNextEvent(2000).getEvent());
    final ReaderGroupConfig readerGroupCfg2 = ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(0)
            // startStreamCut points to the offset after two events (2 * 30 (event size) = 60); endStreamCut points to offset 90 in each of the post-scale segments 1, 2 and 3.
            .stream(Stream.of(SCOPE, STREAM1), getStreamCut(STREAM1, 60L, 0), getStreamCut(STREAM1, 90L, 1, 2, 3))
            .build();
    readerGroup.resetReaderGroup(readerGroupCfg2);
    verifyReinitializationRequiredException(reader1);
    // Create a reader
    @Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("readerId2", "group", serializer, ReaderConfig.builder().build());
    assertNull(reader2.readNextEvent(100).getEvent());
    readerGroup.initiateCheckpoint("c1", executorService());
    readAndVerify(reader2, 3, 4, 5);
    Assert.assertNull("Null is expected", reader2.readNextEvent(2000).getEvent());
}
Also used : ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) HashMap(java.util.HashMap) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) Test(org.junit.Test)
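
The scaleStream(STREAM1, newKeyRanges) call in Example 8 is another test helper whose body is not shown. Assuming the test holds a Controller handle (for instance from its local controller wrapper) and a ScheduledExecutorService, the helper presumably seals the single initial segment and replaces it with the three new key ranges, roughly as sketched below; the controller and executor parameters stand in for the surrounding test infrastructure and are assumptions.

import io.pravega.client.control.impl.Controller;
import io.pravega.client.stream.Stream;

import java.util.Collections;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;

final class ScaleHelper {

    // Seals segment 0 of scope/streamName and re-creates the key space according to newKeyRanges.
    static void scaleStream(Controller controller, String scope, String streamName,
                            Map<Double, Double> newKeyRanges, ScheduledExecutorService executor) {
        Stream stream = Stream.of(scope, streamName);
        // scaleStream returns a cancellable request whose future completes with true once the scale succeeds.
        boolean scaled = controller.scaleStream(stream, Collections.singletonList(0L), newKeyRanges, executor)
                .getFuture().join();
        if (!scaled) {
            throw new IllegalStateException("Scaling of stream " + stream + " did not succeed");
        }
    }
}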

Example 9 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in project pravega by pravega.

From the class MultiReadersEndToEndTest, method runTest.

private void runTest(final Set<String> streamNames, final int numParallelReaders, final int numSegments) throws Exception {
    @Cleanup StreamManager streamManager = StreamManager.create(ClientConfig.builder().controllerURI(SETUP_UTILS.getControllerUri()).build());
    streamManager.createScope(SETUP_UTILS.getScope());
    streamNames.stream().forEach(stream -> {
        streamManager.createStream(SETUP_UTILS.getScope(), stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(numSegments)).build());
        log.info("Created stream: {}", stream);
    });
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SETUP_UTILS.getScope(), ClientConfig.builder().controllerURI(SETUP_UTILS.getControllerUri()).build());
    streamNames.stream().forEach(stream -> {
        @Cleanup EventStreamWriter<Integer> eventWriter = clientFactory.createEventWriter(stream, new IntegerSerializer(), EventWriterConfig.builder().build());
        for (int i = 0; i < NUM_TEST_EVENTS; i++) {
            eventWriter.writeEvent(String.valueOf(i), i);
        }
        eventWriter.flush();
        log.info("Wrote {} events", NUM_TEST_EVENTS);
    });
    final String readerGroupName = "testreadergroup" + RandomStringUtils.randomAlphanumeric(10).toLowerCase();
    @Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(SETUP_UTILS.getScope(), ClientConfig.builder().controllerURI(SETUP_UTILS.getControllerUri()).build());
    ReaderGroupConfig.ReaderGroupConfigBuilder builder = ReaderGroupConfig.builder();
    streamNames.forEach(s -> builder.stream(Stream.of(SETUP_UTILS.getScope(), s)));
    readerGroupManager.createReaderGroup(readerGroupName, builder.build());
    Collection<Integer> read = readAllEvents(numParallelReaders, clientFactory, readerGroupName, numSegments);
    Assert.assertEquals(NUM_TEST_EVENTS * streamNames.size(), read.size());
    // Check unique events.
    Assert.assertEquals(NUM_TEST_EVENTS, new TreeSet<>(read).size());
    readerGroupManager.deleteReaderGroup(readerGroupName);
}
Also used : ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamManager(io.pravega.client.admin.StreamManager) MockStreamManager(io.pravega.client.stream.mock.MockStreamManager) TreeSet(java.util.TreeSet) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) IntegerSerializer(io.pravega.test.integration.utils.IntegerSerializer)
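
Example 9 writes Integer events through an IntegerSerializer (io.pravega.test.integration.utils.IntegerSerializer), whose implementation is not part of the snippet. A serializer fulfilling the io.pravega.client.stream.Serializer contract for int values would look roughly like this sketch:

import io.pravega.client.stream.Serializer;

import java.nio.ByteBuffer;

public class IntegerSerializer implements Serializer<Integer> {

    @Override
    public ByteBuffer serialize(Integer value) {
        // Encode the int as 4 big-endian bytes and rewind so the buffer is ready for reading.
        ByteBuffer result = ByteBuffer.allocate(4).putInt(value);
        result.rewind();
        return result;
    }

    @Override
    public Integer deserialize(ByteBuffer serializedValue) {
        return serializedValue.getInt();
    }
}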

Example 10 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in project pravega by pravega.

From the class ReadWithReadPermissionsTest, method readsFromADifferentScopeTest.

@Test
public void readsFromADifferentScopeTest() {
    String marketDataWriter = "writer";
    String marketDataReader = "reader";
    String password = "test-password";
    String marketDataScope = "marketdata";
    String computeScope = "compute";
    String stream1 = "stream1";
    final Map<String, String> passwordInputFileEntries = new HashMap<>();
    passwordInputFileEntries.put(marketDataWriter, String.join(";",
            // Allows the user to create the "marketdata" scope, for this test.
            "prn::/,READ_UPDATE",
            // Allows the user to create streams (and other scope children).
            "prn::/scope:marketdata,READ_UPDATE",
            // Provides the user all access to child objects of the "marketdata" scope.
            "prn::/scope:marketdata/*,READ_UPDATE"));
    passwordInputFileEntries.put(marketDataReader, String.join(";",
            // Allows the user to create the "compute" home scope.
            "prn::/,READ_UPDATE",
            // Allows the user to create reader groups under its home scope.
            "prn::/scope:compute,READ_UPDATE",
            // Provides the user all access to child objects of the "compute" scope.
            "prn::/scope:compute/*,READ_UPDATE",
            // Provides the user read access to the "marketdata/stream1" stream.
            "prn::/scope:marketdata/stream:stream1,READ"));
    // Setup and run the servers
    @Cleanup final ClusterWrapper cluster = ClusterWrapper.builder().authEnabled(true).tokenSigningKeyBasis("secret").tokenTtlInSeconds(600).rgWritesWithReadPermEnabled(false).passwordAuthHandlerEntries(TestUtils.preparePasswordInputFileEntries(passwordInputFileEntries, password)).build();
    cluster.start();
    // Prepare a client config for the `marketDataWriter`, whose home scope is "marketdata"
    final ClientConfig writerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(password, marketDataWriter)).build();
    // Create scope/stream `marketdata/stream1`
    TestUtils.createScopeAndStreams(writerClientConfig, marketDataScope, Arrays.asList(stream1));
    // Write a message to stream `marketdata/stream1`
    TestUtils.writeDataToStream(marketDataScope, stream1, "test message", writerClientConfig);
    // Prepare a client config for `marketDataReader`, whose home scope is "compute"
    ClientConfig readerClientConfig = ClientConfig.builder().controllerURI(URI.create(cluster.controllerUri())).credentials(new DefaultCredentials(password, marketDataReader)).build();
    // Create scope `compute` (without any streams)
    TestUtils.createScopeAndStreams(readerClientConfig, computeScope, new ArrayList<>());
    // Create a reader group config that enables a user to read data from `marketdata/stream1`
    ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder().stream(Stream.of(marketDataScope, stream1)).disableAutomaticCheckpoints().build();
    // Create a reader-group for user `marketDataReader` in `compute` scope, which is its home scope.
    @Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(computeScope, readerClientConfig);
    readerGroupManager.createReaderGroup("testRg", readerGroupConfig);
    @Cleanup EventStreamClientFactory readerClientFactory = EventStreamClientFactory.withScope(computeScope, readerClientConfig);
    @Cleanup EventStreamReader<String> reader = readerClientFactory.createReader("readerId", "testRg", new JavaSerializer<String>(), ReaderConfig.builder().initialAllocationDelay(0).build());
    String readMessage = reader.readNextEvent(5000).getEvent();
    assertEquals("test message", readMessage);
}
Also used : ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) HashMap(java.util.HashMap) ClusterWrapper(io.pravega.test.integration.demo.ClusterWrapper) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) DefaultCredentials(io.pravega.shared.security.auth.DefaultCredentials) ClientConfig(io.pravega.client.ClientConfig) Test(org.junit.Test)
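
TestUtils.writeDataToStream(...) in Example 10 wraps the usual writer boilerplate and its body is not shown here. Under the assumption that it simply opens a client factory with the writer's credentials and writes a single String event with a JavaSerializer, a sketch could look like this; the helper name and the absence of an explicit routing key are assumptions.

import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.impl.JavaSerializer;
import lombok.Cleanup;

final class WriteHelper {

    // Writes one String event to scope/stream using the supplied (already authenticated) client config.
    static void writeDataToStream(String scope, String stream, String message, ClientConfig clientConfig) {
        @Cleanup
        EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
        @Cleanup
        EventStreamWriter<String> writer = clientFactory.createEventWriter(
                stream, new JavaSerializer<String>(), EventWriterConfig.builder().build());
        // writeEvent returns a CompletableFuture<Void>; join() blocks until the event is durably written.
        writer.writeEvent(message).join();
    }
}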

Aggregations

EventStreamClientFactory (io.pravega.client.EventStreamClientFactory): 57 usages
Cleanup (lombok.Cleanup): 50 usages
Test (org.junit.Test): 41 usages
ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager): 36 usages
ClientConfig (io.pravega.client.ClientConfig): 21 usages
ReaderGroup (io.pravega.client.stream.ReaderGroup): 19 usages
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 19 usages
StreamCut (io.pravega.client.stream.StreamCut): 19 usages
HashMap (java.util.HashMap): 18 usages
StreamManager (io.pravega.client.admin.StreamManager): 17 usages
ReaderGroupConfig (io.pravega.client.stream.ReaderGroupConfig): 16 usages
Stream (io.pravega.client.stream.Stream): 16 usages
Map (java.util.Map): 16 usages
Segment (io.pravega.client.segment.impl.Segment): 13 usages
EventWriterConfig (io.pravega.client.stream.EventWriterConfig): 12 usages
Controller (io.pravega.client.control.impl.Controller): 11 usages
EventStreamWriter (io.pravega.client.stream.EventStreamWriter): 10 usages
JavaSerializer (io.pravega.client.stream.impl.JavaSerializer): 10 usages
Futures (io.pravega.common.concurrent.Futures): 10 usages
ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory): 9 usages