Search in sources:

Example 26 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in the project pravega by pravega.

From the class UnreadBytesTest, the method testUnreadBytes.

/**
 * Verifies that {@code ReaderGroup#getMetrics().unreadBytes()} reports zero once all
 * written events are read and checkpointed, and reports the size of data written
 * after the last checkpoint.
 */
@Test(timeout = 50000)
public void testUnreadBytes() throws Exception {
    // Event-rate scaling policy: target 10 events/sec, scale factor 2, minimum 1 segment.
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    String streamName = "testUnreadBytes";
    Controller controller = PRAVEGA.getLocalController();
    controller.createScope("unreadbytes").get();
    controller.createStream("unreadbytes", streamName, config).get();
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("unreadbytes", ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    String group = "testUnreadBytes-group";
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes", ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    // Automatic checkpoints are disabled so the test fully controls when positions are persisted.
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/" + streamName).build());
    @Cleanup ReaderGroup readerGroup = groupManager.getReaderGroup(group);
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(), ReaderConfig.builder().build());
    // Nothing has been written yet, so no bytes should be unread.
    long unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    // Write and read back two 30-byte events.
    writer.writeEvent("0", "data of size 30").get();
    writer.writeEvent("0", "data of size 30").get();
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("data of size 30", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("data of size 30", secondEvent.getEvent());
    // Trigger a checkpoint so the readers' positions are persisted to the reader group state.
    CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
    EventRead<String> chkpointEvent = reader.readNextEvent(15000);
    assertEquals("test", chkpointEvent.getCheckpointName());
    // The stream is drained: the next read returns an empty, non-checkpoint event.
    EventRead<String> emptyEvent = reader.readNextEvent(100);
    assertEquals(false, emptyEvent.isCheckpoint());
    assertEquals(null, emptyEvent.getEvent());
    chkPointResult.join();
    // Everything written so far has been read and checkpointed; unread bytes must be zero.
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    // One 30-byte event written after the checkpoint should be reported as unread.
    writer.writeEvent("0", "data of size 30").get();
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 30, unreadBytes);
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Controller(io.pravega.client.control.impl.Controller) Cleanup(lombok.Cleanup) Checkpoint(io.pravega.client.stream.Checkpoint) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Test(org.junit.Test)

Example 27 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in the project pravega by pravega.

From the class UnreadBytesTest, the method testUnreadBytesWithCheckpointsAndStreamCuts.

/**
 * Verifies that generating a stream-cut does NOT move the last checkpointed position used
 * by {@code unreadBytes()}: bytes written after the last checkpoint remain counted as
 * unread even after the readers advance past a later stream-cut.
 */
@Test
public void testUnreadBytesWithCheckpointsAndStreamCuts() throws Exception {
    // Event-rate scaling policy: target 10 events/sec, scale factor 2, minimum 1 segment.
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    String streamName = "testUnreadBytesWithCheckpointsAndStreamCuts";
    Controller controller = PRAVEGA.getLocalController();
    controller.createScope("unreadbytes").get();
    controller.createStream("unreadbytes", streamName, config).get();
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("unreadbytes", ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    String group = "testUnreadBytesWithCheckpointsAndStreamCuts-group";
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes", ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    // Automatic checkpoints are disabled so the test fully controls checkpoint/stream-cut timing.
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/" + streamName).build());
    @Cleanup ReaderGroup readerGroup = groupManager.getReaderGroup(group);
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(), ReaderConfig.builder().build());
    // Nothing has been written yet, so no bytes should be unread.
    long unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    // Write and read back two 30-byte events.
    writer.writeEvent("0", "data of size 30").get();
    writer.writeEvent("0", "data of size 30").get();
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("data of size 30", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("data of size 30", secondEvent.getEvent());
    // Trigger a checkpoint so the readers' positions are persisted to the reader group state.
    CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
    EventRead<String> chkpointEvent = reader.readNextEvent(15000);
    assertEquals("test", chkpointEvent.getCheckpointName());
    // The stream is drained: the next read returns an empty, non-checkpoint event.
    EventRead<String> emptyEvent = reader.readNextEvent(100);
    assertEquals(false, emptyEvent.isCheckpoint());
    assertEquals(null, emptyEvent.getEvent());
    chkPointResult.join();
    // Everything written so far has been read and checkpointed; unread bytes must be zero.
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    // Starting from checkpoint "test", data of size 30 is written and not yet read.
    writer.writeEvent("0", "data of size 30").get();
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 30, unreadBytes);
    // Trigger a stream-cut and let the reader consume the pending event.
    CompletableFuture<Map<Stream, StreamCut>> scResult = readerGroup.generateStreamCuts(executorService());
    EventRead<String> scEvent = reader.readNextEvent(15000);
    reader.readNextEvent(100);
    // The stream-cut does not replace the checkpoint, so the 30 bytes are still unread
    // relative to the last checkpointed position.
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 30, unreadBytes);
    // Starting from checkpoint "test", data of size 60 is written => stream-cut does not change last checkpointed position.
    writer.writeEvent("0", "data of size 30").get();
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 60, unreadBytes);
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Controller(io.pravega.client.control.impl.Controller) Cleanup(lombok.Cleanup) Checkpoint(io.pravega.client.stream.Checkpoint) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Test(org.junit.Test)

Example 28 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in the project pravega by pravega.

From the class EndToEndReaderGroupTest, the method testGenerateStreamCutsWithScaling.

/**
 * Verifies that stream cuts generated after a scale operation reference the post-scale
 * segments (segment 1's successor and the two splits of segment 0), once both readers
 * have passed a checkpoint and started reading the new segments.
 */
@Test(timeout = 40000)
public void testGenerateStreamCutsWithScaling() throws Exception {
    String streamName = "testGenerateStreamCutsWithScaling";
    final Stream stream = Stream.of(SCOPE, streamName);
    final String group = "testGenerateStreamCutsWithScaling-group";
    createScope(SCOPE);
    // Fixed scaling policy with 2 initial segments (Segment 0 and Segment 1).
    createStream(SCOPE, streamName, ScalingPolicy.fixed(2));
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    // Prep the stream with data.
    // 1.Write 2 events with event size of 30 to Segment 0.
    writer.writeEvent(keyGenerator.apply("0.1"), getEventData.apply(0)).join();
    writer.writeEvent(keyGenerator.apply("0.1"), getEventData.apply(0)).join();
    // 2. Write 2 events with event size of 30 to Segment 1.
    writer.writeEvent(keyGenerator.apply("0.9"), getEventData.apply(1)).join();
    writer.writeEvent(keyGenerator.apply("0.9"), getEventData.apply(1)).join();
    // 3. Manually scale stream. Split Segment 0 to Segment 2, Segment 3
    Map<Double, Double> newKeyRanges = new HashMap<>();
    newKeyRanges.put(0.0, 0.25);
    newKeyRanges.put(0.25, 0.5);
    newKeyRanges.put(0.5, 1.0);
    scaleStream(streamName, newKeyRanges);
    // 4. Write events to segment 2
    writer.writeEvent(keyGenerator.apply("0.1"), getEventData.apply(2));
    // 5. Write events to segment 3
    writer.writeEvent(keyGenerator.apply("0.3"), getEventData.apply(3));
    // 6. Write events to Segment 1.
    writer.writeEvent(keyGenerator.apply("0.9"), getEventData.apply(1));
    // Short group refresh interval (200 ms) so readers pick up state changes quickly.
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, PRAVEGA.getControllerURI());
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(200).stream(stream).build());
    ReaderGroup readerGroup = groupManager.getReaderGroup(group);
    // 7. Create two readers and read 1 event from both the readers
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("reader1", group, serializer, ReaderConfig.builder().build());
    @Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("reader2", group, serializer, ReaderConfig.builder().build());
    // 8. Read 1 event from both the readers.
    String reader1Event = reader1.readNextEvent(15000).getEvent();
    String reader2Event = reader2.readNextEvent(15000).getEvent();
    // 9. Read all events from segment 0.
    // Segment assignment to readers is nondeterministic; branch on which reader got Segment 0.
    if (reader1Event.equalsIgnoreCase(getEventData.apply(0))) {
        assertEquals(getEventData.apply(0), reader1.readNextEvent(15000).getEvent());
        assertEquals(getEventData.apply(1), reader2Event);
        readAndVerify(reader2, 1);
    } else {
        assertEquals(getEventData.apply(1), reader1.readNextEvent(15000).getEvent());
        assertEquals(getEventData.apply(0), reader2Event);
        readAndVerify(reader2, 0);
    }
    // Readers see the empty segments
    EventRead<String> data = reader2.readNextEvent(100);
    assertNull(data.getEvent());
    data = reader1.readNextEvent(100);
    assertNull(data.getEvent());
    // A checkpoint is required before readers are handed the post-scale successor segments.
    @Cleanup("shutdown") InlineExecutor backgroundExecutor = new InlineExecutor();
    readerGroup.initiateCheckpoint("cp1", backgroundExecutor);
    data = reader1.readNextEvent(5000);
    assertEquals("cp1", data.getCheckpointName());
    data = reader2.readNextEvent(5000);
    assertEquals("cp1", data.getCheckpointName());
    // New segments are available to read
    reader1Event = reader1.readNextEvent(5000).getEvent();
    assertNotNull(reader1Event);
    reader2Event = reader2.readNextEvent(5000).getEvent();
    assertNotNull(reader2Event);
    // 10. Generate StreamCuts
    CompletableFuture<Map<Stream, StreamCut>> sc = readerGroup.generateStreamCuts(backgroundExecutor);
    // The reader group state will be updated after 1 second.
    TimeUnit.SECONDS.sleep(1);
    // These reads let each reader publish its position so the stream-cut can complete.
    reader1Event = reader1.readNextEvent(500).getEvent();
    reader2Event = reader2.readNextEvent(500).getEvent();
    // 11 Validate the StreamCut generated.
    // wait until the streamCut is obtained.
    assertTrue(Futures.await(sc));
    // NOTE(review): getSegment(streamName, 4, 1) presumably maps to post-scale Segment 1's
    // successor — confirm against the getSegment helper's (id, epoch) convention.
    Set<Segment> expectedSegments = ImmutableSet.<Segment>builder().add(// 1 event read from segment 1
    getSegment(streamName, 4, 1)).add(// 1 event read from segment 2 or 3.
    getSegment(streamName, 2, 1)).add(getSegment(streamName, 3, 1)).build();
    Map<Stream, StreamCut> scMap = sc.join();
    assertEquals("StreamCut for a single stream expected", 1, scMap.size());
    assertEquals(expectedSegments, scMap.get(stream).asImpl().getPositions().keySet());
}
Also used : ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamCut(io.pravega.client.stream.StreamCut) HashMap(java.util.HashMap) ReaderGroup(io.pravega.client.stream.ReaderGroup) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup) Segment(io.pravega.client.segment.impl.Segment) InlineExecutor(io.pravega.test.common.InlineExecutor) Stream(io.pravega.client.stream.Stream) HashMap(java.util.HashMap) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Test(org.junit.Test)

Example 29 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in the project pravega by pravega.

From the class EndToEndReaderGroupTest, the method writeTestEvent.

/**
 * Writes a single test event (the decimal form of {@code eventId}) to the given stream,
 * blocking until the write is acknowledged. Client and writer are closed via Lombok's
 * {@code @Cleanup} when the method returns.
 */
private void writeTestEvent(String scope, String streamName, int eventId) {
    ClientConfig clientConfig = ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build();
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
    EventWriterConfig writerConfig = EventWriterConfig.builder().build();
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), writerConfig);
    // Routing key "0" keeps all test events on the same routing key; join() blocks until acked.
    writer.writeEvent("0", Integer.toString(eventId)).join();
}
Also used : EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup)

Example 30 with EventStreamClientFactory

Use of io.pravega.client.EventStreamClientFactory in the project pravega by pravega.

From the class PravegaSanityTests, the method testWriteAndReadAnEvent.

/**
 * Method to test Pravega by writing an event to a stream and reading it back.
 * This method creates the scope and stream too.
 * @param scope Pravega Scope name.
 * @param stream Pravega Stream name.
 * @param message Message to be written.
 * @param clientConfig ClientConfig for Pravega.
 */
public static void testWriteAndReadAnEvent(String scope, String stream, String message, ClientConfig clientConfig) {
    int numSegments = 1;
    @Cleanup StreamManager streamManager = StreamManager.create(clientConfig);
    assertNotNull(streamManager);
    boolean isScopeCreated = streamManager.createScope(scope);
    assertTrue("Failed to create scope", isScopeCreated);
    // Single fixed segment is sufficient for a sanity write/read round-trip.
    boolean isStreamCreated = streamManager.createStream(scope, stream, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(numSegments)).build());
    // Use the statically imported assertTrue consistently (was Assert.assertTrue) and
    // drop the stray trailing space from the failure message.
    assertTrue("Failed to create the stream", isStreamCreated);
    log.info("write an event");
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
    // Write an event to the stream and flush so it is durable before we read.
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(stream, new JavaSerializer<String>(), EventWriterConfig.builder().build());
    writer.writeEvent(message);
    writer.flush();
    log.debug("Done writing message '{}' to stream '{} / {}'", message, scope, stream);
    // Now, read the event from the stream.
    // Random reader-group name avoids collisions across repeated sanity runs.
    String readerGroup = UUID.randomUUID().toString().replace("-", "");
    ReaderGroupConfig readerGroupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, stream)).disableAutomaticCheckpoints().build();
    @Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, clientConfig);
    readerGroupManager.createReaderGroup(readerGroup, readerGroupConfig);
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", readerGroup, new JavaSerializer<String>(), ReaderConfig.builder().initialAllocationDelay(0).build());
    // Keeping the read timeout large so that there is ample time for reading the event even in
    // case of abnormal delays in test environments.
    String readMessage = reader.readNextEvent(10000).getEvent();
    log.info("Done reading event [{}]", readMessage);
    assertEquals(message, readMessage);
}
Also used : ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) ReaderGroupManager(io.pravega.client.admin.ReaderGroupManager) StreamManager(io.pravega.client.admin.StreamManager) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) Cleanup(lombok.Cleanup)

Aggregations

EventStreamClientFactory (io.pravega.client.EventStreamClientFactory)57 Cleanup (lombok.Cleanup)50 Test (org.junit.Test)41 ReaderGroupManager (io.pravega.client.admin.ReaderGroupManager)36 ClientConfig (io.pravega.client.ClientConfig)21 ReaderGroup (io.pravega.client.stream.ReaderGroup)19 StreamConfiguration (io.pravega.client.stream.StreamConfiguration)19 StreamCut (io.pravega.client.stream.StreamCut)19 HashMap (java.util.HashMap)18 StreamManager (io.pravega.client.admin.StreamManager)17 ReaderGroupConfig (io.pravega.client.stream.ReaderGroupConfig)16 Stream (io.pravega.client.stream.Stream)16 Map (java.util.Map)16 Segment (io.pravega.client.segment.impl.Segment)13 EventWriterConfig (io.pravega.client.stream.EventWriterConfig)12 Controller (io.pravega.client.control.impl.Controller)11 EventStreamWriter (io.pravega.client.stream.EventStreamWriter)10 JavaSerializer (io.pravega.client.stream.impl.JavaSerializer)10 Futures (io.pravega.common.concurrent.Futures)10 ConnectionFactory (io.pravega.client.connection.impl.ConnectionFactory)9