use of io.pravega.client.ClientFactory in project pravega by pravega.
the class StreamSeekTest method testStreamSeek.
@Test(timeout = 50000)
public void testStreamSeek() throws Exception {
createScope(SCOPE);
createStream(STREAM1);
createStream(STREAM2);
@Cleanup ClientFactory clientFactory = ClientFactory.withScope(SCOPE, controllerUri);
@Cleanup EventStreamWriter<String> writer1 = clientFactory.createEventWriter(STREAM1, serializer, EventWriterConfig.builder().build());
@Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, controllerUri);
ReaderGroup readerGroup = groupManager.createReaderGroup("group", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, STREAM1)).stream(Stream.of(SCOPE, STREAM2)).build());
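// Note: STREAM2 is part of the reader group but receives no events in this test; all writes below go to STREAM1.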
// Prep the stream with data.
// 1. Write two events with an event size of 30.
writer1.writeEvent(keyGenerator.get(), getEventData.apply(1)).get();
writer1.writeEvent(keyGenerator.get(), getEventData.apply(2)).get();
// 2. Scale the stream into three segments covering the key ranges [0.0, 0.33), [0.33, 0.66) and [0.66, 1.0).
Map<Double, Double> newKeyRanges = new HashMap<>();
newKeyRanges.put(0.0, 0.33);
newKeyRanges.put(0.33, 0.66);
newKeyRanges.put(0.66, 1.0);
scaleStream(STREAM1, newKeyRanges);
// 3. Write three more events with an event size of 30.
writer1.writeEvent(keyGenerator.get(), getEventData.apply(3)).get();
writer1.writeEvent(keyGenerator.get(), getEventData.apply(4)).get();
writer1.writeEvent(keyGenerator.get(), getEventData.apply(5)).get();
// Create a reader
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "group", serializer, ReaderConfig.builder().build());
// With automatic checkpoints disabled, the stream cuts returned by getStreamCuts() have segment offsets of zero.
// Stream cut 1: taken before any events are read, so it points at the start of the pre-scale segments.
Map<Stream, StreamCut> streamCut1 = readerGroup.getStreamCuts();
readAndVerify(reader, 1, 2);
readAndVerify(reader, 3, 4, 5);
// Stream cut 2
Map<Stream, StreamCut> streamCut2 = readerGroup.getStreamCuts();
// Reset the reader group to streamCut1 (the pre-scale segments at offset 0).
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromStreamCuts(streamCut1).build());
verifyReinitializationRequiredException(reader);
@Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId", "group", serializer, ReaderConfig.builder().build());
// verify that we are at streamCut1
readAndVerify(reader1, 1, 2);
// Reset the reader group to streamCut2 (the post-scale segments at offset 0).
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromStreamCuts(streamCut2).build());
verifyReinitializationRequiredException(reader1);
@Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("readerId", "group", serializer, ReaderConfig.builder().build());
// verify that we are at streamCut2
readAndVerify(reader2, 3, 4, 5);
}
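The test above leans on helpers defined elsewhere in StreamSeekTest (createScope, createStream, scaleStream, keyGenerator, getEventData, readAndVerify). The following is a minimal sketch of what the data helpers could look like, assuming String events of roughly 30 bytes and JUnit assertions; the real implementations in the test class may differ.
// Hypothetical sketches only; the actual helpers live in StreamSeekTest.
private final Supplier<String> keyGenerator = () -> String.valueOf(System.nanoTime());
// Fixed-size payload per event id (the test assumes events of roughly 30 bytes).
private final Function<Integer, String> getEventData = id -> String.format("%02d-%026d", id, 0);
private void readAndVerify(EventStreamReader<String> reader, int... eventIds) {
    // Collect the expected payloads; once the stream has several segments the read order is not guaranteed.
    Set<String> expected = new HashSet<>();
    for (int id : eventIds) {
        expected.add(getEventData.apply(id));
    }
    while (!expected.isEmpty()) {
        String event = reader.readNextEvent(10_000).getEvent();
        assertTrue("unexpected event: " + event, expected.remove(event));
    }
}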
use of io.pravega.client.ClientFactory in project pravega by pravega.
the class EndToEndStatsTest method testStatsCount.
@Test(timeout = 10000)
public void testStatsCount() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope("test").streamName("test").scalingPolicy(ScalingPolicy.fixed(1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope("test").get();
controller.createStream(config).get();
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl("test", controller);
@Cleanup EventStreamWriter<String> test = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().transactionTimeoutScaleGracePeriod(10000).transactionTimeoutTime(10000).build());
for (int i = 0; i < 10; i++) {
test.writeEvent("test").get();
}
assertEquals(statsRecorder.getSegments().get("test/test/0").get(), 10);
Transaction<String> transaction = test.beginTxn();
for (int i = 0; i < 10; i++) {
transaction.writeEvent("0", "txntest1");
}
assertEquals(statsRecorder.getSegments().get("test/test/0").get(), 10);
transaction.commit();
Stream stream = new StreamImpl("test", "test");
while (!controller.checkTransactionStatus(stream, transaction.getTxnId()).get().equals(Transaction.Status.COMMITTED)) {
Thread.sleep(100);
}
assertEquals(statsRecorder.getSegments().get("test/test/0").get(), 20);
}
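The Thread.sleep polling loop above can be pulled into a small helper so the test fails fast with a clear message instead of relying only on the @Test timeout. A sketch using only the controller and transaction APIs already shown; the helper name and timeout handling are illustrative.
// Hypothetical helper, not part of the original test.
private void waitForCommit(Controller controller, Stream stream, Transaction<String> txn, long timeoutMillis) throws Exception {
    long deadline = System.currentTimeMillis() + timeoutMillis;
    while (!controller.checkTransactionStatus(stream, txn.getTxnId()).get().equals(Transaction.Status.COMMITTED)) {
        if (System.currentTimeMillis() > deadline) {
            throw new TimeoutException("transaction " + txn.getTxnId() + " did not commit within " + timeoutMillis + " ms");
        }
        Thread.sleep(100);
    }
}
With this in place, the loop in testStatsCount becomes a single call such as waitForCommit(controller, stream, transaction, 5000) before the final assertion.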
use of io.pravega.client.ClientFactory in project pravega by pravega.
the class EndToEndTruncationTest method testTruncation.
@Test(timeout = 30000)
public void testTruncation() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope("test").streamName("test").scalingPolicy(ScalingPolicy.byEventRate(10, 2, 2)).build();
LocalController controller = (LocalController) controllerWrapper.getController();
controllerWrapper.getControllerService().createScope("test").get();
controller.createStream(config).get();
@Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().build());
writer.writeEvent("0", "truncationTest1").get();
// Scale: split the two original segments (0 and 1) into three segments covering [0.0, 0.33), [0.33, 0.66) and [0.66, 1.0).
Stream stream = new StreamImpl("test", "test");
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
Boolean result = controller.scaleStream(stream, Lists.newArrayList(0, 1), map, executor).getFuture().get();
assertTrue(result);
writer.writeEvent("0", "truncationTest2").get();
Map<Integer, Long> streamCutPositions = new HashMap<>();
streamCutPositions.put(2, 0L);
streamCutPositions.put(3, 0L);
streamCutPositions.put(4, 0L);
controller.truncateStream(stream.getScope(), stream.getStreamName(), streamCutPositions).join();
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
EventRead<String> event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("truncationTest2", event.getEvent());
event = reader.readNextEvent(1000);
assertNull(event.getEvent());
// TODO (issue #2011): test more scenarios:
// 1. Get a valid stream cut with an offset > 0 and validate truncation within a segment.
// 2. Have an existing reader reading from a not-yet-truncated segment, then truncate that segment;
//    verify that the reader gets an appropriate response and handles it successfully.
}
use of io.pravega.client.ClientFactory in project pravega by pravega.
the class EndToEndWithScaleTest method testScale.
@Test(timeout = 30000)
public void testScale() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope("test").streamName("test").scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope("test").get();
controller.createStream(config).get();
@Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().build());
writer.writeEvent("0", "txntest1").get();
// Scale: split the single original segment (0) into three segments covering [0.0, 0.33), [0.33, 0.66) and [0.66, 1.0).
Stream stream = new StreamImpl("test", "test");
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
assertTrue(result);
writer.writeEvent("0", "txntest2").get();
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
EventRead<String> event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("txntest1", event.getEvent());
event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("txntest2", event.getEvent());
}
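The two end-to-end tests above wire up ClientFactoryImpl and ConnectionFactoryImpl by hand because they need the Controller handle to drive scaling and truncation directly. When that handle is not needed, the same clients can be built from the controller URI alone with the static factories already used in the StreamSeekTest example; a sketch, assuming controllerUri points at the running controller.
@Cleanup ClientFactory clientFactory = ClientFactory.withScope("test", controllerUri);
@Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope("test", controllerUri);
groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());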
use of io.pravega.client.ClientFactory in project pravega by pravega.
the class StreamTransactionMetadataTasksTest method createEventProcessor.
private <T extends ControllerEvent> void createEventProcessor(final String readerGroupName, final String streamName, final EventStreamReader<T> reader, final EventStreamWriter<T> writer, Supplier<EventProcessor<T>> factory) throws CheckpointStoreException {
ClientFactory clientFactory = Mockito.mock(ClientFactory.class);
Mockito.when(clientFactory.<T>createReader(anyString(), anyString(), any(), any())).thenReturn(reader);
Mockito.when(clientFactory.<T>createEventWriter(anyString(), any(), any())).thenReturn(writer);
ReaderGroup readerGroup = Mockito.mock(ReaderGroup.class);
Mockito.when(readerGroup.getGroupName()).thenReturn(readerGroupName);
ReaderGroupManager readerGroupManager = Mockito.mock(ReaderGroupManager.class);
Mockito.when(readerGroupManager.createReaderGroup(anyString(), any(ReaderGroupConfig.class))).then(invocation -> readerGroup);
EventProcessorSystemImpl system = new EventProcessorSystemImpl("system", "host", SCOPE, clientFactory, readerGroupManager);
EventProcessorGroupConfig eventProcessorConfig = EventProcessorGroupConfigImpl.builder().eventProcessorCount(1).readerGroupName(readerGroupName).streamName(streamName).checkpointConfig(CheckpointConfig.periodic(1, 1)).build();
EventProcessorConfig<T> config = EventProcessorConfig.<T>builder().config(eventProcessorConfig).decider(ExceptionHandler.DEFAULT_EXCEPTION_HANDLER).serializer(new JavaSerializer<>()).supplier(factory).build();
system.createEventProcessorGroup(config, CheckpointStoreFactory.createInMemoryStore());
}
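A call site for this helper might look roughly like the sketch below, with fully mocked reader, writer and processor; the names and the choice of ControllerEvent as the type parameter are illustrative, and whether the resulting group starts cleanly depends on the behaviour stubbed on the mocks.
// Hypothetical usage sketch, not taken from the original test.
@SuppressWarnings("unchecked")
EventStreamReader<ControllerEvent> reader = Mockito.mock(EventStreamReader.class);
@SuppressWarnings("unchecked")
EventStreamWriter<ControllerEvent> writer = Mockito.mock(EventStreamWriter.class);
@SuppressWarnings("unchecked")
Supplier<EventProcessor<ControllerEvent>> factory = () -> Mockito.mock(EventProcessor.class);
createEventProcessor("commitEventReaders", "commitStream", reader, writer, factory);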