Use of io.pravega.client.ClientFactory in project pravega by pravega.
The class ReaderGroupNotificationTest, method testSegmentNotifications.
@Test(timeout = 40000)
public void testSegmentNotifications() throws Exception {
final String streamName = "stream1";
StreamConfiguration config = StreamConfiguration.builder().scope(SCOPE).streamName(streamName).scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope(SCOPE).get();
controller.createStream(config).get();
@Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl(SCOPE, controller, connectionFactory);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
writer.writeEvent("0", "data1").get();
// Scale the stream: seal segment 0 and replace it with two segments covering key ranges [0.0, 0.5) and [0.5, 1.0).
Stream stream = new StreamImpl(SCOPE, streamName);
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.5);
map.put(0.5, 1.0);
Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
assertTrue(result);
writer.writeEvent("0", "data2").get();
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(SCOPE, controller, clientFactory, connectionFactory);
ReaderGroup readerGroup = groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, streamName)).build());
@Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
// Add segment event listener
Listener<SegmentNotification> l1 = notification -> {
listenerInvoked.set(true);
numberOfReaders.set(notification.getNumOfReaders());
numberOfSegments.set(notification.getNumOfSegments());
listenerLatch.release();
};
readerGroup.getSegmentNotifier(executor).registerListener(l1);
EventRead<String> event1 = reader1.readNextEvent(15000);
EventRead<String> event2 = reader1.readNextEvent(15000);
assertNotNull(event1);
assertEquals("data1", event1.getEvent());
assertNotNull(event2);
assertEquals("data2", event2.getEvent());
listenerLatch.await();
assertTrue("Listener invoked", listenerInvoked.get());
assertEquals(2, numberOfSegments.get());
assertEquals(1, numberOfReaders.get());
}
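The listener above updates fields that are declared outside this excerpt. A minimal sketch of those declarations, with names taken from the test; the types, in particular Pravega's ReusableLatch, are assumptions:
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import io.pravega.common.util.ReusableLatch;
// Assumed declarations; the release()/await() pair used in the test matches ReusableLatch's API.
private final AtomicBoolean listenerInvoked = new AtomicBoolean();
private final AtomicInteger numberOfReaders = new AtomicInteger();
private final AtomicInteger numberOfSegments = new AtomicInteger();
private final ReusableLatch listenerLatch = new ReusableLatch();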
Use of io.pravega.client.ClientFactory in project pravega by pravega.
The class UnreadBytesTest, method testUnreadBytes.
@Test(timeout = 50000)
public void testUnreadBytes() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope("unreadbytes").streamName("unreadbytes").scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope("unreadbytes").get();
controller.createStream(config).get();
@Cleanup ClientFactory clientFactory = ClientFactory.withScope("unreadbytes", ClientConfig.builder().controllerURI(controllerUri).build());
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("unreadbytes", new JavaSerializer<>(), EventWriterConfig.builder().build());
@Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes", ClientConfig.builder().controllerURI(controllerUri).build());
ReaderGroup readerGroup = groupManager.createReaderGroup("group", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/unreadbytes").build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "group", new JavaSerializer<>(), ReaderConfig.builder().build());
long unreadBytes = readerGroup.getMetrics().unreadBytes();
assertTrue("Unread bvtes: " + unreadBytes, unreadBytes == 0);
writer.writeEvent("0", "data of size 30").get();
writer.writeEvent("0", "data of size 30").get();
EventRead<String> firstEvent = reader.readNextEvent(15000);
EventRead<String> secondEvent = reader.readNextEvent(15000);
assertNotNull(firstEvent);
assertEquals("data of size 30", firstEvent.getEvent());
assertNotNull(secondEvent);
assertEquals("data of size 30", secondEvent.getEvent());
// trigger a checkpoint.
CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executor);
EventRead<String> chkpointEvent = reader.readNextEvent(15000);
assertEquals("test", chkpointEvent.getCheckpointName());
chkPointResult.join();
unreadBytes = readerGroup.getMetrics().unreadBytes();
assertTrue("Unread bvtes: " + unreadBytes, unreadBytes == 0);
writer.writeEvent("0", "data of size 30").get();
unreadBytes = readerGroup.getMetrics().unreadBytes();
assertTrue("Unread bytes: " + unreadBytes, unreadBytes == 30);
}
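The scaleStream, getSegmentNotifier, and initiateCheckpoint calls in these examples all pass an executor that is declared outside the excerpts. A minimal sketch of such a declaration, assuming the ScheduledExecutorService type those APIs expect:
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
// Assumed background executor for checkpoint and scale requests; the pool size is arbitrary.
private final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);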
Use of io.pravega.client.ClientFactory in project pravega by pravega.
The class EventProcessorTest, method testEventProcessor.
@Test(timeout = 60000)
public void testEventProcessor() throws Exception {
@Cleanup TestingServer zkTestServer = new TestingServerStarter().start();
ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
serviceBuilder.initialize();
StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
int servicePort = TestUtils.getAvailableListenPort();
@Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, servicePort, store);
server.startListening();
int controllerPort = TestUtils.getAvailableListenPort();
@Cleanup ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), true, controllerPort, "localhost", servicePort, 4);
controllerWrapper.awaitRunning();
Controller controller = controllerWrapper.getController();
// Create controller object for testing against a separate controller process.
// ControllerImpl controller = new ControllerImpl("localhost", 9090);
final String host = "host";
final String scope = "controllerScope";
final String streamName = "stream1";
final String readerGroup = "readerGroup";
final CompletableFuture<Boolean> createScopeStatus = controller.createScope(scope);
if (!createScopeStatus.join()) {
throw new RuntimeException("Scope already existed");
}
final StreamConfiguration config = StreamConfiguration.builder().scope(scope).streamName(streamName).scalingPolicy(ScalingPolicy.fixed(1)).build();
System.err.println(String.format("Creating stream (%s, %s)", scope, streamName));
CompletableFuture<Boolean> createStatus = controller.createStream(config);
if (!createStatus.get()) {
System.err.println("Stream alrady existed, exiting");
return;
}
@Cleanup ConnectionFactoryImpl connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
@Cleanup EventStreamWriter<TestEvent> producer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
int[] input = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
int expectedSum = input.length * (input.length + 1) / 2;
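// For input 1..10 this is 10 * 11 / 2 = 55, the sum the event processor is expected to produce.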
for (int i = 0; i < input.length; i++) {
producer.writeEvent("key", new TestEvent(input[i]));
}
producer.writeEvent("key", new TestEvent(-1));
producer.flush();
EventProcessorSystem system = new EventProcessorSystemImpl("Controller", host, scope, new ClientFactoryImpl(scope, controller, connectionFactory), new ReaderGroupManagerImpl(scope, controller, clientFactory, connectionFactory));
CheckpointConfig.CheckpointPeriod period = CheckpointConfig.CheckpointPeriod.builder().numEvents(1).numSeconds(1).build();
CheckpointConfig checkpointConfig = CheckpointConfig.builder().type(CheckpointConfig.Type.Periodic).checkpointPeriod(period).build();
EventProcessorGroupConfig eventProcessorGroupConfig = EventProcessorGroupConfigImpl.builder().eventProcessorCount(1).readerGroupName(readerGroup).streamName(streamName).checkpointConfig(checkpointConfig).build();
CompletableFuture<Long> result = new CompletableFuture<>();
// Test case 1. Actor does not throw any exception during normal operation.
EventProcessorConfig<TestEvent> eventProcessorConfig = EventProcessorConfig.<TestEvent>builder().supplier(() -> new TestEventProcessor(false, result)).serializer(new JavaSerializer<>()).decider((Throwable e) -> ExceptionHandler.Directive.Stop).config(eventProcessorGroupConfig).build();
@Cleanup EventProcessorGroup<TestEvent> eventProcessorGroup = system.createEventProcessorGroup(eventProcessorConfig, CheckpointStoreFactory.createInMemoryStore());
Long value = result.join();
Assert.assertEquals(expectedSum, value.longValue());
log.info("SUCCESS: received expected sum = " + expectedSum);
}
Use of io.pravega.client.ClientFactory in project pravega by pravega.
The class EndToEndChannelLeakTest, method testDetectChannelLeakSegmentSealed.
@Test(timeout = 30000)
public void testDetectChannelLeakSegmentSealed() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope(SCOPE).streamName(STREAM_NAME).scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope(SCOPE).get();
controller.createStream(config).get();
@Cleanup ConnectionFactoryImpl connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl(SCOPE, controller, connectionFactory);
// Create a writer.
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(STREAM_NAME, new JavaSerializer<>(), EventWriterConfig.builder().build());
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(SCOPE, controller, clientFactory, connectionFactory);
groupManager.createReaderGroup(READER_GROUP, ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, STREAM_NAME)).build());
@Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId1", READER_GROUP, new JavaSerializer<>(), ReaderConfig.builder().build());
// Write an event.
writer.writeEvent("0", "zero").get();
// Read an event.
EventRead<String> event = reader1.readNextEvent(10000);
assertNotNull(event);
assertEquals("zero", event.getEvent());
// Scale the stream: seal segment 0 and replace it with three segments covering key ranges [0.0, 0.33), [0.33, 0.66) and [0.66, 1.0).
Stream stream = new StreamImpl(SCOPE, STREAM_NAME);
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
assertTrue(result);
// Write more events.
writer.writeEvent("0", "one").get();
writer.writeEvent("0", "two").get();
writer.writeEvent("1", "three").get();
// Store the open channel count before reading.
int channelCount = connectionFactory.getActiveChannelCount();
event = reader1.readNextEvent(10000);
assertNotNull(event.getEvent());
// The number of sockets increases by 2 (+3 for the new segments, -1 because the old segment is sealed).
assertEquals(channelCount + 2, connectionFactory.getActiveChannelCount());
event = reader1.readNextEvent(10000);
assertNotNull(event.getEvent());
// no changes to socket count.
assertEquals(channelCount + 2, connectionFactory.getActiveChannelCount());
event = reader1.readNextEvent(10000);
assertNotNull(event.getEvent());
// no changes to socket count.
assertEquals(channelCount + 2, connectionFactory.getActiveChannelCount());
}
Use of io.pravega.client.ClientFactory in project pravega by pravega.
The class EndToEndTxnWithScaleTest, method testTxnWithScale.
@Test(timeout = 10000)
public void testTxnWithScale() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scope("test").streamName("test").scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope("test").get();
controller.createStream(config).get();
@Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
@Cleanup ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
@Cleanup EventStreamWriter<String> test = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().transactionTimeoutScaleGracePeriod(10000).transactionTimeoutTime(10000).build());
Transaction<String> transaction = test.beginTxn();
transaction.writeEvent("0", "txntest1");
transaction.commit();
// Scale the stream: seal segment 0 and replace it with three segments covering key ranges [0.0, 0.33), [0.33, 0.66) and [0.66, 1.0).
Stream stream = new StreamImpl("test", "test");
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executorService).getFuture().get();
assertTrue(result);
transaction = test.beginTxn();
transaction.writeEvent("0", "txntest2");
transaction.commit();
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
EventRead<String> event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("txntest1", event.getEvent());
event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("txntest2", event.getEvent());
}
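Committing a transaction completes asynchronously on the server side; a caller that wants to observe the terminal state can poll Transaction.checkStatus(). A minimal sketch of such a check for the second transaction committed above (not part of the original test; the polling interval is arbitrary):
// Wait for the commit to reach its terminal state before asserting on reads.
while (transaction.checkStatus() != Transaction.Status.COMMITTED) {
Thread.sleep(100);
}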