Example usage of io.pravega.client.stream.impl.Controller in the pravega project.
Taken from class ControllerEventProcessorsTest, method testHandleOrphaned.
@Test(timeout = 10000)
public void testHandleOrphaned() {
    // Mock every collaborator so the test exercises only the
    // ControllerEventProcessors failed/orphaned-process handling.
    Controller localController = mock(Controller.class);
    CheckpointStore checkpointStore = mock(CheckpointStore.class);
    StreamMetadataStore streamStore = mock(StreamMetadataStore.class);
    HostControllerStore hostStore = mock(HostControllerStore.class);
    ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
    StreamMetadataTasks streamMetadataTasks = mock(StreamMetadataTasks.class);
    executor = Executors.newSingleThreadScheduledExecutor();
    ControllerEventProcessorConfig config = ControllerEventProcessorConfigImpl.withDefault();
    EventProcessorSystem system = mock(EventProcessorSystem.class);
    // Stub group that reports two known processes ("host1", "host2") and
    // no-ops every lifecycle method; sweeping with only "host1" alive should
    // therefore treat "host2" as orphaned.
    EventProcessorGroup<ControllerEvent> processor = new EventProcessorGroup<ControllerEvent>() {
        @Override
        public void notifyProcessFailure(String process) throws CheckpointStoreException {
        }

        @Override
        public EventStreamWriter<ControllerEvent> getWriter() {
            return null;
        }

        @Override
        public Set<String> getProcesses() throws CheckpointStoreException {
            return Sets.newHashSet("host1", "host2");
        }

        @Override
        public Service startAsync() {
            return null;
        }

        @Override
        public boolean isRunning() {
            return false;
        }

        @Override
        public State state() {
            return null;
        }

        @Override
        public Service stopAsync() {
            return null;
        }

        @Override
        public void awaitRunning() {
        }

        @Override
        public void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException {
        }

        @Override
        public void awaitTerminated() {
        }

        @Override
        public void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException {
        }

        @Override
        public Throwable failureCause() {
            return null;
        }

        @Override
        public void addListener(Listener listener, Executor executor) {
        }

        @Override
        public void close() throws Exception {
        }
    };
    try {
        when(system.createEventProcessorGroup(any(), any())).thenReturn(processor);
    } catch (CheckpointStoreException e) {
        // Stubbing a mock should never throw. The original swallowed this
        // with printStackTrace(), letting the test proceed against an
        // unstubbed mock; fail loudly instead and preserve the cause.
        throw new AssertionError("Unexpected exception while stubbing EventProcessorSystem mock", e);
    }
    ControllerEventProcessors processors = new ControllerEventProcessors("host1", config, localController, checkpointStore, streamStore, hostStore, SegmentHelperMock.getSegmentHelperMock(), connectionFactory, streamMetadataTasks, system, executor);
    processors.startAsync();
    processors.awaitRunning();
    // Both the sweep of failed processes and the explicit failure handling
    // must complete successfully.
    assertTrue(Futures.await(processors.sweepFailedProcesses(() -> Sets.newHashSet("host1"))));
    assertTrue(Futures.await(processors.handleFailedProcess("host1")));
    processors.shutDown();
}
Example usage of io.pravega.client.stream.impl.Controller in the pravega project.
Taken from class ReadTest, method readThroughSegmentClient.
@Test
public void readThroughSegmentClient() throws SegmentSealedException, EndOfSegmentException, SegmentTruncatedException {
    // End-to-end write/read through the low-level segment clients against an
    // in-process Pravega listener with a mock controller.
    String endpoint = "localhost";
    String scope = "scope";
    String stream = "stream";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    StreamSegmentStore store = this.serviceBuilder.createStreamSegmentService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store);
    server.startListening();
    // Fix: the connection factory owns network resources and was previously
    // never closed; @Cleanup releases it when the test exits, matching how
    // the listener above is handled.
    @Cleanup ConnectionFactory clientCF = new ConnectionFactoryImpl(ClientConfig.builder().build());
    Controller controller = new MockController(endpoint, port, clientCF);
    controller.createScope(scope);
    controller.createStream(StreamConfiguration.builder().scope(scope).streamName(stream).build());
    SegmentOutputStreamFactoryImpl segmentproducerClient = new SegmentOutputStreamFactoryImpl(controller, clientCF);
    SegmentInputStreamFactoryImpl segmentConsumerClient = new SegmentInputStreamFactoryImpl(controller, clientCF);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup("close") SegmentOutputStream out = segmentproducerClient.createOutputStreamForSegment(segment, segmentSealedCallback, EventWriterConfig.builder().build(), "");
    out.write(new PendingEvent(null, ByteBuffer.wrap(testString.getBytes()), new CompletableFuture<>()));
    out.flush();
    @Cleanup("close") SegmentInputStream in = segmentConsumerClient.createInputStreamForSegment(segment);
    ByteBuffer result = in.read();
    assertEquals(ByteBuffer.wrap(testString.getBytes()), result);
    // Test large write followed by read.
    out.write(new PendingEvent(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(new PendingEvent(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(new PendingEvent(null, ByteBuffer.wrap(new byte[150000]), new CompletableFuture<>()));
    // Fix: JUnit's assertEquals takes (expected, actual); the original had
    // the arguments reversed, producing misleading failure messages.
    assertEquals(15, in.read().capacity());
    assertEquals(15, in.read().capacity());
    assertEquals(150000, in.read().capacity());
}
Example usage of io.pravega.client.stream.impl.Controller in the pravega project.
Taken from class ReaderGroupNotificationTest, method testEndOfStreamNotifications.
@Test(timeout = 40000)
public void testEndOfStreamNotifications() throws Exception {
    final String streamName = "stream2";
    // Create the scope and a stream with event-rate based scaling enabled.
    StreamConfiguration streamConfig = StreamConfiguration.builder().scope(SCOPE).streamName(streamName).scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope(SCOPE).get();
    controller.createStream(streamConfig).get();
    @Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    @Cleanup ClientFactory clientFactory = new ClientFactoryImpl(SCOPE, controller, connectionFactory);
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    writer.writeEvent("0", "data1").get();
    // Manually scale the single segment into two halves of the keyspace.
    Stream streamHandle = new StreamImpl(SCOPE, streamName);
    Map<Double, Double> keyRanges = new HashMap<>();
    keyRanges.put(0.0, 0.5);
    keyRanges.put(0.5, 1.0);
    Boolean scaled = controller.scaleStream(streamHandle, Collections.singletonList(0), keyRanges, executor).getFuture().get();
    assertTrue(scaled);
    writer.writeEvent("0", "data2").get();
    // Sealing the stream is what ultimately produces the end-of-data notification.
    assertTrue(controller.sealStream(SCOPE, streamName).get());
    @Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(SCOPE, controller, clientFactory, connectionFactory);
    ReaderGroup readerGroup = groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, streamName)).build());
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
    // Listener that records the end-of-data notification and unblocks the test.
    Listener<EndOfDataNotification> endOfDataListener = notification -> {
        listenerInvoked.set(true);
        listenerLatch.release();
    };
    readerGroup.getEndOfDataNotifier(executor).registerListener(endOfDataListener);
    // Drain both written events; the third read yields no event once the
    // sealed stream is exhausted.
    EventRead<String> firstRead = reader1.readNextEvent(10000);
    EventRead<String> secondRead = reader1.readNextEvent(10000);
    EventRead<String> thirdRead = reader1.readNextEvent(10000);
    assertNotNull(firstRead);
    assertEquals("data1", firstRead.getEvent());
    assertNotNull(secondRead);
    assertEquals("data2", secondRead.getEvent());
    assertNull(thirdRead.getEvent());
    listenerLatch.await();
    assertTrue("Listener invoked", listenerInvoked.get());
}
Example usage of io.pravega.client.stream.impl.Controller in the pravega project.
Taken from class ReaderGroupNotificationTest, method testSegmentNotifications.
@Test(timeout = 40000)
public void testSegmentNotifications() throws Exception {
    final String streamName = "stream1";
    // Create the scope and a stream with event-rate based scaling enabled.
    StreamConfiguration streamConfig = StreamConfiguration.builder().scope(SCOPE).streamName(streamName).scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope(SCOPE).get();
    controller.createStream(streamConfig).get();
    @Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    @Cleanup ClientFactory clientFactory = new ClientFactoryImpl(SCOPE, controller, connectionFactory);
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    writer.writeEvent("0", "data1").get();
    // Manually scale the single segment into two halves of the keyspace so
    // the reader group observes a segment-count change.
    Stream streamHandle = new StreamImpl(SCOPE, streamName);
    Map<Double, Double> keyRanges = new HashMap<>();
    keyRanges.put(0.0, 0.5);
    keyRanges.put(0.5, 1.0);
    Boolean scaled = controller.scaleStream(streamHandle, Collections.singletonList(0), keyRanges, executor).getFuture().get();
    assertTrue(scaled);
    writer.writeEvent("0", "data2").get();
    @Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(SCOPE, controller, clientFactory, connectionFactory);
    ReaderGroup readerGroup = groupManager.createReaderGroup("reader", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, streamName)).build());
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(), ReaderConfig.builder().build());
    // Listener that records the observed reader/segment counts and unblocks the test.
    Listener<SegmentNotification> segmentListener = notification -> {
        listenerInvoked.set(true);
        numberOfReaders.set(notification.getNumOfReaders());
        numberOfSegments.set(notification.getNumOfSegments());
        listenerLatch.release();
    };
    readerGroup.getSegmentNotifier(executor).registerListener(segmentListener);
    // Read both events back; this drives the notifier.
    EventRead<String> firstRead = reader1.readNextEvent(15000);
    EventRead<String> secondRead = reader1.readNextEvent(15000);
    assertNotNull(firstRead);
    assertEquals("data1", firstRead.getEvent());
    assertNotNull(secondRead);
    assertEquals("data2", secondRead.getEvent());
    listenerLatch.await();
    assertTrue("Listener invoked", listenerInvoked.get());
    // After the scale there are two segments served to the single reader.
    assertEquals(2, numberOfSegments.get());
    assertEquals(1, numberOfReaders.get());
}
Example usage of io.pravega.client.stream.impl.Controller in the pravega project.
Taken from class UnreadBytesTest, method testUnreadBytes.
@Test(timeout = 50000)
public void testUnreadBytes() throws Exception {
    // Verifies the reader-group unread-bytes metric: zero before any writes,
    // zero again after a checkpoint covering all reads, and equal to the
    // event size for data written after the checkpoint.
    StreamConfiguration config = StreamConfiguration.builder().scope("unreadbytes").streamName("unreadbytes").scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope("unreadbytes").get();
    controller.createStream(config).get();
    @Cleanup ClientFactory clientFactory = ClientFactory.withScope("unreadbytes", ClientConfig.builder().controllerURI(controllerUri).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("unreadbytes", new JavaSerializer<>(), EventWriterConfig.builder().build());
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope("unreadbytes", ClientConfig.builder().controllerURI(controllerUri).build());
    ReaderGroup readerGroup = groupManager.createReaderGroup("group", ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("unreadbytes/unreadbytes").build());
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "group", new JavaSerializer<>(), ReaderConfig.builder().build());
    long unreadBytes = readerGroup.getMetrics().unreadBytes();
    // Fix: message typo ("bvtes") corrected; assertEquals reports
    // expected vs. actual directly on failure.
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    writer.writeEvent("0", "data of size 30").get();
    writer.writeEvent("0", "data of size 30").get();
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("data of size 30", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("data of size 30", secondEvent.getEvent());
    // Trigger a checkpoint; the metric only advances past read positions
    // once a checkpoint has been completed.
    CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executor);
    EventRead<String> chkpointEvent = reader.readNextEvent(15000);
    assertEquals("test", chkpointEvent.getCheckpointName());
    chkPointResult.join();
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 0, unreadBytes);
    writer.writeEvent("0", "data of size 30").get();
    unreadBytes = readerGroup.getMetrics().unreadBytes();
    assertEquals("Unread bytes: " + unreadBytes, 30, unreadBytes);
}
Aggregations