Use of io.pravega.test.common.InlineExecutor in project pravega by pravega.
In class EndToEndReaderGroupTest, method testReaderOfflineWithSilentCheckpoint:
@Test(timeout = 30000)
public void testReaderOfflineWithSilentCheckpoint() throws Exception {
    String streamName = "testReaderOfflineWithSilentCheckpoint";
    final Stream stream = Stream.of(SCOPE, streamName);
    final String group = "testReaderOfflineWithSilentCheckpoint-group";
    @Cleanup("shutdown") InlineExecutor backgroundExecutor = new InlineExecutor();
    createScope(SCOPE);
    createStream(SCOPE, streamName, ScalingPolicy.fixed(1));
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    // Prep the stream with data.
    // 1. Write events with an event size of 30.
    writer.writeEvent(randomKeyGenerator.get(), getEventData.apply(1)).join();
    writer.writeEvent(randomKeyGenerator.get(), getEventData.apply(2)).join();
    writer.writeEvent(randomKeyGenerator.get(), getEventData.apply(3)).join();
    writer.writeEvent(randomKeyGenerator.get(), getEventData.apply(4)).join();
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, PRAVEGA.getControllerURI());
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(1000).stream(stream).build());
    ReaderGroup readerGroup = groupManager.getReaderGroup(group);
    // Create a reader.
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, serializer, ReaderConfig.builder().build());
    // 2. Read an event.
    readAndVerify(reader, 1);
    // 3. Trigger a checkpoint and verify it is completed.
    CompletableFuture<Checkpoint> checkpoint = readerGroup.initiateCheckpoint("chk1", backgroundExecutor);
    // The reader group state will be updated after 1 second.
    TimeUnit.SECONDS.sleep(1);
    EventRead<String> data = reader.readNextEvent(15000);
    assertTrue(data.isCheckpoint());
    readAndVerify(reader, 2);
    assertTrue("Checkpointing should complete successfully", Futures.await(checkpoint));
    // 4. Generate stream cuts and validate the offset of the stream cut.
    CompletableFuture<Map<Stream, StreamCut>> sc = readerGroup.generateStreamCuts(backgroundExecutor);
    // The reader group state will be updated after 1 second.
    TimeUnit.SECONDS.sleep(1);
    data = reader.readNextEvent(15000);
    assertTrue("StreamCut generation should complete successfully", Futures.await(sc));
    // The expected segment 0 offset is 60L, since 2 events have been read.
    Map<Segment, Long> expectedOffsetMap = ImmutableMap.of(getSegment(streamName, 0, 0), 60L);
    Map<Stream, StreamCut> scMap = sc.join();
    assertEquals("StreamCut for a single stream expected", 1, scMap.size());
    assertEquals("StreamCut pointing to offset 60L expected", new StreamCutImpl(stream, expectedOffsetMap), scMap.get(stream));
    // 5. Invoke readerOffline with the last position as null. New readers should start reading
    // from the last checkpointed position.
    readerGroup.readerOffline("readerId", null);
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("readerId", group, serializer, ReaderConfig.builder().build());
    readAndVerify(reader1, 2);
}
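The expected stream-cut offset in step 4 follows directly from the event size stated in step 1: two events of 30 bytes each have been consumed when the cut is taken. A trivial standalone snippet making that arithmetic explicit (illustrative only; the variable name is not from the test):

// Step 1 writes events of 30 bytes each; two of them have been read by step 4,
// so the stream cut for segment 0 should point at offset 2 * 30 = 60.
long expectedSegmentZeroOffset = 2 * 30L;   // 60L, the value used in expectedOffsetMap above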
Use of io.pravega.test.common.InlineExecutor in project pravega by pravega.
In class EndToEndChannelLeakTest, method setUp:
@Before
public void setUp() throws Exception {
    executor = new InlineExecutor();
    // Start an embedded ZooKeeper server for the controller to use.
    zkTestServer = new TestingServerStarter().start();
    // Build an in-memory segment store and table store.
    serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    // Expose the segment store over the Pravega wire protocol.
    server = new PravegaConnectionListener(false, servicePort, store, tableStore, this.serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    // Start an embedded controller that connects to the test ZooKeeper server.
    controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), false, controllerPort, serviceHost, servicePort, containerCount);
    controllerWrapper.awaitRunning();
}
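A setup like this is usually paired with a tearDown that releases the same resources in roughly the reverse order. The following is only a sketch and not part of the listed source; it assumes the usual close()/shutdown() methods on these test components:

@After
public void tearDown() throws Exception {
    // Assumed cleanup order: stop the executor, then the controller, the wire-protocol
    // listener, the in-memory service builder, and finally the embedded ZooKeeper server.
    executor.shutdown();
    controllerWrapper.close();
    server.close();
    serviceBuilder.close();
    zkTestServer.close();
}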
Use of io.pravega.test.common.InlineExecutor in project pravega by pravega.
In class MultiKeyLatestItemSequentialProcessorTest, method testRunsItems:
@Test
public void testRunsItems() {
    @Cleanup("shutdown") InlineExecutor executor = new InlineExecutor();
    AtomicBoolean ran = new AtomicBoolean(false);
    MultiKeyLatestItemSequentialProcessor<String, String> processor = new MultiKeyLatestItemSequentialProcessor<>((k, v) -> ran.set(true), executor);
    // Update an item and verify that the processing callback has already run.
    processor.updateItem("k1", "Foo");
    assertTrue(ran.get());
}
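The assertion immediately after updateItem only holds because the supplied executor finishes the callback before control returns to the test. Below is a minimal, self-contained sketch of that property, assuming (as this test implies) that tasks handed to InlineExecutor complete inline; the class name and main method are illustrative only, not part of the Pravega sources:

import io.pravega.test.common.InlineExecutor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;

public class InlineExecutorSketch {
    public static void main(String[] args) {
        ScheduledExecutorService executor = new InlineExecutor();
        try {
            AtomicBoolean ran = new AtomicBoolean(false);
            // With a regular thread-pool executor this flag could still be false on the
            // next line; the tests above rely on the task having already completed.
            executor.execute(() -> ran.set(true));
            System.out.println("task completed inline: " + ran.get());
        } finally {
            executor.shutdown();
        }
    }
}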
Use of io.pravega.test.common.InlineExecutor in project pravega by pravega.
In class AppendProcessorTest, method testSetupTokenExpiryTaskClosesConnectionIfTokenHasExpired:
@Test
public void testSetupTokenExpiryTaskClosesConnectionIfTokenHasExpired() {
    // Arrange
    String streamSegmentName = "scope/stream/0.#epoch.0";
    UUID clientId = UUID.randomUUID();
    StreamSegmentStore mockStore = mock(StreamSegmentStore.class);
    ServerConnection mockConnection = mock(ServerConnection.class);
    @Cleanup("shutdown") ScheduledExecutorService executor = new InlineExecutor();
    @Cleanup AppendProcessor processor = AppendProcessor.defaultBuilder().store(mockStore).connection(new TrackedConnection(mockConnection)).tokenExpiryHandlerExecutor(executor).build();
    // Spy the actual AppendProcessor, so that we can have some of its methods return stubbed values.
    AppendProcessor mockProcessor = spy(processor);
    doReturn(true).when(mockProcessor).isSetupAppendCompleted(streamSegmentName, clientId);
    // The token expired 5 seconds ago.
    JsonWebToken token = new JsonWebToken("subject", "audience", "secret".getBytes(), Date.from(Instant.now().minusSeconds(5)), null);
    SetupAppend setupAppend = new SetupAppend(1, clientId, streamSegmentName, token.toCompactString());
    // Act
    mockProcessor.setupTokenExpiryTask(setupAppend, token).join();
    // Assert
    verify(mockConnection).close();
}
Use of io.pravega.test.common.InlineExecutor in project pravega by pravega.
In class AppendProcessorTest, method testSetupTokenExpiryWhenConnectionSendThrowsException:
@Test
public void testSetupTokenExpiryWhenConnectionSendThrowsException() {
    // Arrange
    String streamSegmentName = "scope/stream/0.#epoch.0";
    UUID clientId = UUID.randomUUID();
    StreamSegmentStore mockStore = mock(StreamSegmentStore.class);
    ServerConnection mockConnection = mock(ServerConnection.class);
    @Cleanup("shutdown") ScheduledExecutorService executor = new InlineExecutor();
    @Cleanup AppendProcessor processor = AppendProcessor.defaultBuilder().store(mockStore).connection(new TrackedConnection(mockConnection)).tokenExpiryHandlerExecutor(executor).build();
    // Spy the actual AppendProcessor, so that we can have some of its methods return stubbed values.
    AppendProcessor mockProcessor = spy(processor);
    doReturn(true).when(mockProcessor).isSetupAppendCompleted(streamSegmentName, clientId);
    doThrow(new RuntimeException()).when(mockConnection).send(any());
    // The token expires 300 ms from now.
    Date expiryDate = Date.from(Instant.now().plusMillis(300));
    JsonWebToken token = new JsonWebToken("subject", "audience", "secret".getBytes(), expiryDate, null);
    SetupAppend setupAppend = new SetupAppend(1, clientId, streamSegmentName, token.toCompactString());
    // Act
    mockProcessor.setupTokenExpiryTask(setupAppend, token).join();
    // Implicit assert: the exception thrown by connection.send() must not propagate out of join().
}
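Both AppendProcessorTest methods pass an InlineExecutor where a ScheduledExecutorService is required (tokenExpiryHandlerExecutor), so the expiry handler can be scheduled relative to the token's expiry time. Below is a small sketch of that scheduling contract using only the standard ScheduledExecutorService API; the class name, delay value, and printed message are illustrative and not taken from the Pravega sources:

import io.pravega.test.common.InlineExecutor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

public class TokenExpiryScheduleSketch {
    public static void main(String[] args) throws Exception {
        ScheduledExecutorService executor = new InlineExecutor();
        try {
            // Schedule a task roughly the way an expiry handler would be scheduled
            // for a token that expires 300 ms from now.
            ScheduledFuture<?> expiryTask = executor.schedule(
                    () -> System.out.println("token-expiry handler fired"),
                    300, TimeUnit.MILLISECONDS);
            expiryTask.get(); // block until the delayed task has run
        } finally {
            executor.shutdown();
        }
    }
}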