Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
From class CheckpointTest, method testCheckpointAndRestore. The test writes events to a single-segment stream, drives a manual checkpoint through a reader, then resets the reader group to that checkpoint and verifies that a re-created reader resumes from it:
@Test(timeout = 20000)
public void testCheckpointAndRestore() throws ReinitializationRequiredException, InterruptedException, ExecutionException, TimeoutException {
    String endpoint = "localhost";
    String streamName = "testCheckpointAndRestore";
    String readerName = "reader";
    String readerGroupName = "testCheckpointAndRestore-group";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "Scope1";
    // In-process segment store; the TableStore handles table-segment requests on the same listener.
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(scope, endpoint, port);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(scope, streamName)).build();
    streamManager.createScope(scope);
    streamManager.createStream(scope, streamName, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
    streamManager.createReaderGroup(readerGroupName, groupConfig);
    @Cleanup ReaderGroup readerGroup = streamManager.getReaderGroup(readerGroupName);
    JavaSerializer<String> serializer = new JavaSerializer<>();
    @Cleanup EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    // Write three events so there is data both before and after the checkpoint position.
    producer.writeEvent(testString);
    producer.writeEvent(testString);
    producer.writeEvent(testString);
    producer.flush();
    AtomicLong clock = new AtomicLong();
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader(readerName, readerGroupName, serializer, ReaderConfig.builder().build(), clock::get, clock::get);
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    EventRead<String> read = reader.readNextEvent(60000);
    assertEquals(testString, read.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    read = reader.readNextEvent(60000);
    assertEquals(testString, read.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    @Cleanup("shutdown") final InlineExecutor backgroundExecutor = new InlineExecutor();
    // The checkpoint cannot complete until the reader reads and acknowledges the checkpoint marker.
    CompletableFuture<Checkpoint> checkpoint = readerGroup.initiateCheckpoint("Checkpoint", backgroundExecutor);
    assertFalse(checkpoint.isDone());
    read = reader.readNextEvent(60000);
    assertTrue(read.isCheckpoint());
    assertEquals("Checkpoint", read.getCheckpointName());
    assertNull(read.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    read = reader.readNextEvent(60000);
    assertEquals(testString, read.getEvent());
    Checkpoint cpResult = checkpoint.get(5, TimeUnit.SECONDS);
    assertTrue(checkpoint.isDone());
    assertEquals("Checkpoint", cpResult.getName());
    read = reader.readNextEvent(100);
    assertNull(read.getEvent());
    assertFalse(read.isCheckpoint());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    // Resetting the group to the checkpoint invalidates the existing reader.
    readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromCheckpoint(cpResult).disableAutomaticCheckpoints().build());
    try {
        reader.readNextEvent(60000);
        fail();
    } catch (ReinitializationRequiredException e) {
        // Expected
    }
    reader.close();
    // A re-created reader resumes from the checkpointed position: exactly one event remains.
    reader = clientFactory.createReader(readerName, readerGroupName, serializer, ReaderConfig.builder().build());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    read = reader.readNextEvent(60000);
    assertEquals(testString, read.getEvent());
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    read = reader.readNextEvent(100);
    assertNull(read.getEvent());
    assertFalse(read.isCheckpoint());
}
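The test keeps the completed Checkpoint in memory. In an application the checkpoint is usually persisted so the reader group can be reset from it after a restart. A minimal sketch, assuming a cpResult and readerGroup obtained as in the test above; the byte round trip through Checkpoint.toBytes()/fromBytes() is the part being illustrated and is not taken from this test:

// Hedged sketch: persist a completed checkpoint and later restore the group from it.
ByteBuffer serialized = cpResult.toBytes();              // store durably (file, key-value store, ...)
Checkpoint restored = Checkpoint.fromBytes(serialized);  // rebuild after a restart
readerGroup.resetReaderGroup(ReaderGroupConfig.builder()
        .startFromCheckpoint(restored)
        .disableAutomaticCheckpoints()
        .build());
// As in the test, existing readers then fail with ReinitializationRequiredException
// and must be re-created before they can continue from the restored position.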
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
From class CheckpointTest, method testMaxPendingCheckpoint. The test caps the reader group at one outstanding checkpoint request and verifies that a second, concurrent request is rejected with MaxNumberOfCheckpointsExceededException:
@Test(timeout = 20000)
public void testMaxPendingCheckpoint() throws ReinitializationRequiredException, InterruptedException, ExecutionException, TimeoutException {
    String endpoint = "localhost";
    String streamName = "testGenerateStreamCuts";
    String readerGroupName = "testGenerateStreamCuts-group1";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "Scope12";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(scope, endpoint, port);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    // Allow at most one outstanding checkpoint request for the reader group.
    int maxOutstandingCheckpointRequest = 1;
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, streamName)).maxOutstandingCheckpointRequest(maxOutstandingCheckpointRequest).build();
    streamManager.createScope(scope);
    streamManager.createStream(scope, streamName, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
    streamManager.createReaderGroup(readerGroupName, groupConfig);
    @Cleanup ReaderGroup readerGroup = streamManager.getReaderGroup(readerGroupName);
    JavaSerializer<String> serializer = new JavaSerializer<>();
    @Cleanup EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    producer.writeEvent(testString);
    producer.writeEvent(testString);
    producer.writeEvent(testString);
    producer.flush();
    AtomicLong clock = new AtomicLong();
    @Cleanup EventStreamReader<String> reader1 = clientFactory.createReader("reader1", readerGroupName, serializer, ReaderConfig.builder().build(), clock::get, clock::get);
    @Cleanup EventStreamReader<String> reader2 = clientFactory.createReader("reader2", readerGroupName, serializer, ReaderConfig.builder().build(), clock::get, clock::get);
    clock.addAndGet(CLOCK_ADVANCE_INTERVAL);
    @Cleanup("shutdown") final InlineExecutor backgroundExecutor1 = new InlineExecutor();
    @Cleanup("shutdown") final InlineExecutor backgroundExecutor2 = new InlineExecutor();
    CompletableFuture<Checkpoint> checkpoint1 = readerGroup.initiateCheckpoint("Checkpoint1", backgroundExecutor1);
    assertFalse(checkpoint1.isDone());
    // The second request exceeds the limit and fails immediately.
    CompletableFuture<Checkpoint> checkpoint2 = readerGroup.initiateCheckpoint("Checkpoint2", backgroundExecutor2);
    assertTrue(checkpoint2.isCompletedExceptionally());
    try {
        checkpoint2.get();
    } catch (ExecutionException e) {
        assertTrue(e.getCause() instanceof MaxNumberOfCheckpointsExceededException);
        assertTrue(e.getCause().getMessage().equals("rejecting checkpoint request since pending checkpoint reaches max allowed limit"));
    }
    // Both readers see only Checkpoint1; Checkpoint2 was never scheduled.
    EventRead<String> read = reader1.readNextEvent(100);
    assertTrue(read.isCheckpoint());
    assertEquals("Checkpoint1", read.getCheckpointName());
    assertNull(read.getEvent());
    read = reader2.readNextEvent(100);
    assertTrue(read.isCheckpoint());
    assertEquals("Checkpoint1", read.getCheckpointName());
    assertNull(read.getEvent());
    read = reader1.readNextEvent(100);
    assertFalse(read.isCheckpoint());
    read = reader2.readNextEvent(100);
    assertFalse(read.isCheckpoint());
    Checkpoint cpResult = checkpoint1.get(5, TimeUnit.SECONDS);
    assertTrue(checkpoint1.isDone());
    assertEquals("Checkpoint1", cpResult.getName());
}
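When the limit is hit, an application can wait for the pending checkpoint to drain and then retry. A minimal sketch, assuming readerGroup, backgroundExecutor1 and the pending checkpoint1 future from a setup like the test above, and assuming readers are polling readNextEvent concurrently so checkpoints can actually complete; names and timeouts are illustrative:

// Hedged sketch: retry a checkpoint that was rejected because of the outstanding-request limit.
Checkpoint cp;
try {
    cp = readerGroup.initiateCheckpoint("Checkpoint2", backgroundExecutor1).get(5, TimeUnit.SECONDS);
} catch (ExecutionException e) {
    if (!(e.getCause() instanceof MaxNumberOfCheckpointsExceededException)) {
        throw e;
    }
    // Wait for the earlier checkpoint to complete, then issue the request again.
    checkpoint1.get(5, TimeUnit.SECONDS);
    cp = readerGroup.initiateCheckpoint("Checkpoint2", backgroundExecutor1).get(5, TimeUnit.SECONDS);
}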
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
From class ControllerStreamMetadataTest, method setUp. The setup starts ZooKeeper, an in-memory segment store (stream segment service plus TableStore), and a controller before each test:
@Before
public void setUp() throws Exception {
    final int controllerPort = TestUtils.getAvailableListenPort();
    final String serviceHost = "localhost";
    final int servicePort = TestUtils.getAvailableListenPort();
    final int containerCount = 4;
    try {
        // 1. Start ZK
        this.zkTestServer = new TestingServerStarter().start();
        // 2. Start Pravega service.
        serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        this.server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
        this.server.startListening();
        // 3. Start controller
        this.controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), false, controllerPort, serviceHost, servicePort, containerCount);
        this.controllerWrapper.awaitRunning();
        this.controller = controllerWrapper.getController();
        this.streamConfiguration = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    } catch (Exception e) {
        log.error("Error during setup", e);
        throw e;
    }
}
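The matching teardown is not shown here; it would typically close these resources in reverse order of creation. A hedged sketch of what such an @After method might look like (the real test class may differ in details):

@After
public void tearDown() throws Exception {
    // Close in reverse order: controller first, then the segment store listener, then ZK.
    if (this.controllerWrapper != null) {
        this.controllerWrapper.close();
    }
    if (this.server != null) {
        this.server.close();
    }
    if (this.serviceBuilder != null) {
        this.serviceBuilder.close();
    }
    if (this.zkTestServer != null) {
        this.zkTestServer.close();
    }
}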
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
From class BatchClientTest, method setUp. The setup starts ZooKeeper, the segment store (stream segment service plus TableStore), and the controller, then prepares a serializer and client configuration:
@Before
public void setUp() throws Exception {
    zkTestServer = new TestingServerStarter().start();
    // Create and start segment store service
    serviceBuilder = createServiceBuilder();
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    // Create and start controller service
    controllerWrapper = createControllerWrapper();
    controllerWrapper.awaitRunning();
    serializer = new JavaSerializer<>();
    clientConfig = createClientConfig();
}
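Once setUp finishes, a test body can build a batch client from the resulting clientConfig. A minimal sketch of such a usage; the scope and stream names are illustrative assumptions, not taken from the real test class, and the calls shown are the standard Pravega batch client API as best understood here:

// Hedged sketch: iterate all segments of a stream with a batch client built from clientConfig.
@Cleanup
BatchClientFactory batchClient = BatchClientFactory.withScope("testScope", clientConfig);
Iterator<SegmentRange> ranges = batchClient
        .getSegments(Stream.of("testScope", "testStream"), StreamCut.UNBOUNDED, StreamCut.UNBOUNDED)
        .getIterator();
while (ranges.hasNext()) {
    // Each SegmentRange can be read independently (and in parallel) with a SegmentIterator.
    @Cleanup
    SegmentIterator<String> events = batchClient.readSegment(ranges.next(), serializer);
    while (events.hasNext()) {
        events.next();
    }
}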
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
From class IdleSegmentTest, method testByteBufferEventsWithIdleSegments. The test writes every event with a single routing key on a 20-segment stream and verifies that a reader still receives all events despite the idle segments:
@Test(timeout = 5000)
public void testByteBufferEventsWithIdleSegments() throws ReinitializationRequiredException {
    String endpoint = "localhost";
    String streamName = "abc";
    String readerName = "reader";
    String readerGroup = "group";
    int port = TestUtils.getAvailableListenPort();
    ByteBuffer testPayload = ByteBuffer.allocate(100);
    String scope = "Scope1";
    StreamSegmentStore store = this.serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(scope, endpoint, port);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, streamName)).disableAutomaticCheckpoints().build();
    streamManager.createScope(scope);
    // 20 segments, but the fixed routing key below sends every event to just one of them; the rest stay idle.
    streamManager.createStream(scope, streamName, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(20)).build());
    streamManager.createReaderGroup(readerGroup, groupConfig);
    Serializer<ByteBuffer> serializer = new ByteBufferSerializer();
    @Cleanup EventStreamWriter<ByteBuffer> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    List<CompletableFuture<Void>> results = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        results.add(producer.writeEvent("FixedRoutingKey", testPayload));
        System.out.println("Writing event " + i);
    }
    producer.flush();
    // The reader must return all 10 events even though 19 of the 20 segments are empty.
    @Cleanup EventStreamReader<ByteBuffer> reader = clientFactory.createReader(readerName, readerGroup, serializer, ReaderConfig.builder().build());
    for (int i = 0; i < 10; i++) {
        ByteBuffer read = reader.readNextEvent(10000).getEvent();
        assertEquals(testPayload, read);
    }
}
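The write futures collected in results are never joined; the test relies on flush() to make the events durable before reading. An equivalent, more explicit alternative using only standard CompletableFuture utilities would be:

// Hedged alternative to relying solely on producer.flush(): block until every queued write is acknowledged.
CompletableFuture.allOf(results.toArray(new CompletableFuture[0])).join();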