Example usage of io.pravega.client.stream.impl.StreamImpl in the Pravega project, taken from class EndToEndTruncationTest, method testTruncationOffsets.
/**
 * Verifies segment-level truncation offsets: after truncating a segment at its current
 * write offset, the starting offset, write offset, and length must all agree, a reader
 * starting from the beginning must first observe a {@link TruncatedDataException}, and
 * only events written after the truncation must be readable.
 */
// Timeout raised from 7000 ms: the two 5-second writeEvent acks alone could exceed the
// old budget before the reads (2000 + 2000 + 100 ms) even started, making the test flaky.
@Test(timeout = 30000)
public void testTruncationOffsets() throws InterruptedException, ExecutionException, TimeoutException, TruncatedDataException, ReinitializationRequiredException {
String scope = "scope";
String streamName = "testTruncationOffsets";
String testString = "Hello world\n";
LocalController controller = (LocalController) PRAVEGA.getLocalController();
controller.createScope(scope).join();
controller.createStream(scope, streamName, StreamConfiguration.builder().build()).join();
ClientConfig clientConfig = ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build();
@Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(clientConfig);
@Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
Serializer<String> serializer = new JavaSerializer<>();
@Cleanup EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
// Write one event and wait for the ack so the segment is known to contain data.
Future<Void> ack = producer.writeEvent(testString);
ack.get(5, TimeUnit.SECONDS);
SegmentMetadataClientFactory metadataClientFactory = new SegmentMetadataClientFactoryImpl(controller, clientFactory.getConnectionPool());
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, clientConfig);
// The stream was created with the default (single-segment) configuration, so segment 0
// is the only segment.
Segment segment = new Segment(scope, streamName, 0);
@Cleanup SegmentMetadataClient metadataClient = metadataClientFactory.createSegmentMetadataClient(segment, DelegationTokenProviderFactory.createWithEmptyToken());
// Before truncation: starting offset is 0, write offset equals segment length and is
// larger than the raw string length (serialization adds framing overhead).
assertEquals(0, metadataClient.getSegmentInfo().join().getStartingOffset());
long writeOffset = metadataClient.getSegmentInfo().join().getWriteOffset();
assertEquals(writeOffset, metadataClient.fetchCurrentSegmentLength().join().longValue());
assertTrue(metadataClient.getSegmentInfo().join().getWriteOffset() > testString.length());
// Truncate the segment at the current write offset, discarding everything written so far.
metadataClient.truncateSegment(writeOffset).join();
// After truncation the starting offset moves up to the truncation point; write offset
// and length are unchanged.
assertEquals(writeOffset, metadataClient.getSegmentInfo().join().getStartingOffset());
assertEquals(writeOffset, metadataClient.getSegmentInfo().join().getWriteOffset());
assertEquals(writeOffset, metadataClient.fetchCurrentSegmentLength().join().longValue());
// Write a second event after the truncation; only this one should be readable.
ack = producer.writeEvent(testString);
ack.get(5, TimeUnit.SECONDS);
String group = "testTruncationOffsets-group";
ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(new StreamImpl(scope, streamName)).build();
readerGroupManager.createReaderGroup(group, groupConfig);
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("reader", group, serializer, ReaderConfig.builder().build());
// The reader starts at offset 0, which has been truncated away, so the first read
// must surface TruncatedDataException before the reader resumes past the truncation point.
AssertExtensions.assertThrows(TruncatedDataException.class, () -> reader.readNextEvent(2000));
EventRead<String> event = reader.readNextEvent(2000);
assertEquals(testString, event.getEvent());
// No further events are expected.
event = reader.readNextEvent(100);
assertNull(event.getEvent());
}
Example usage of io.pravega.client.stream.impl.StreamImpl in the Pravega project, taken from class EndToEndTruncationTest, method testTruncation.
/**
 * Verifies stream-level truncation: after scaling from 2 to 3 segments and truncating
 * at a stream cut placed at the start of the new segments, a fresh reader must see only
 * the event written after the scale, not the pre-scale event.
 */
@Test(timeout = 60000)
public void testTruncation() throws Exception {
StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 2)).build();
String streamName = "testTruncation";
@Cleanup StreamManager streamManager = StreamManager.create(PRAVEGA.getControllerURI());
String scope = "test";
streamManager.createScope(scope);
streamManager.createStream(scope, streamName, config);
ClientConfig clientConfig = ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build();
@Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(clientConfig);
// Reuse clientConfig instead of rebuilding an identical ClientConfig inline.
@Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, clientConfig);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
// This event lands in one of the two original segments and should be truncated away below.
writer.writeEvent("0", "truncationTest1").get();
// Scale the 2 original segments (ids 0 and 1) into 3 new segments.
Stream stream = new StreamImpl(scope, streamName);
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
LocalController controller = (LocalController) PRAVEGA.getLocalController();
boolean result = controller.scaleStream(stream, Lists.newArrayList(0L, 1L), map, executorService()).getFuture().get();
assertTrue(result);
// This event lands in one of the post-scale segments and must survive the truncation.
writer.writeEvent("0", "truncationTest2").get();
// Stream cut at offset 0 of the three post-scale segments (segment numbers 2, 3, 4
// in epoch 1): everything before the scale point is truncated.
Map<Long, Long> streamCutPositions = new HashMap<>();
streamCutPositions.put(computeSegmentId(2, 1), 0L);
streamCutPositions.put(computeSegmentId(3, 1), 0L);
streamCutPositions.put(computeSegmentId(4, 1), 0L);
controller.truncateStream(stream.getScope(), stream.getStreamName(), streamCutPositions).join();
String group = "testTruncation-group";
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(scope, clientConfig, connectionFactory);
// Match the scoped-stream style used elsewhere in these tests instead of string concatenation.
groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(scope, streamName).getScopedName()).build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(), ReaderConfig.builder().build());
// Only the post-truncation event should be readable.
EventRead<String> event = reader.readNextEvent(10000);
assertNotNull(event);
assertEquals("truncationTest2", event.getEvent());
event = reader.readNextEvent(1000);
assertNull(event.getEvent());
}
Example usage of io.pravega.client.stream.impl.StreamImpl in the Pravega project, taken from class EndToEndTruncationTest, method testWriteDuringScaleAndTruncation.
/**
 * Verifies that a writer keeps working across four successive scale operations followed
 * by a truncation, and that a reader created afterwards — stepped forward via explicit
 * checkpoints — eventually delivers only the event written after the truncation.
 */
@Test(timeout = 50000)
public void testWriteDuringScaleAndTruncation() throws Exception {
String streamName = "testWriteDuringScaleAndTruncation";
Stream stream = new StreamImpl("test", streamName);
StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 2)).build();
LocalController controller = (LocalController) PRAVEGA.getLocalController();
controller.createScope("test").get();
controller.createStream("test", streamName, config).get();
// Lower the scaling policy's minimum segment count from 2 to 1 so the scale-down
// operations below are permitted.
config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
controller.updateStream("test", streamName, config).get();
@Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
@Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
// routing key "0" translates to key 0.8. This write happens to segment 1.
writer.writeEvent("0", "truncationTest1").get();
// Perform scaling operations on the stream. Segment ids below are (number, epoch)
// pairs encoded via computeSegmentId; each scale creates segments in a new epoch.
ImmutableMap<Double, Double> singleSegmentKeyRange = ImmutableMap.of(0.0, 1.0);
ImmutableMap<Double, Double> twoSegmentKeyRange = ImmutableMap.of(0.0, 0.5, 0.5, 1.0);
// scale down to 1 segment: seals segments 0 and 1, creates segment 2 (epoch 1).
assertTrue("Stream Scale down", controller.scaleStream(stream, Lists.newArrayList(0L, 1L), singleSegmentKeyRange, executorService()).getFuture().get());
// scale up to 2 segments: seals segment 2, creates segments 3 and 4 (epoch 2).
assertTrue("Stream Scale up", controller.scaleStream(stream, Lists.newArrayList(computeSegmentId(2, 1)), twoSegmentKeyRange, executorService()).getFuture().get());
// scale down to 1 segment: seals segments 3 and 4, creates segment 5 (epoch 3).
assertTrue("Stream Scale down", controller.scaleStream(stream, Lists.newArrayList(computeSegmentId(3, 2), computeSegmentId(4, 2)), singleSegmentKeyRange, executorService()).getFuture().get());
// scale up to 2 segments: seals segment 5, creates segments 6 and 7 (epoch 4).
assertTrue("Stream Scale up", controller.scaleStream(stream, Lists.newArrayList(computeSegmentId(5, 3)), twoSegmentKeyRange, executorService()).getFuture().get());
// Truncate at the start of epoch-2 segments, discarding the first event.
Map<Long, Long> streamCutPositions = new HashMap<>();
streamCutPositions.put(computeSegmentId(3, 2), 0L);
streamCutPositions.put(computeSegmentId(4, 2), 0L);
assertTrue("Truncate stream", controller.truncateStream("test", streamName, streamCutPositions).get());
// write an event after the truncation; flush instead of awaiting the ack future.
writer.writeEvent("0", "truncationTest3");
writer.flush();
// Read the event back. groupRefreshTimeMillis(0) makes the reader pick up group
// state changes immediately; the explicit checkpoints step the reader across the
// sealed epochs — presumably forcing successor-segment acquisition (NOTE(review):
// confirm against ReaderGroup checkpoint semantics).
String group = "testWriteDuringScaleAndTruncation-group";
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory);
groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(0).stream("test/" + streamName).build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(), ReaderConfig.builder().build());
// No event is available before the reader has worked its way to the live segments.
EventRead<String> event = reader.readNextEvent(200);
assertNull(event.getEvent());
groupManager.getReaderGroup(group).initiateCheckpoint("cp1", executorService());
event = reader.readNextEvent(2000);
assertEquals("cp1", event.getCheckpointName());
event = reader.readNextEvent(200);
assertNull(event.getEvent());
groupManager.getReaderGroup(group).initiateCheckpoint("cp2", executorService());
event = reader.readNextEvent(2000);
assertEquals("cp2", event.getCheckpointName());
// After the second checkpoint the post-truncation event becomes readable.
event = reader.readNextEvent(10000);
assertEquals("truncationTest3", event.getEvent());
}
Example usage of io.pravega.client.stream.impl.StreamImpl in the Pravega project, taken from class EndToEndTxnWithTest, method scaleUpStream.
/**
 * Scales the named stream in scope "test" from its single initial segment (id 0)
 * into two segments covering the key-space halves [0.0, 0.5) and [0.5, 1.0),
 * asserting that the scale operation succeeds.
 */
private void scaleUpStream(String streamName) throws InterruptedException, java.util.concurrent.ExecutionException {
Stream targetStream = new StreamImpl("test", streamName);
Map<Double, Double> newKeyRanges = new HashMap<>();
newKeyRanges.put(0.0, 0.5);
newKeyRanges.put(0.5, 1.0);
// Seal segment 0 and replace it with the two new ranges; block until completion.
assertTrue(PRAVEGA.getLocalController().scaleStream(targetStream, Collections.singletonList(0L), newKeyRanges, executorService()).getFuture().get());
}
Example usage of io.pravega.client.stream.impl.StreamImpl in the Pravega project, taken from class EndToEndWithScaleTest, method testScale.
/**
 * Verifies scaling end to end, both on a freshly created stream (iteration 0) and on a
 * stream re-created after seal + delete (iteration 1): events written before and after
 * the scale must both be readable, with a checkpoint stepping the reader across the
 * scale boundary.
 */
@Test(timeout = 90000)
public void testScale() throws Exception {
final String scope = "test";
final String streamName = "test";
StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
// Test scale both in a new stream and in a re-created one.
for (int i = 0; i < 2; i++) {
@Cleanup Controller controller = controllerWrapper.getController();
controllerWrapper.getControllerService().createScope(scope, 0L).get();
controller.createStream(scope, streamName, config).get();
@Cleanup ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
@Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
@Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
// Pre-scale event, written to the single fixed segment.
writer.writeEvent("0", "txntest1" + i).get();
// Scale the single segment into three. On the re-created stream the initial
// segment id is offset by the previous iteration's epochs, hence i * 4L —
// presumably 4 segment numbers are consumed per iteration (TODO confirm).
Stream stream = new StreamImpl(scope, streamName);
Map<Double, Double> map = new HashMap<>();
map.put(0.0, 0.33);
map.put(0.33, 0.66);
map.put(0.66, 1.0);
Boolean result = controller.scaleStream(stream, Collections.singletonList(i * 4L), map, executorService()).getFuture().get();
assertTrue(result);
// Post-scale event, written to one of the three new segments.
writer.writeEvent("0", "txntest2" + i).get();
// Reader group and reader names carry the iteration index to avoid clashes
// between the two passes.
@Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl(scope, controller, clientFactory);
groupManager.createReaderGroup("reader" + i, ReaderGroupConfig.builder().disableAutomaticCheckpoints().groupRefreshTimeMillis(0).stream(Stream.of(scope, streamName).getScopedName()).build());
@Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId" + i, "reader" + i, new JavaSerializer<>(), ReaderConfig.builder().build());
// The pre-scale event is read first.
EventRead<String> event = reader.readNextEvent(10000);
assertNotNull(event.getEvent());
assertEquals("txntest1" + i, event.getEvent());
// Nothing more is available until the reader crosses the scale boundary.
event = reader.readNextEvent(100);
assertNull(event.getEvent());
// The checkpoint lets the reader move on to the successor segments.
groupManager.getReaderGroup("reader" + i).initiateCheckpoint("cp" + i, executorService());
event = reader.readNextEvent(10000);
assertEquals("cp" + i, event.getCheckpointName());
event = reader.readNextEvent(10000);
assertEquals("txntest2" + i, event.getEvent());
// Seal and delete so the next iteration exercises a re-created stream.
assertTrue(controller.sealStream(scope, streamName).join());
assertTrue(controller.deleteStream(scope, streamName).join());
}
}
End of aggregated StreamImpl usage examples.