Example usage of io.pravega.client.batch.SegmentRange in the pravega project (pravega/pravega), taken from the class BatchClientTest, method testBatchClient.
@Test(timeout = 50000)
public void testBatchClient() throws Exception {
    // End-to-end check of the batch API: write to a 1-segment stream, scale it up
    // and then down (creating 6 segments in total across 3 epochs), and verify the
    // batch client can enumerate every segment and read back every event.
    StreamConfiguration config = StreamConfiguration.builder()
            .scope(SCOPE)
            .streamName(STREAM)
            .scalingPolicy(ScalingPolicy.fixed(1))
            .build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope(SCOPE).get();
    controller.createStream(config).get();

    // Create reader and writer.
    @Cleanup ClientFactory clientFactory = ClientFactory.withScope(SCOPE, controllerUri);
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(SCOPE, controllerUri);
    groupManager.createReaderGroup("group",
            ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(Stream.of(SCOPE, STREAM)).build());
    @Cleanup EventStreamWriter<String> writer =
            clientFactory.createEventWriter(STREAM, serializer, EventWriterConfig.builder().build());

    // Write events to the stream while it still has a single segment (epoch 0).
    writeEvents(writer);

    // Scale up: seal segment 0 and split its key space into three new segments.
    Stream stream = new StreamImpl(SCOPE, STREAM);
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.33);
    map.put(0.33, 0.66);
    map.put(0.66, 1.0);
    // Use the primitive: scaleStream's future yields a Boolean, but we only need its value.
    boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
    assertTrue("Scale up operation result", result);
    writeEvents(writer);

    // Scale down: seal the three segments and merge them into two.
    map = new HashMap<>();
    map.put(0.0, 0.5);
    map.put(0.5, 1.0);
    result = controller.scaleStream(stream, Arrays.asList(1, 2, 3), map, executor).getFuture().get();
    assertTrue("Scale down operation result", result);
    writeEvents(writer);

    BatchClient batchClient = clientFactory.createBatchClient();

    // List all segments in the stream: 1 (epoch 0) + 3 (epoch 1) + 2 (epoch 2) = 6.
    ArrayList<SegmentRange> segments =
            Lists.newArrayList(batchClient.getSegments(stream, null, null).getIterator());
    assertEquals("Expected number of segments", 6, segments.size());

    // Batch-read every event from every segment; writeEvents() ran 3 times, 3 events each.
    List<String> batchEventList = new ArrayList<>();
    segments.forEach(segInfo -> {
        @Cleanup SegmentIterator<String> segmentIterator = batchClient.readSegment(segInfo, serializer);
        batchEventList.addAll(Lists.newArrayList(segmentIterator));
    });
    assertEquals("Event count", 9, batchEventList.size());

    // Read from a given offset: each event is 30 bytes, so [60, 90) holds exactly the 3rd event.
    Segment seg0 = new Segment(SCOPE, STREAM, 0);
    SegmentRange seg0Info = SegmentRangeImpl.builder().segment(seg0).startOffset(60).endOffset(90).build();
    @Cleanup SegmentIterator<String> seg0Iterator = batchClient.readSegment(seg0Info, serializer);
    ArrayList<String> dataAtOffset = Lists.newArrayList(seg0Iterator);
    assertEquals(1, dataAtOffset.size());
    assertEquals(DATA_OF_SIZE_30, dataAtOffset.get(0));
}
Example usage of io.pravega.client.batch.SegmentRange in the pravega project, taken from the class BatchClientFactoryImpl, method getStreamSegmentInfo.
/**
 * Builds an iterator over the {@link SegmentRange}s of every segment lying between
 * the two stream cuts. Unbounded cuts are permitted; the cuts are validated first,
 * then the controller is asked for the covered segments and a range (with offsets)
 * is fetched asynchronously for each one.
 */
private StreamSegmentsIterator getStreamSegmentInfo(final Stream stream, final StreamCut startStreamCut, final StreamCut endStreamCut) {
    log.debug("Start stream cut: {}, End stream cut: {}", startStreamCut, endStreamCut);
    StreamSegmentsInfoImpl.validateStreamCuts(startStreamCut, endStreamCut);

    // Resolve the set of segments covered by the two cuts; surface controller failures as RuntimeException.
    final StreamSegmentSuccessors successors =
            getAndHandleExceptions(controller.getSegments(startStreamCut, endStreamCut), RuntimeException::new);
    final SortedSet<Segment> orderedSegments = new TreeSet<>(successors.getSegments());

    // One delegation token provider is shared by all per-segment range fetches.
    final DelegationTokenProvider tokenProvider = DelegationTokenProviderFactory.create(
            controller, stream.getScope(), stream.getStreamName(), AccessOperation.READ);
    log.debug("List of Segments between the start and end stream cuts : {}", orderedSegments);

    // Kick off all range lookups concurrently, then block for the combined result.
    val rangeFutures = orderedSegments.stream()
            .map(segment -> getSegmentRange(segment, startStreamCut, endStreamCut, tokenProvider))
            .collect(Collectors.toList());
    final List<SegmentRange> ranges = Futures.getThrowingException(Futures.allOfWithResults(rangeFutures));

    return StreamSegmentsInfoImpl.builder()
            .segmentRangeIterator(ranges.iterator())
            .startStreamCut(startStreamCut)
            .endStreamCut(endStreamCut)
            .build();
}
Example usage of io.pravega.client.batch.SegmentRange in the pravega project, taken from the class BatchClientImplTest, method testGetSegmentsWithUnboundedStreamCut.
@Test(timeout = 5000)
public void testGetSegmentsWithUnboundedStreamCut() throws Exception {
    // A stream with 3 fixed segments, backed by a mock controller and connection factory.
    PravegaNodeUri location = new PravegaNodeUri("localhost", 0);
    @Cleanup MockConnectionFactoryImpl connectionFactory = getMockConnectionFactory(location);
    MockController mockController =
            new MockController(location.getEndpoint(), location.getPort(), connectionFactory, false);
    Stream stream = createStream(SCOPE, STREAM, 3, mockController);
    @Cleanup BatchClientFactoryImpl client = new BatchClientFactoryImpl(
            mockController, ClientConfig.builder().maxConnectionsPerSegmentStore(1).build(), connectionFactory);

    // Querying with UNBOUNDED cuts on both ends must yield exactly segments 0, 1 and 2, in order.
    Iterator<SegmentRange> unboundedIterator =
            client.getSegments(stream, StreamCut.UNBOUNDED, StreamCut.UNBOUNDED).getIterator();
    for (long expectedSegmentId = 0L; expectedSegmentId < 3L; expectedSegmentId++) {
        assertTrue(unboundedIterator.hasNext());
        assertEquals(expectedSegmentId, unboundedIterator.next().asImpl().getSegment().getSegmentId());
    }
    assertFalse(unboundedIterator.hasNext());
}
Example usage of io.pravega.client.batch.SegmentRange in the pravega project, taken from the class SegmentRangeImplTest, method testValid.
@Test
public void testValid() {
    // A range built without an explicit start offset defaults to startOffset = 0.
    final Segment expectedSegment = new Segment("scope", "stream", 0);
    SegmentRange range = SegmentRangeImpl.builder()
            .endOffset(20L)
            .segment(expectedSegment)
            .build();
    assertEquals(0L, range.getStartOffset());
    assertEquals(20L, range.getEndOffset());
    // Segment, scope and stream name are all derived from the segment passed to the builder.
    assertEquals(expectedSegment, ((SegmentRangeImpl) range).getSegment());
    assertEquals("scope", range.getScope());
    assertEquals("stream", range.getStreamName());
}
Example usage of io.pravega.client.batch.SegmentRange in the pravega project, taken from the class BatchClientTest, method testBatchClientWithStreamTruncation.
@Test(timeout = 50000)
@SuppressWarnings("deprecation")
public void testBatchClientWithStreamTruncation() throws InterruptedException, ExecutionException {
    // Verifies that after truncating a stream, the batch client returns the same segments
    // whether queried with UNBOUNDED cuts or with the head/tail cuts from getStreamInfo().
    @Cleanup StreamManager streamManager = StreamManager.create(clientConfig);
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(SCOPE, clientConfig);
    createTestStreamWithEvents(clientFactory);
    log.info("Done creating a test stream with test events");

    @Cleanup BatchClientFactory batchClient = BatchClientFactory.withScope(SCOPE, clientConfig);
    log.info("Done creating batch client factory");

    // 1. Build a StreamCut positioned after 2 events (offset = 2 * 30 = 60).
    StreamCut cutAfterTwoEvents = new StreamCutImpl(
            Stream.of(SCOPE, STREAM), ImmutableMap.of(new Segment(SCOPE, STREAM, 0), 60L));

    // 2. Truncate the stream at that cut.
    assertTrue("truncate stream",
            controllerWrapper.getController().truncateStream(SCOPE, STREAM, cutAfterTwoEvents).join());

    // 3a. Fetch segments using StreamCut.UNBOUNDED.
    ArrayList<SegmentRange> segmentsPostTruncation1 = Lists.newArrayList(
            batchClient.getSegments(Stream.of(SCOPE, STREAM), StreamCut.UNBOUNDED, StreamCut.UNBOUNDED).getIterator());

    // 3b. Fetch segments using the head/tail cuts reported by the getStreamInfo() API.
    StreamInfo streamInfo = streamManager.getStreamInfo(SCOPE, STREAM);
    ArrayList<SegmentRange> segmentsPostTruncation2 = Lists.newArrayList(
            batchClient.getSegments(Stream.of(SCOPE, STREAM),
                    streamInfo.getHeadStreamCut(), streamInfo.getTailStreamCut()).getIterator());

    // Both queries must agree on segment count and readable event count.
    validateSegmentCountAndEventCount(batchClient, segmentsPostTruncation1);
    validateSegmentCountAndEventCount(batchClient, segmentsPostTruncation2);
}
End of aggregated usage examples for io.pravega.client.batch.SegmentRange.