Use of io.pravega.client.admin.StreamManager in project pravega by pravega.
Class ScopeTest, method testListStreams.
@Test(timeout = 30000)
public void testListStreams() throws Exception {
final String scope = "test";
final String streamName1 = "test1";
final String streamName2 = "test2";
final String streamName3 = "test3";
final Map<String, Integer> foundCount = new HashMap<>();
foundCount.put(streamName1, 0);
foundCount.put(streamName2, 0);
foundCount.put(streamName3, 0);
foundCount.put(NameUtils.getMarkStreamForStream(streamName1), 0);
foundCount.put(NameUtils.getMarkStreamForStream(streamName2), 0);
foundCount.put(NameUtils.getMarkStreamForStream(streamName3), 0);
StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
@Cleanup Controller controller = controllerWrapper.getController();
ClientConfig clientConfig = ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build();
@Cleanup ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
controllerWrapper.getControllerService().createScope(scope, 0L).get();
controller.createStream(scope, streamName1, config).get();
controller.createStream(scope, streamName2, config).get();
controller.createStream(scope, streamName3, config).get();
@Cleanup StreamManager manager = new StreamManagerImpl(controller, cp);
// Listing also returns each stream's internal mark stream (see NameUtils.getMarkStreamForStream above), so six entries are expected.
Iterator<Stream> iterator = manager.listStreams(scope);
assertTrue(iterator.hasNext());
Stream next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertTrue(iterator.hasNext());
next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertTrue(iterator.hasNext());
next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertTrue(iterator.hasNext());
next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertTrue(iterator.hasNext());
next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertTrue(iterator.hasNext());
next = iterator.next();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
assertFalse(iterator.hasNext());
assertTrue(foundCount.entrySet().stream().allMatch(x -> x.getValue() == 1));
// The controller's async iterator should return the same six streams.
AsyncIterator<Stream> asyncIterator = controller.listStreams(scope);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
foundCount.computeIfPresent(next.getStreamName(), (x, y) -> ++y);
next = asyncIterator.getNext().join();
assertNull(next);
assertTrue(foundCount.entrySet().stream().allMatch(x -> x.getValue() == 2));
}
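For comparison, an application outside the test harness would normally obtain the manager through the public StreamManager.create(ClientConfig) factory rather than the internal StreamManagerImpl constructor used above. A minimal sketch, assuming a hypothetical controller at tcp://localhost:9090 and a hypothetical scope "demo" with stream "stream1":
import java.net.URI;
import java.util.Iterator;
import io.pravega.client.ClientConfig;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;

public class ListStreamsExample {
    public static void main(String[] args) {
        // Hypothetical controller endpoint; replace with the URI of your deployment.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        // StreamManager is AutoCloseable, so try-with-resources replaces @Cleanup here.
        try (StreamManager streamManager = StreamManager.create(clientConfig)) {
            streamManager.createScope("demo");
            streamManager.createStream("demo", "stream1",
                    StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
            // List every stream registered under the scope. As the test above shows,
            // internal mark streams may appear in the listing as well.
            Iterator<Stream> streams = streamManager.listStreams("demo");
            while (streams.hasNext()) {
                System.out.println(streams.next().getScopedName());
            }
        }
    }
}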
Use of io.pravega.client.admin.StreamManager in project pravega by pravega.
Class ReaderCheckpointTest, method readerCheckpointTest.
@Test
public void readerCheckpointTest() {
controllerURI = fetchControllerURI();
final ClientConfig clientConfig = Utils.buildClientConfig(controllerURI);
StreamManager streamManager = StreamManager.create(clientConfig);
assertTrue("Creating Scope", streamManager.createScope(SCOPE_1));
assertTrue("Creating stream", streamManager.createStream(SCOPE_1, STREAM, streamConfig));
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(SCOPE_1, clientConfig);
readerGroupManager.createReaderGroup(READER_GROUP_NAME, ReaderGroupConfig.builder().stream(io.pravega.client.stream.Stream.of(SCOPE_1, STREAM)).build());
@Cleanup ReaderGroup readerGroup = readerGroupManager.getReaderGroup(READER_GROUP_NAME);
int startInclusive = 1;
int endExclusive = 100;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_1, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_1, startInclusive, endExclusive);
// initiate checkpoint 100
Checkpoint checkPoint100 = createCheckPointAndVerify(readerGroup, "batch100");
// write and read events 100 to 200
startInclusive = 100;
endExclusive = 200;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_1, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_1, startInclusive, endExclusive);
// reset to checkpoint 100
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromCheckpoint(checkPoint100).build());
readEventsAndVerify(SCOPE_1, 100, endExclusive);
// initiate checkpoint 200
Checkpoint checkPoint200 = createCheckPointAndVerify(readerGroup, "batch200");
// write and read events 200 to 300
startInclusive = 200;
endExclusive = 300;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_1, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_1, startInclusive, endExclusive);
// reset back to checkpoint 200
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromCheckpoint(checkPoint200).build());
readEventsAndVerify(SCOPE_1, 200, endExclusive);
// reset back to checkpoint 100
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromCheckpoint(checkPoint100).build());
readEventsAndVerify(SCOPE_1, 100, endExclusive);
// clean up
readerGroupManager.deleteReaderGroup(READER_GROUP_NAME);
}
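The helper createCheckPointAndVerify is not shown on this page. A rough sketch of what such a helper might look like, assuming a single EventStreamReader<String> that is already part of the reader group; it relies on ReaderGroup.initiateCheckpoint, whose future only completes once every online reader has read past the checkpoint marker:
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import io.pravega.client.stream.Checkpoint;
import io.pravega.client.stream.EventRead;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.ReaderGroup;
import io.pravega.client.stream.ReinitializationRequiredException;

public class CheckpointSketch {
    // Initiates a checkpoint and keeps the single reader polling until the checkpoint
    // marker has been delivered, so that the returned future can complete.
    static Checkpoint createCheckpoint(ReaderGroup readerGroup, EventStreamReader<String> reader, String name)
            throws ReinitializationRequiredException {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        try {
            CompletableFuture<Checkpoint> future = readerGroup.initiateCheckpoint(name, executor);
            while (!future.isDone()) {
                // Regular events may also arrive here and would normally be processed by the caller.
                EventRead<String> event = reader.readNextEvent(1000);
                if (event.isCheckpoint()) {
                    System.out.println("Received checkpoint marker " + event.getCheckpointName());
                }
            }
            return future.join();
        } finally {
            executor.shutdown();
        }
    }
}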
Use of io.pravega.client.admin.StreamManager in project pravega by pravega.
Class ReaderCheckpointTest, method generateStreamCutsTest.
@Test
public void generateStreamCutsTest() {
controllerURI = fetchControllerURI();
final ClientConfig clientConfig = Utils.buildClientConfig(controllerURI);
StreamManager streamManager = StreamManager.create(clientConfig);
assertTrue("Creating Scope", streamManager.createScope(SCOPE_2));
assertTrue("Creating stream", streamManager.createStream(SCOPE_2, STREAM, streamConfig));
@Cleanup ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(SCOPE_2, clientConfig);
readerGroupManager.createReaderGroup(READER_GROUP_NAME, ReaderGroupConfig.builder().stream(io.pravega.client.stream.Stream.of(SCOPE_2, STREAM)).groupRefreshTimeMillis(GROUP_REFRESH_TIME_MILLIS).build());
@Cleanup ReaderGroup readerGroup = readerGroupManager.getReaderGroup(READER_GROUP_NAME);
int startInclusive = 1;
int endExclusive = 100;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_2, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_2, startInclusive, endExclusive);
// Obtain StreamCuts at 100th event.
Map<Stream, StreamCut> cutAt100 = generateStreamCuts(readerGroup);
// Write and read events 100 to 200
startInclusive = 100;
endExclusive = 200;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_2, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_2, startInclusive, endExclusive);
// Reset to stream cut pointing to 100th event.
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromStreamCuts(cutAt100).build());
readEventsAndVerify(SCOPE_2, 100, endExclusive);
// Obtain stream cut at 200th event.
Map<Stream, StreamCut> cutAt200 = generateStreamCuts(readerGroup);
// Write and read events 200 to 300.
startInclusive = 200;
endExclusive = 300;
log.info("Write events with range [{},{})", startInclusive, endExclusive);
writeEvents(SCOPE_2, IntStream.range(startInclusive, endExclusive).boxed().collect(Collectors.toList()));
readEventsAndVerify(SCOPE_2, startInclusive, endExclusive);
// Reset back to stream cut pointing to 200th event.
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromStreamCuts(cutAt200).build());
readEventsAndVerify(SCOPE_2, 200, endExclusive);
// Reset back to stream cut pointing to 100th event.
readerGroup.resetReaderGroup(ReaderGroupConfig.builder().startFromStreamCuts(cutAt100).build());
readEventsAndVerify(SCOPE_2, 100, endExclusive);
// clean up
readerGroupManager.deleteReaderGroup(READER_GROUP_NAME);
}
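generateStreamCuts is likewise a helper that is not shown here. A sketch under the same single-reader assumption, built on ReaderGroup.generateStreamCuts, which returns the readers' current positions as a map of Stream to StreamCut:
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.ReaderGroup;
import io.pravega.client.stream.ReinitializationRequiredException;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamCut;

public class StreamCutSketch {
    // Requests StreamCuts at the readers' current positions; the future completes once the
    // readers have reported their positions, which happens as they call readNextEvent().
    static Map<Stream, StreamCut> generateCuts(ReaderGroup readerGroup, EventStreamReader<String> reader)
            throws ReinitializationRequiredException {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        try {
            CompletableFuture<Map<Stream, StreamCut>> cuts = readerGroup.generateStreamCuts(executor);
            while (!cuts.isDone()) {
                // Keep the reader polling; any events returned here would normally be processed.
                reader.readNextEvent(1000);
            }
            return cuts.join();
        } finally {
            executor.shutdown();
        }
    }
}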
Use of io.pravega.client.admin.StreamManager in project pravega by pravega.
Class ByteStreamTest, method readWriteTest.
@Test(timeout = 30000)
public void readWriteTest() throws IOException {
String scope = "ByteStreamTest";
String stream = "readWriteTest";
StreamConfiguration config = StreamConfiguration.builder().build();
@Cleanup StreamManager streamManager = new StreamManagerImpl(PRAVEGA.getLocalController(), Mockito.mock(ConnectionPool.class));
// create a scope
Boolean createScopeStatus = streamManager.createScope(scope);
log.info("Create scope status {}", createScopeStatus);
// create a stream
Boolean createStreamStatus = streamManager.createStream(scope, stream, config);
log.info("Create stream status {}", createStreamStatus);
@Cleanup ByteStreamClientFactory client = createClientFactory(scope);
byte[] payload = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
byte[] readBuffer = new byte[10];
@Cleanup ByteStreamWriter writer = client.createByteStreamWriter(stream);
@Cleanup ByteStreamReader reader = client.createByteStreamReader(stream);
AssertExtensions.assertBlocks(() -> reader.read(readBuffer), () -> writer.write(payload));
assertArrayEquals(payload, readBuffer);
Arrays.fill(readBuffer, (byte) 0);
writer.write(payload);
writer.write(payload);
writer.write(payload);
writer.closeAndSeal();
assertEquals(10, reader.read(readBuffer));
assertArrayEquals(payload, readBuffer);
for (int i = 0; i < 10; i++) {
assertEquals(i, reader.read());
}
Arrays.fill(readBuffer, (byte) -1);
assertEquals(5, reader.read(readBuffer, 0, 5));
assertEquals(5, reader.read(readBuffer, 5, 5));
assertArrayEquals(payload, readBuffer);
assertEquals(-1, reader.read());
assertEquals(-1, reader.read(readBuffer));
}
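Outside the test, the byte-stream client is usually obtained through the public ByteStreamClientFactory.withScope factory instead of a mocked connection pool and a local controller. A minimal sketch, assuming a hypothetical controller at tcp://localhost:9090 and an existing single-segment stream "bytes" in scope "demo":
import java.io.IOException;
import java.net.URI;
import io.pravega.client.ByteStreamClientFactory;
import io.pravega.client.ClientConfig;
import io.pravega.client.byteStream.ByteStreamReader;
import io.pravega.client.byteStream.ByteStreamWriter;

public class ByteStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical endpoint; the target stream must already exist and, for the
        // byte-stream API, is expected to consist of a single, non-scaling segment.
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090"))
                .build();
        try (ByteStreamClientFactory factory = ByteStreamClientFactory.withScope("demo", clientConfig);
             ByteStreamWriter writer = factory.createByteStreamWriter("bytes");
             ByteStreamReader reader = factory.createByteStreamReader("bytes")) {
            byte[] payload = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
            writer.write(payload);
            // flush() blocks until the bytes written so far are durably stored.
            writer.flush();
            byte[] buffer = new byte[payload.length];
            // read() blocks until at least some data is available.
            int bytesRead = reader.read(buffer);
            System.out.println("Read " + bytesRead + " bytes");
        }
    }
}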
Use of io.pravega.client.admin.StreamManager in project pravega by pravega.
Class ByteStreamTest, method readWriteTestTruncate.
@Test(timeout = 30000)
public void readWriteTestTruncate() throws IOException {
String scope = "ByteStreamTest";
String stream = "readWriteTestTruncate";
StreamConfiguration config = StreamConfiguration.builder().build();
@Cleanup StreamManager streamManager = new StreamManagerImpl(PRAVEGA.getLocalController(), Mockito.mock(ConnectionPool.class));
// create a scope
Boolean createScopeStatus = streamManager.createScope(scope);
log.info("Create scope status {}", createScopeStatus);
// create a stream
Boolean createStreamStatus = streamManager.createStream(scope, stream, config);
log.info("Create stream status {}", createStreamStatus);
@Cleanup ByteStreamClientFactory client = createClientFactory(scope);
byte[] payload = new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
byte[] readBuffer = new byte[10];
@Cleanup ByteStreamWriter writer = client.createByteStreamWriter(stream);
@Cleanup ByteStreamReader reader = client.createByteStreamReader(stream);
// Verify reads and writes.
AssertExtensions.assertBlocks(() -> reader.read(readBuffer), () -> writer.write(payload));
assertArrayEquals(payload, readBuffer);
// Truncate data before offset 5
writer.truncateDataBefore(5);
// Seek to an offset inside the truncated range and verify that the read fails.
reader.seekToOffset(reader.fetchHeadOffset() - 1);
assertThrows(SegmentTruncatedException.class, reader::read);
// Seek to the new head offset and verify that the remaining data can be read.
byte[] data = new byte[] { 5, 6, 7, 8, 9 };
reader.seekToOffset(reader.fetchHeadOffset());
byte[] readBuffer1 = new byte[5];
int bytesRead = reader.read(readBuffer1);
assertEquals(5, bytesRead);
assertArrayEquals(readBuffer1, data);
// Create a new ByteStreamReader for the same stream.
ByteStreamReader reader1 = client.createByteStreamReader(stream);
// Verify that it can read the remaining (non-truncated) data.
readBuffer1 = new byte[5];
bytesRead = reader1.read(readBuffer1);
// Verify that all remaining bytes were read.
assertEquals(5, bytesRead);
assertArrayEquals(readBuffer1, data);
}
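Because truncateDataBefore removes everything below the head offset, a reader positioned inside the truncated range fails, as the assertThrows above demonstrates. The following is a small defensive sketch (a hypothetical helper, not part of the test) that checks the head offset before reading; truncation can still race with the read, so this only narrows the window:
import java.io.IOException;
import io.pravega.client.byteStream.ByteStreamReader;

public class TruncationAwareRead {
    // If the reader's current offset has been truncated away, jump to the stream's
    // head offset before reading; otherwise read from the current position.
    static int readFromAvailableData(ByteStreamReader reader, byte[] buffer) throws IOException {
        long head = reader.fetchHeadOffset();
        if (reader.getOffset() < head) {
            reader.seekToOffset(head);
        }
        return reader.read(buffer);
    }
}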