Usage example of io.pravega.client.control.impl.Controller in the Pravega project: class AppendTest, method appendALotOfData.
/**
 * Writes a large volume of events through an in-process Pravega segment store and
 * periodically truncates the segment over the wire protocol, verifying that each
 * truncation request succeeds. Event size scales with the available heap so the
 * test fits in constrained CI environments.
 */
@Test(timeout = 100000)
public void appendALotOfData() {
    String endpoint = "localhost";
    String scope = "Scope";
    String streamName = "appendALotOfData";
    int port = TestUtils.getAvailableListenPort();
    // Size each event from the available heap (capped at 1 MiB) so total memory use stays bounded.
    long heapSize = Runtime.getRuntime().maxMemory();
    long messageSize = Math.min(1024 * 1024, heapSize / 20000);
    ByteBuffer payload = ByteBuffer.allocate((int) messageSize);
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup("shutdown") InlineExecutor tokenExpiryExecutor = new InlineExecutor();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, tokenExpiryExecutor);
    server.startListening();
    ClientConfig config = ClientConfig.builder().build();
    SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(config);
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(config, clientCF);
    Controller controller = new MockController(endpoint, port, connectionPool, true);
    @Cleanup StreamManagerImpl streamManager = new StreamManagerImpl(controller, connectionPool);
    streamManager.createScope(scope);
    @Cleanup ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, config);
    // Fix: use the scope variable rather than repeating the "Scope" literal, so a rename stays consistent.
    streamManager.createStream(scope, streamName, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
    @Cleanup EventStreamWriter<ByteBuffer> producer = clientFactory.createEventWriter(streamName, new ByteBufferSerializer(), EventWriterConfig.builder().build());
    @Cleanup RawClient rawClient = new RawClient(new PravegaNodeUri(endpoint, port), connectionPool);
    for (int i = 0; i < 10; i++) {
        // Write one batch of 100 events, then truncate up to the start of this batch
        // so the segment never retains more than the most recent batch.
        for (int j = 0; j < 100; j++) {
            producer.writeEvent(payload.slice());
        }
        producer.flush();
        long requestId = rawClient.getFlow().getNextSequenceNumber();
        String scopedName = new Segment(scope, streamName, 0).getScopedName();
        // Truncation offset: i completed batches of 100 events, each event being the
        // payload plus the wire-protocol type+length header.
        WireCommands.TruncateSegment request = new WireCommands.TruncateSegment(requestId, scopedName, i * 100L * (payload.remaining() + TYPE_PLUS_LENGTH_SIZE), "");
        Reply reply = rawClient.sendRequest(requestId, request).join();
        assertFalse(reply.toString(), reply.isFailure());
    }
    producer.close();
}
Usage example of io.pravega.client.control.impl.Controller in the Pravega project: class BoundedStreamReaderTest, method scaleStream.
/**
 * Scales the given stream by sealing segment 0 and splitting its key space into
 * the supplied ranges, blocking until the controller reports the scale finished.
 */
private void scaleStream(final String streamName, final Map<Double, Double> keyRanges) throws Exception {
    final Controller controller = controllerWrapper.getController();
    final boolean scaled = controller
            .scaleStream(Stream.of(SCOPE, streamName), Collections.singletonList(0L), keyRanges, executorService())
            .getFuture()
            .get();
    assertTrue(scaled);
}
Usage example of io.pravega.client.control.impl.Controller in the Pravega project: class StreamSeekTest, method createStream.
/**
 * Creates a single-segment stream with the given name in SCOPE, waiting for the
 * controller call to complete.
 */
private void createStream(String streamName) throws Exception {
    final StreamConfiguration singleSegmentConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(1))
            .build();
    controllerWrapper.getController().createStream(SCOPE, streamName, singleSegmentConfig).join();
}
Usage example of io.pravega.client.control.impl.Controller in the Pravega project: class StreamSeekTest, method scaleStream.
/**
 * Seals segment 0 of the named stream and re-splits its key space into the given
 * ranges; blocks until the controller confirms the scale operation succeeded.
 */
private void scaleStream(final String streamName, final Map<Double, Double> keyRanges) throws Exception {
    final Controller controller = controllerWrapper.getController();
    final boolean scaled = controller
            .scaleStream(Stream.of(SCOPE, streamName), Collections.singletonList(0L), keyRanges, executorService())
            .getFuture()
            .get();
    assertTrue(scaled);
}
Usage example of io.pravega.client.control.impl.Controller in the Pravega project: class UnreadBytesTest, method testUnreadBytesWithEndStreamCuts.
/**
 * Verifies that a reader group's unread-bytes metric honors an end StreamCut:
 * events written past the configured end offset (90 bytes here) must not be
 * counted, so after writing 120 bytes and reading 60, exactly 30 remain.
 */
@Test(timeout = 50000)
public void testUnreadBytesWithEndStreamCuts() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    String streamName = "testUnreadBytesWithEndStreamCuts";
    // Single local for the scope name; the original repeated the "unreadbytes" literal four times.
    String scope = "unreadbytes";
    Controller controller = PRAVEGA.getLocalController();
    controller.createScope(scope).get();
    controller.createStream(scope, streamName, config).get();
    @Cleanup EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(scope, ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(), EventWriterConfig.builder().build());
    // Write just 2 events to simplify simulating a checkpoint.
    writer.writeEvent("0", "data of size 30").get();
    writer.writeEvent("0", "data of size 30").get();
    String group = "testUnreadBytesWithEndStreamCuts-group";
    @Cleanup ReaderGroupManager groupManager = ReaderGroupManager.withScope(scope, ClientConfig.builder().controllerURI(PRAVEGA.getControllerURI()).build());
    // Create a bounded reader group whose end StreamCut caps reads at offset 90.
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream(scope + "/" + streamName, StreamCut.UNBOUNDED, getStreamCut(streamName, 90L, 0)).build());
    // NOTE(review): ReaderGroup is closeable; added @Cleanup for symmetry with the other resources here.
    @Cleanup ReaderGroup readerGroup = groupManager.getReaderGroup(group);
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new JavaSerializer<>(), ReaderConfig.builder().build());
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("data of size 30", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("data of size 30", secondEvent.getEvent());
    // Trigger a checkpoint; the reader must observe it as the next "event".
    CompletableFuture<Checkpoint> chkPointResult = readerGroup.initiateCheckpoint("test", executorService());
    EventRead<String> chkpointEvent = reader.readNextEvent(15000);
    assertEquals("test", chkpointEvent.getCheckpointName());
    // Nothing readable remains below the end StreamCut: expect an empty, non-checkpoint read.
    EventRead<String> emptyEvent = reader.readNextEvent(100);
    assertEquals(false, emptyEvent.isCheckpoint());
    assertEquals(null, emptyEvent.getEvent());
    chkPointResult.join();
    // Write more events, bringing the total written to 120 bytes.
    writer.writeEvent("0", "data of size 30").get();
    writer.writeEvent("0", "data of size 30").get();
    long unreadBytes = readerGroup.getMetrics().unreadBytes();
    // The end offset of 90 bytes must cap the computation: 90 - 60 read = 30 unread.
    // (Fixes the weak assertTrue(x == 30) and the "bvtes" typo in the failure message.)
    assertEquals("Unread bytes", 30, unreadBytes);
}
Aggregations