use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
the class PravegaResource method before.
@Override
protected void before() throws Exception {
    // 1. Start ZK
    zkTestServer = new TestingServerStarter().start();
    // 2. Start the in-memory segment store
    serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    // 3. Start the controller. The default watermarking frequency on the controller is 10 seconds;
    // lower it to 5 seconds so watermarks are emitted more frequently during tests.
    System.setProperty("controller.watermarking.frequency.seconds", "5");
    controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), false, controllerPort, serviceHost, servicePort, containerCount);
    controllerWrapper.awaitRunning();
}
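The matching teardown is not shown on this page. A minimal sketch of what an after() counterpart could look like, assuming the fields created above expose standard close() methods and that Lombok's @SneakyThrows is available (the snippets already rely on Lombok's @Cleanup):

@Override
@SneakyThrows
protected void after() {
    // Hedged sketch, not the actual PravegaResource teardown: release resources
    // in roughly the reverse order of creation.
    controllerWrapper.close();   // stop the controller
    server.close();              // stop the segment store listener
    serviceBuilder.close();      // shut down the in-memory segment store
    zkTestServer.close();        // stop the ZooKeeper test server
}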
use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
the class ReadTest method readThroughStreamClient.
@Test(timeout = 10000)
public void readThroughStreamClient() throws ReinitializationRequiredException {
    String endpoint = "localhost";
    String streamName = "readThroughStreamClient";
    String readerName = "reader";
    String readerGroup = "readThroughStreamClient-group";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "Scope1";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, NoOpScheduledExecutor.get());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(scope, endpoint, port);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().stream(Stream.of(scope, streamName)).disableAutomaticCheckpoints().build();
    streamManager.createScope(scope);
    streamManager.createStream(scope, streamName, null);
    streamManager.createReaderGroup(readerGroup, groupConfig);
    JavaSerializer<String> serializer = new JavaSerializer<>();
    @Cleanup EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, serializer, EventWriterConfig.builder().build());
    producer.writeEvent(testString);
    producer.flush();
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader(readerName, readerGroup, serializer, ReaderConfig.builder().build());
    String read = reader.readNextEvent(5000).getEvent();
    assertEquals(testString, read);
}
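The test above reads SERVICE_BUILDER, a class-level fixture that is not part of the snippet. A plausible declaration, mirroring the in-memory setup used in PravegaResource.before() (hypothetical names; the real field in ReadTest may differ), is:

// Assumed class-level fixture shared by the tests in this class.
private static final ServiceBuilder SERVICE_BUILDER = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());

@BeforeClass
public static void setUp() throws Exception {
    SERVICE_BUILDER.initialize();
}

@AfterClass
public static void tearDown() throws Exception {
    SERVICE_BUILDER.close();
}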
use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
the class ReadTest method readThroughSegmentClient.
@Test(timeout = 10000)
public void readThroughSegmentClient() throws SegmentSealedException, EndOfSegmentException, SegmentTruncatedException {
    String endpoint = "localhost";
    String scope = "scope";
    String stream = "readThroughSegmentClient";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
    @Cleanup Controller controller = new MockController(endpoint, port, connectionPool, true);
    controller.createScope(scope);
    controller.createStream(scope, stream, StreamConfiguration.builder().build());
    SegmentOutputStreamFactoryImpl segmentproducerClient = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
    SegmentInputStreamFactoryImpl segmentConsumerClient = new SegmentInputStreamFactoryImpl(controller, connectionPool);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup SegmentOutputStream out = segmentproducerClient.createOutputStreamForSegment(segment, segmentSealedCallback, EventWriterConfig.builder().build(), DelegationTokenProviderFactory.createWithEmptyToken());
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), new CompletableFuture<>()));
    out.flush();
    @Cleanup EventSegmentReader in = segmentConsumerClient.createEventReaderForSegment(segment);
    ByteBuffer result = in.read();
    assertEquals(ByteBuffer.wrap(testString.getBytes()), result);
    // Test large write followed by read
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[15]), new CompletableFuture<>()));
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(new byte[150000]), new CompletableFuture<>()));
    assertEquals(in.read().capacity(), 15);
    assertEquals(in.read().capacity(), 15);
    assertEquals(in.read().capacity(), 150000);
}
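This test (and appendThroughSegmentClient below) passes a segmentSealedCallback that is declared at class level rather than in the method. Since nothing in these snippets seals a segment, a no-op consumer suffices; a hedged sketch of such a field:

// Assumed class-level field: a callback that simply ignores the sealed segment.
private final Consumer<Segment> segmentSealedCallback = segment -> { };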
use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
the class ReaderGroupTest method testEventHandoff.
@Test(timeout = 20000)
public void testEventHandoff() throws Exception {
    String endpoint = "localhost";
    int servicePort = TestUtils.getAvailableListenPort();
    @Cleanup ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    @Cleanup MockStreamManager streamManager = new MockStreamManager(SCOPE, endpoint, servicePort);
    streamManager.createScope(SCOPE);
    streamManager.createStream(SCOPE, STREAM_NAME, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(2)).build());
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    ReaderGroupConfig groupConfig = ReaderGroupConfig.builder().automaticCheckpointIntervalMillis(-1).stream(Stream.of(SCOPE, STREAM_NAME)).build();
    streamManager.createReaderGroup(READER_GROUP, groupConfig);
    writeEvents(100, clientFactory);
    ReaderThread r1 = new ReaderThread(20, "Reader1", clientFactory);
    ReaderThread r2 = new ReaderThread(80, "Reader2", clientFactory);
    Thread reader1Thread = new Thread(r1);
    Thread reader2Thread = new Thread(r2);
    reader1Thread.start();
    reader2Thread.start();
    reader1Thread.join();
    reader2Thread.join();
    if (r1.exception.get() != null) {
        throw r1.exception.get();
    }
    if (r2.exception.get() != null) {
        throw r2.exception.get();
    }
    streamManager.deleteReaderGroup(READER_GROUP);
}
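testEventHandoff relies on a writeEvents helper and a ReaderThread runnable defined elsewhere in ReaderGroupTest. A minimal sketch of the writer side, assuming string payloads and the JavaSerializer used in the other snippets (the real helper may use different payloads or configuration):

// Hypothetical helper: write `count` events to STREAM_NAME and flush.
private void writeEvents(int count, MockClientFactory clientFactory) {
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter(STREAM_NAME, new JavaSerializer<String>(), EventWriterConfig.builder().build());
    for (int i = 0; i < count; i++) {
        writer.writeEvent(String.valueOf(i));
    }
    writer.flush();
}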
use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
the class AppendTest method appendThroughSegmentClient.
@Test(timeout = 10000)
public void appendThroughSegmentClient() throws Exception {
    String endpoint = "localhost";
    int port = TestUtils.getAvailableListenPort();
    String testString = "Hello world\n";
    String scope = "scope";
    String stream = "appendThroughSegmentClient";
    StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
    TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, SERVICE_BUILDER.getLowPriorityExecutor());
    server.startListening();
    @Cleanup SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
    @Cleanup Controller controller = new MockController(endpoint, port, connectionPool, true);
    controller.createScope(scope);
    controller.createStream(scope, stream, StreamConfiguration.builder().build());
    SegmentOutputStreamFactoryImpl segmentClient = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
    Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream), RuntimeException::new).getSegments().iterator().next();
    @Cleanup SegmentOutputStream out = segmentClient.createOutputStreamForSegment(segment, segmentSealedCallback, EventWriterConfig.builder().build(), DelegationTokenProviderFactory.createWithEmptyToken());
    CompletableFuture<Void> ack = new CompletableFuture<>();
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), ack));
    ack.get(5, TimeUnit.SECONDS);
}
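The append completes from the caller's point of view once the ack future completes, which happens when the segment store acknowledges the write. An illustrative (hedged) variation showing how several appends could be issued through the same SegmentOutputStream and then awaited together:

// Illustrative only: issue several appends, then block until every ack arrives.
List<CompletableFuture<Void>> acks = new ArrayList<>();
for (int i = 0; i < 10; i++) {
    CompletableFuture<Void> eventAck = new CompletableFuture<>();
    acks.add(eventAck);
    out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(("event-" + i).getBytes()), eventAck));
}
out.flush();
CompletableFuture.allOf(acks.toArray(new CompletableFuture[0])).get(10, TimeUnit.SECONDS);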