Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
The class StartLocalService, method main.
public static void main(String[] args) throws Exception {
    // Stand up an in-memory segment store and table store.
    @Cleanup ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    // Expose both stores over the wire protocol on the local service port.
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, StartLocalService.SERVICE_PORT, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    // Create the scope and stream, then keep the service alive for one minute.
    @Cleanup MockStreamManager streamManager = new MockStreamManager(SCOPE, "localhost", StartLocalService.SERVICE_PORT);
    streamManager.createScope(SCOPE);
    streamManager.createStream(SCOPE, STREAM_NAME, null);
    Thread.sleep(60000);
    System.exit(0);
}
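Once the listener is up, the same MockStreamManager can be used to exercise the stream from a client. A minimal sketch, placed inside main after the stream is created and before the Thread.sleep: it reuses the SCOPE and STREAM_NAME constants referenced above, and the writer/serializer usage mirrors the ReadFromDeletedStreamTest example further down this page.

    // Sketch only: reuses the MockStreamManager started above to obtain a client
    // factory, then writes a single event to the freshly created stream.
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter(STREAM_NAME, new JavaSerializer<>(), EventWriterConfig.builder().build());
    writer.writeEvent("routingKey", "hello world").get();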
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
The class ClusterWrapper, method startSegmentStore.
private void startSegmentStore() throws DurableDataLogException {
    serviceBuilder = createServiceBuilder();
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    if (authEnabled) {
        passwordInputFile = createAuthFile(this.passwordAuthHandlerEntries);
    }
    // The listener is wired with optional TLS and auth: a token verifier is only
    // installed when auth is enabled, and the TLS cert/key paths and protocol
    // versions only take effect when tlsEnabled is true.
    segmentStoreServer = new PravegaConnectionListener(this.tlsEnabled, false, "localhost", segmentStorePort, store, tableStore, SegmentStatsRecorder.noOp(), TableSegmentStatsRecorder.noOp(), authEnabled ? new TokenVerifierImpl(tokenSigningKeyBasis) : null, this.tlsServerCertificatePath, this.tlsServerKeyPath, true, serviceBuilder.getLowPriorityExecutor(), tlsProtocolVersion);
    segmentStoreServer.startListening();
}
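createServiceBuilder() is not shown in this excerpt. A plausible minimal implementation, assuming the in-memory builder used by the other examples on this page is sufficient (the real ClusterWrapper may layer additional ServiceConfig properties, such as container counts, on top):

    // Hypothetical sketch, not the actual ClusterWrapper implementation: an
    // in-memory ServiceBuilder with default configuration, as in the other examples.
    private ServiceBuilder createServiceBuilder() {
        return ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    }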
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
The class ScaleTest, method main.
public static void main(String[] args) throws Exception {
    try {
        @Cleanup("shutdownNow") val executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "test");
        @Cleanup TestingServer zkTestServer = new TestingServerStarter().start();
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        int port = Config.SERVICE_PORT;
        @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, serviceBuilder.getLowPriorityExecutor());
        server.startListening();
        // Create a controller object for testing against a separate controller process.
        @Cleanup ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port);
        Controller controller = controllerWrapper.getController();
        final String scope = "scope";
        controllerWrapper.getControllerService().createScope(scope, 0L).get();
        final String streamName = "stream1";
        final StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
        Stream stream = new StreamImpl(scope, streamName);
        log.info("Creating stream {}/{}", scope, streamName);
        if (!controller.createStream(scope, streamName, config).get()) {
            log.error("Stream already existed, exiting");
            return;
        }
        // Test 1: scale stream: split one segment into two.
        log.info("Scaling stream {}/{}, splitting one segment into two", scope, streamName);
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 0.5);
        map.put(0.5, 1.0);
        if (!controller.scaleStream(stream, Collections.singletonList(0L), map, executor).getFuture().get()) {
            log.error("Scale stream: splitting segment into two failed, exiting");
            return;
        }
        // Test 2: scale stream: merge two segments into one.
        log.info("Scaling stream {}/{}, merging two segments into one", scope, streamName);
        CompletableFuture<Boolean> scaleResponseFuture = controller.scaleStream(stream, Arrays.asList(1L, 2L), Collections.singletonMap(0.0, 1.0), executor).getFuture();
        if (!scaleResponseFuture.get()) {
            log.error("Scale stream: merging two segments into one failed, exiting");
            return;
        }
        // Test 3: create a transaction and attempt a scale operation; it should fail the precondition check.
        CompletableFuture<TxnSegments> txnFuture = controller.createTransaction(stream, 5000);
        TxnSegments transaction = txnFuture.get();
        if (transaction == null) {
            log.error("Create transaction failed, exiting");
            return;
        }
        log.info("Scaling stream {}/{}, splitting one segment into two, while transaction is ongoing", scope, streamName);
        scaleResponseFuture = controller.scaleStream(stream, Collections.singletonList(3L), map, executor).getFuture();
        CompletableFuture<Boolean> future = scaleResponseFuture.whenComplete((r, e) -> {
            if (e != null) {
                log.error("Failed: scale with ongoing transaction.", e);
            } else if (getAndHandleExceptions(controller.checkTransactionStatus(stream, transaction.getTxnId()), RuntimeException::new) != Transaction.Status.OPEN) {
                log.info("Success: scale with ongoing transaction.");
            } else {
                log.error("Failed: scale with ongoing transaction.");
            }
        });
        CompletableFuture<Void> statusFuture = controller.abortTransaction(stream, transaction.getTxnId());
        statusFuture.get();
        future.get();
        log.info("All scaling tests PASSED");
        ExecutorServiceHelpers.shutdown(executor);
        System.exit(0);
    } catch (Throwable t) {
        log.error("Test failed", t);
        System.exit(-1);
    }
}
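The scale calls above describe the new segment layout as a map of routing-key ranges (lower bound mapped to upper bound) that together cover [0.0, 1.0]. A small helper along these lines can build an even N-way split; the name evenSplit is purely illustrative and is not part of the Pravega API.

    // Illustrative helper (not from Pravega): build the range map for an even split
    // of the key space into `parts` new segments. parts = 2 yields
    // {0.0 -> 0.5, 0.5 -> 1.0}, matching the map used in Test 1 above.
    private static Map<Double, Double> evenSplit(int parts) {
        Map<Double, Double> ranges = new HashMap<>();
        for (int i = 0; i < parts; i++) {
            ranges.put((double) i / parts, (double) (i + 1) / parts);
        }
        return ranges;
    }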
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
The class ReadFromDeletedStreamTest, method testDeletedAndRecreatedStream.
@Test(timeout = 30000)
public void testDeletedAndRecreatedStream() throws Exception {
    @Cleanup MockStreamManager streamManager = new MockStreamManager("test", "localhost", Config.SERVICE_PORT);
    @Cleanup ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, false, "localhost", 12345, store, tableStore, SegmentStatsRecorder.noOp(), TableSegmentStatsRecorder.noOp(), null, null, null, true, serviceBuilder.getLowPriorityExecutor(), SecurityConfigDefaults.TLS_PROTOCOL_VERSION);
    server.startListening();
    streamManager.createScope("test");
    streamManager.createStream("test", "test", CONFIG);
    @Cleanup MockClientFactory clientFactory = streamManager.getClientFactory();
    // Write one event, delete the stream, then verify that further writes through
    // the stale writer fail with NoSuchSegmentException.
    @Cleanup EventStreamWriter<String> test = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().build());
    test.writeEvent("0", "foo").get();
    streamManager.deleteStream("test", "test");
    AssertExtensions.assertThrows(NoSuchSegmentException.class, () -> test.writeEvent("0", "foo").get());
}
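The method name mentions a recreated stream, but the excerpt stops at the delete. A hedged continuation might recreate the stream and confirm that a writer created after the recreation can write again; this is a sketch under that assumption, not the remainder of the original test.

    // Sketch only (not from the original test body): recreate the stream and verify
    // that a writer created after the recreation succeeds.
    streamManager.createStream("test", "test", CONFIG);
    @Cleanup EventStreamWriter<String> freshWriter = clientFactory.createEventWriter("test", new JavaSerializer<>(), EventWriterConfig.builder().build());
    freshWriter.writeEvent("0", "bar").get();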
Use of io.pravega.segmentstore.contracts.tables.TableStore in project pravega by pravega.
The class ControllerServiceTest, method setUp.
@Before
public void setUp() throws Exception {
    zkTestServer = new TestingServerStarter().start();
    serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    TableStore tableStore = serviceBuilder.createTableStoreService();
    server = new PravegaConnectionListener(false, servicePort, store, tableStore, serviceBuilder.getLowPriorityExecutor());
    server.startListening();
    controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), false, controllerPort, serviceHost, servicePort, containerCount);
    controllerWrapper.awaitRunning();
    executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "collector");
}
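Because setUp assigns everything to fields instead of using @Cleanup, the companion teardown has to release the resources explicitly. The original tearDown is not shown in this excerpt; a minimal sketch, assuming an @After method that closes resources in roughly reverse order of creation:

    // Hypothetical @After counterpart to setUp above (not shown in this excerpt).
    @After
    public void tearDown() throws Exception {
        ExecutorServiceHelpers.shutdown(executor);
        controllerWrapper.close();
        server.close();
        serviceBuilder.close();
        zkTestServer.close();
    }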