Use of io.pravega.client.stream.impl.StreamImpl in project pravega by pravega.
From the class EndToEndTxnWithTest, the method testTxnWithScale:
@Test(timeout = 10000)
public void testTxnWithScale() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    Controller controller = PRAVEGA.getLocalController();
    controller.createScope("test").get();
    String streamName = "testTxnWithScale";
    controller.createStream("test", streamName, config).get();
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup
    ClientFactoryImpl clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
    @Cleanup
    TransactionalEventStreamWriter<String> test = clientFactory.createTransactionalEventWriter("writer", streamName,
            new UTF8StringSerializer(), EventWriterConfig.builder().transactionTimeoutTime(10000).build());

    // Write and commit one transactional event before scaling.
    Transaction<String> transaction1 = test.beginTxn();
    transaction1.writeEvent("0", "txntest1");
    transaction1.commit();
    assertEventuallyEquals(Transaction.Status.COMMITTED, () -> transaction1.checkStatus(), 5000);

    // Scale the stream: seal segment 0 and split it into three key ranges.
    Stream stream = new StreamImpl("test", streamName);
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.33);
    map.put(0.33, 0.66);
    map.put(0.66, 1.0);
    Boolean result = controller.scaleStream(stream, Collections.singletonList(0L), map, executorService()).getFuture().get();
    assertTrue(result);

    // Write and commit a second transactional event after the scale.
    Transaction<String> transaction2 = test.beginTxn();
    transaction2.writeEvent("0", "txntest2");
    transaction2.commit();

    // Read both events back; the reader needs a checkpoint to move past the scale boundary.
    String group = "testTxnWithScale-group";
    @Cleanup
    ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory);
    groupManager.createReaderGroup(group, ReaderGroupConfig.builder().disableAutomaticCheckpoints()
            .groupRefreshTimeMillis(0).stream("test/" + streamName).build());
    @Cleanup
    EventStreamReader<String> reader = clientFactory.createReader("readerId", group, new UTF8StringSerializer(),
            ReaderConfig.builder().build());
    EventRead<String> event = reader.readNextEvent(5000);
    assertNotNull(event.getEvent());
    assertEquals("txntest1", event.getEvent());
    assertNull(reader.readNextEvent(100).getEvent());
    groupManager.getReaderGroup(group).initiateCheckpoint("cp", executorService());
    event = reader.readNextEvent(5000);
    assertEquals("cp", event.getCheckpointName());
    event = reader.readNextEvent(5000);
    assertNotNull(event.getEvent());
    assertEquals("txntest2", event.getEvent());
}
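The newKeyRanges map handed to scaleStream must cover the key space of the sealed segments contiguously; the three entries above split [0.0, 1.0) into roughly equal thirds. A small hypothetical helper, not part of the test above, that builds such an even split for any target segment count:

import java.util.LinkedHashMap;
import java.util.Map;

static Map<Double, Double> evenKeyRanges(int n) {
    // Contiguous ranges i/n -> (i+1)/n covering [0.0, 1.0); for n = 3 this is
    // {0.0=0.333.., 0.333..=0.666.., 0.666..=1.0}.
    Map<Double, Double> ranges = new LinkedHashMap<>();
    for (int i = 0; i < n; i++) {
        ranges.put((double) i / n, (double) (i + 1) / n);
    }
    return ranges;
}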
Use of io.pravega.client.stream.impl.StreamImpl in project pravega by pravega.
From the class LocalControllerTest, the method testUpdateSubscriberStreamCut:
@Test(timeout = 10000)
public void testUpdateSubscriberStreamCut() throws ExecutionException, InterruptedException {
    UUID someId = UUID.randomUUID();
    StreamCut streamCut = new StreamCutImpl(new StreamImpl("scope", "stream"), Collections.emptyMap());
    when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.SUCCESS).build()));
    Assert.assertTrue(this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join());
    when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.FAILURE).build()));
    assertThrows("Expected ControllerFailureException",
            () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(),
            ex -> ex instanceof ControllerFailureException);
    when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.STREAM_NOT_FOUND).build()));
    assertThrows("Expected IllegalArgumentException",
            () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(),
            ex -> ex instanceof IllegalArgumentException);
    when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.SUBSCRIBER_NOT_FOUND).build()));
    assertThrows("Expected IllegalArgumentException",
            () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(),
            ex -> ex instanceof IllegalArgumentException);
    when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.GENERATION_MISMATCH).build()));
    assertThrows("Expected IllegalArgumentException",
            () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(),
            ex -> ex instanceof IllegalArgumentException);
}
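The test passes Collections.emptyMap() as the segment-to-offset map, which is enough for the mocked controller call. A minimal sketch, assuming the same client classes (StreamImpl, StreamCutImpl and io.pravega.client.segment.impl.Segment), of a stream cut that pins a concrete segment at an offset:

// Not part of LocalControllerTest: a StreamCut placing segment 0 of scope/stream at byte offset 100.
Stream stream = new StreamImpl("scope", "stream");
Map<Segment, Long> positions = new HashMap<>();
positions.put(new Segment("scope", "stream", 0L), 100L);
StreamCut streamCutAtOffset = new StreamCutImpl(stream, positions);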
Use of io.pravega.client.stream.impl.StreamImpl in project pravega by pravega.
From the class LocalControllerTest, the method testScaleStream:
@Test(timeout = 10000)
public void testScaleStream() throws ExecutionException, InterruptedException {
    when(this.mockControllerService.checkScale(anyString(), anyString(), anyInt(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.ScaleStatusResponse.newBuilder().setStatus(Controller.ScaleStatusResponse.ScaleStatus.SUCCESS).build()));
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.ScaleResponse.newBuilder().setStatus(Controller.ScaleResponse.ScaleStreamStatus.STARTED).build()));
    Assert.assertTrue(this.testController.scaleStream(new StreamImpl("scope", "stream"), new ArrayList<>(), new HashMap<>(), executorService())
            .getFuture().join());
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.ScaleResponse.newBuilder().setStatus(Controller.ScaleResponse.ScaleStreamStatus.PRECONDITION_FAILED).build()));
    Assert.assertFalse(this.testController.scaleStream(new StreamImpl("scope", "stream"), new ArrayList<>(), new HashMap<>(), executorService())
            .getFuture().join());
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(
                    Controller.ScaleResponse.newBuilder().setStatus(Controller.ScaleResponse.ScaleStreamStatus.FAILURE).build()));
    assertThrows("Expected ControllerFailureException",
            () -> this.testController.startScale(new StreamImpl("scope", "stream"), new ArrayList<>(), new HashMap<>()).join(),
            ex -> ex instanceof ControllerFailureException);
    when(this.mockControllerService.scale(any(), any(), any(), any(), anyLong(), anyLong()))
            .thenReturn(CompletableFuture.completedFuture(Controller.ScaleResponse.newBuilder().setStatusValue(-1).build()));
    assertThrows("Expected ControllerFailureException",
            () -> this.testController.startScale(new StreamImpl("scope", "stream"), new ArrayList<>(), new HashMap<>()).join(),
            ex -> ex instanceof ControllerFailureException);
}
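These tests construct StreamImpl directly because they live alongside the client's impl package; application code usually reaches the same object through the Stream.of factory methods. A short sketch, not part of the test above:

// Three equivalent references to the same stream; Stream.of returns a StreamImpl under the hood.
Stream byParts = Stream.of("scope", "stream");
Stream byScopedName = Stream.of("scope/stream");
Stream direct = new StreamImpl("scope", "stream");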
Use of io.pravega.client.stream.impl.StreamImpl in project pravega by pravega.
From the class ScaleTest, the method main:
public static void main(String[] args) throws Exception {
    try {
        @Cleanup("shutdownNow")
        val executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "test");
        @Cleanup
        TestingServer zkTestServer = new TestingServerStarter().start();
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        int port = Config.SERVICE_PORT;
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore, serviceBuilder.getLowPriorityExecutor());
        server.startListening();
        // Create a controller object for testing against a separate controller process.
        @Cleanup
        ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port);
        Controller controller = controllerWrapper.getController();
        final String scope = "scope";
        controllerWrapper.getControllerService().createScope(scope, 0L).get();
        final String streamName = "stream1";
        final StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
        Stream stream = new StreamImpl(scope, streamName);
        log.info("Creating stream {}/{}", scope, streamName);
        if (!controller.createStream(scope, streamName, config).get()) {
            log.error("Stream already existed, exiting");
            return;
        }
        // Test 1: scale stream: split one segment into two.
        log.info("Scaling stream {}/{}, splitting one segment into two", scope, streamName);
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 0.5);
        map.put(0.5, 1.0);
        if (!controller.scaleStream(stream, Collections.singletonList(0L), map, executor).getFuture().get()) {
            log.error("Scale stream: splitting segment into two failed, exiting");
            return;
        }
        // Test 2: scale stream: merge two segments into one.
        log.info("Scaling stream {}/{}, merging two segments into one", scope, streamName);
        CompletableFuture<Boolean> scaleResponseFuture = controller.scaleStream(stream, Arrays.asList(1L, 2L),
                Collections.singletonMap(0.0, 1.0), executor).getFuture();
        if (!scaleResponseFuture.get()) {
            log.error("Scale stream: merging two segments into one failed, exiting");
            return;
        }
        // Test 3: create a transaction, then try a scale operation; it should fail the precondition check.
        CompletableFuture<TxnSegments> txnFuture = controller.createTransaction(stream, 5000);
        TxnSegments transaction = txnFuture.get();
        if (transaction == null) {
            log.error("Create transaction failed, exiting");
            return;
        }
        log.info("Scaling stream {}/{}, splitting one segment into two, while transaction is ongoing", scope, streamName);
        scaleResponseFuture = controller.scaleStream(stream, Collections.singletonList(3L), map, executor).getFuture();
        CompletableFuture<Boolean> future = scaleResponseFuture.whenComplete((r, e) -> {
            if (e != null) {
                log.error("Failed: scale with ongoing transaction.", e);
            } else if (getAndHandleExceptions(controller.checkTransactionStatus(stream, transaction.getTxnId()), RuntimeException::new) != Transaction.Status.OPEN) {
                log.info("Success: scale with ongoing transaction.");
            } else {
                log.error("Failed: scale with ongoing transaction.");
            }
        });
        CompletableFuture<Void> statusFuture = controller.abortTransaction(stream, transaction.getTxnId());
        statusFuture.get();
        future.get();
        log.info("All scaling test PASSED");
        ExecutorServiceHelpers.shutdown(executor);
        System.exit(0);
    } catch (Throwable t) {
        log.error("Test failed", t);
        System.exit(-1);
    }
}
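A small follow-up sketch, not part of ScaleTest itself: the effect of each scale can be observed through Controller#getCurrentSegments, the same API used in the next example below, by listing the currently active segments.

// Hypothetical verification step after the first split: the stream should now report two active segments.
StreamSegments current = controller.getCurrentSegments(scope, streamName).get();
log.info("Stream {}/{} now has {} active segments", scope, streamName, current.getSegments().size());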
Use of io.pravega.client.stream.impl.StreamImpl in project pravega by pravega.
From the class EndToEndAutoScaleUpWithTxnTest, the method main:
public static void main(String[] args) throws Exception {
    try {
        @Cleanup
        TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup
        ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port);
        Controller controller = controllerWrapper.getController();
        controllerWrapper.getControllerService().createScope(NameUtils.INTERNAL_SCOPE_NAME, 0L).get();
        @Cleanup
        ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        @Cleanup
        ClientFactoryImpl internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller, connectionFactory);
        @Cleanup("shutdownNow")
        val executor = ExecutorServiceHelpers.newScheduledThreadPool(1, "test");
        @Cleanup
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        TableStore tableStore = serviceBuilder.createTableStoreService();
        @Cleanup
        AutoScaleMonitor autoScaleMonitor = new AutoScaleMonitor(store, internalCF,
                AutoScalerConfig.builder().with(AutoScalerConfig.MUTE_IN_SECONDS, 0).with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0).build());
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, false, "localhost", 12345, store, tableStore,
                autoScaleMonitor.getStatsRecorder(), autoScaleMonitor.getTableSegmentStatsRecorder(), null, null, null, true,
                serviceBuilder.getLowPriorityExecutor(), Config.TLS_PROTOCOL_VERSION.toArray(new String[Config.TLS_PROTOCOL_VERSION.size()]));
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test", 0L).get();
        controller.createStream("test", "test", CONFIG).get();
        @Cleanup
        MockClientFactory clientFactory = new MockClientFactory("test", controller, internalCF.getConnectionPool());
        // Exercise the Pravega service by issuing scale-up and scale-down requests for the stream.
        EventWriterConfig writerConfig = EventWriterConfig.builder().transactionTimeoutTime(30000).build();
        TransactionalEventStreamWriter<String> test = clientFactory.createTransactionalEventWriter("writer", "test", new UTF8StringSerializer(), writerConfig);
        // region Successful commit tests
        Transaction<String> txn1 = test.beginTxn();
        txn1.writeEvent("1");
        txn1.flush();
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 1.0 / 3.0);
        map.put(1.0 / 3.0, 2.0 / 3.0);
        map.put(2.0 / 3.0, 1.0);
        Stream stream = new StreamImpl("test", "test");
        controller.startScale(stream, Collections.singletonList(0L), map).get();
        Transaction<String> txn2 = test.beginTxn();
        txn2.writeEvent("2");
        txn2.flush();
        txn2.commit();
        txn1.commit();
        Thread.sleep(1000);
        @Cleanup
        ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl("test", controller, clientFactory);
        readerGroupManager.createReaderGroup("readergrp", ReaderGroupConfig.builder().stream("test/test").build());
        // Use the same serializer as the writer above so the events can be deserialized.
        final EventStreamReader<String> reader = clientFactory.createReader("1", "readergrp", new UTF8StringSerializer(), ReaderConfig.builder().build());
        String event1 = reader.readNextEvent(SECONDS.toMillis(60)).getEvent();
        String event2 = reader.readNextEvent(SECONDS.toMillis(60)).getEvent();
        assert event1.equals("1");
        assert event2.equals("2");
        final AtomicBoolean done = new AtomicBoolean(false);
        startWriter(test, done);
        Retry.withExpBackoff(10, 10, 100, 10000)
                .retryingOn(NotDoneException.class)
                .throwingOn(RuntimeException.class)
                .runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
                    if (streamSegments.getSegments().stream().anyMatch(x -> NameUtils.getEpoch(x.getSegmentId()) > 5)) {
                        System.err.println("Success");
                        log.info("Success");
                        System.exit(0);
                    } else {
                        throw new NotDoneException();
                    }
                }), executor)
                .exceptionally(e -> {
                    System.err.println("Failure");
                    log.error("Failure");
                    System.exit(1);
                    return null;
                }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        log.error("Test failed with exception", e);
        System.exit(-1);
    }
    System.exit(0);
}
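startWriter is defined elsewhere in EndToEndAutoScaleUpWithTxnTest and is not shown on this page. A hypothetical sketch of what such a helper might look like, only to make the example above easier to follow: it keeps committing small transactions so the segment store continues to report load and auto-scaling can trigger.

private static void startWriter(TransactionalEventStreamWriter<String> writer, AtomicBoolean done) {
    CompletableFuture.runAsync(() -> {
        while (!done.get()) {
            try {
                Transaction<String> txn = writer.beginTxn();
                for (int i = 0; i < 100; i++) {
                    txn.writeEvent("event");
                }
                txn.commit();
            } catch (TxnFailedException e) {
                // The transaction timed out or was aborted; retry with a fresh one.
            }
        }
    });
}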