use of io.pravega.client.netty.impl.ConnectionFactoryImpl in project pravega by pravega.
the class PravegaTest method createStream.
/**
 * Invoke the createStream method and ensure we are able to create a stream.
 *
 * @throws InterruptedException if interrupted while waiting for stream creation
 * @throws ExecutionException   if the create scope or create stream call fails
 */
@Before
public void createStream() throws InterruptedException, ExecutionException {
    Service conService = Utils.createPravegaControllerService(null);
    List<URI> ctlURIs = conService.getServiceDetails();
    URI controllerUri = ctlURIs.get(0);
    log.info("Invoking create stream with Controller URI: {}", controllerUri);
    @Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    ControllerImpl controller = new ControllerImpl(
            ControllerImplConfig.builder()
                    .clientConfig(ClientConfig.builder().controllerURI(controllerUri).build())
                    .build(),
            connectionFactory.getInternalExecutor());
    assertTrue(controller.createScope(STREAM_SCOPE).get());
    assertTrue(controller.createStream(config).get());
}
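The STREAM_SCOPE and config values used above are defined elsewhere in PravegaTest. A minimal sketch of what they might look like (the names and values below are assumptions, not the test's actual constants):

    // Hypothetical constants for illustration only; the real PravegaTest defines its own scope,
    // stream name, and scaling policy.
    static final String STREAM_SCOPE = "testScope";
    static final String STREAM_NAME = "testStream";
    static final StreamConfiguration config = StreamConfiguration.builder()
            .scope(STREAM_SCOPE)
            .streamName(STREAM_NAME)
            .scalingPolicy(ScalingPolicy.fixed(1))
            .build();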
use of io.pravega.client.netty.impl.ConnectionFactoryImpl in project pravega by pravega.
the class EndToEndAutoScaleUpTest method main.
public static void main(String[] args) throws Exception {
    try {
        @Cleanup TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port, false);
        Controller controller = controllerWrapper.getController();
        ClientFactory internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller,
                new ConnectionFactoryImpl(ClientConfig.builder().build()));
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        @Cleanup SegmentStatsFactory segmentStatsFactory = new SegmentStatsFactory();
        SegmentStatsRecorder statsRecorder = segmentStatsFactory.createSegmentStatsRecorder(store, internalCF,
                AutoScalerConfig.builder()
                        .with(AutoScalerConfig.MUTE_IN_SECONDS, 0)
                        .with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0)
                        .build());
        @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, "localhost", 12345, store,
                statsRecorder, null, null, null);
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test").get();
        controller.createStream(CONFIG).get();
        @Cleanup MockClientFactory clientFactory = new MockClientFactory("test", controller);
        // Mocking pravega service by putting scale up and scale down requests for the stream
        EventStreamWriter<String> test = clientFactory.createEventWriter("test", new JavaSerializer<>(),
                EventWriterConfig.builder().build());
        // keep writing. Scale should happen
        long start = System.currentTimeMillis();
        char[] chars = new char[1];
        Arrays.fill(chars, 'a');
        String str = new String(chars);
        CompletableFuture.runAsync(() -> {
            while (System.currentTimeMillis() - start < Duration.ofMinutes(3).toMillis()) {
                try {
                    test.writeEvent("0", str).get();
                } catch (Throwable e) {
                    System.err.println("test exception writing events " + e.getMessage());
                    break;
                }
            }
        });
        Retry.withExpBackoff(10, 10, 100, 10000)
             .retryingOn(NotDoneException.class)
             .throwingOn(RuntimeException.class)
             .runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
                 if (streamSegments.getSegments().size() > 3) {
                     System.err.println("Success");
                     log.info("Success");
                     System.exit(0);
                 } else {
                     throw new NotDoneException();
                 }
             }), Executors.newSingleThreadScheduledExecutor())
             .exceptionally(e -> {
                 System.err.println("Failure");
                 log.error("Failure");
                 System.exit(1);
                 return null;
             }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        System.exit(-1);
    }
    System.exit(0);
}
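The CONFIG referenced above is defined elsewhere in EndToEndAutoScaleUpTest. For a test that expects an automatic scale-up, it would use an event-rate scaling policy like the one shown in StreamCutsTest below; a hedged sketch (scope, stream name, and thresholds are assumptions):

    // Hypothetical stream configuration; the real CONFIG lives in the test class and its
    // thresholds and minimum segment count may differ.
    static final StreamConfiguration CONFIG = StreamConfiguration.builder()
            .scope("test")
            .streamName("test")
            .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
            .build();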
use of io.pravega.client.netty.impl.ConnectionFactoryImpl in project pravega by pravega.
the class EndToEndAutoScaleDownTest method main.
public static void main(String[] args) throws Exception {
    try {
        @Cleanup TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port, false);
        Controller controller = controllerWrapper.getController();
        controllerWrapper.getControllerService().createScope(NameUtils.INTERNAL_SCOPE_NAME).get();
        ClientFactory internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller,
                new ConnectionFactoryImpl(ClientConfig.builder().build()));
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        @Cleanup SegmentStatsFactory segmentStatsFactory = new SegmentStatsFactory();
        SegmentStatsRecorder statsRecorder = segmentStatsFactory.createSegmentStatsRecorder(store, internalCF,
                AutoScalerConfig.builder()
                        .with(AutoScalerConfig.MUTE_IN_SECONDS, 0)
                        .with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0)
                        .with(AutoScalerConfig.CACHE_CLEANUP_IN_SECONDS, 5)
                        .with(AutoScalerConfig.CACHE_EXPIRY_IN_SECONDS, 30)
                        .build());
        @Cleanup PravegaConnectionListener server = new PravegaConnectionListener(false, "localhost", 12345, store,
                statsRecorder, null, null, null);
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test").get();
        controller.createStream(CONFIG).get();
        Stream stream = new StreamImpl("test", "test");
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 0.33);
        map.put(0.33, 0.66);
        map.put(0.66, 1.0);
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
        Retry.withExpBackoff(10, 10, 100, 10000)
             .retryingOn(NotDoneException.class)
             .throwingOn(RuntimeException.class)
             .runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
                 if (streamSegments.getSegments().size() < 3) {
                     System.err.println("Success");
                     log.info("Success");
                     System.exit(0);
                 } else {
                     throw new NotDoneException();
                 }
             }), Executors.newSingleThreadScheduledExecutor())
             .exceptionally(e -> {
                 System.err.println("Failure");
                 log.error("Failure");
                 System.exit(1);
                 return null;
             }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        System.exit(-1);
    }
    System.exit(0);
}
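Both auto-scale tests drive their Retry loop with a NotDoneException marker. A minimal sketch of such a marker class (an assumption; the real class is defined in the test sources):

    // Any unchecked exception type works with retryingOn(...); this is the simplest possible marker
    // used to signal "condition not yet met, retry".
    class NotDoneException extends RuntimeException {
    }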
use of io.pravega.client.netty.impl.ConnectionFactoryImpl in project pravega by pravega.
the class SetupUtils method startAllServices.
/**
* Start all pravega related services required for the test deployment.
*
* @param numThreads the number of threads for the internal client threadpool.
* @throws Exception on any errors.
*/
public void startAllServices(Integer numThreads) throws Exception {
    if (!this.started.compareAndSet(false, true)) {
        log.warn("Services already started, not attempting to start again");
        return;
    }
    this.connectionFactory = new ConnectionFactoryImpl(clientConfig, numThreads);
    this.controller = new ControllerImpl(ControllerImplConfig.builder().clientConfig(clientConfig).build(),
            connectionFactory.getInternalExecutor());
    this.clientFactory = new ClientFactoryImpl(scope, controller, connectionFactory);
    // Start zookeeper.
    this.zkTestServer = new TestingServerStarter().start();
    this.zkTestServer.start();
    // Start Pravega Service.
    ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
    serviceBuilder.initialize();
    StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
    this.server = new PravegaConnectionListener(false, servicePort, store);
    this.server.startListening();
    log.info("Started Pravega Service");
    // Start Controller.
    this.controllerWrapper = new ControllerWrapper(this.zkTestServer.getConnectString(), false, true,
            controllerRPCPort, "localhost", servicePort, Config.HOST_STORE_CONTAINER_COUNT, controllerRESTPort);
    this.controllerWrapper.awaitRunning();
    this.controllerWrapper.getController().createScope(scope).get();
    log.info("Initialized Pravega Controller");
}
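A hedged usage sketch of this helper from a test class (the no-argument constructor and the idea of exposed client fields are assumptions; only startAllServices(Integer) is shown above):

    // Hypothetical test setup: start the in-process ZooKeeper, segment store, and controller,
    // then exercise the stream under test through the helper's client.
    SetupUtils setupUtils = new SetupUtils();
    setupUtils.startAllServices(2); // two threads for the internal client executor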
use of io.pravega.client.netty.impl.ConnectionFactoryImpl in project pravega by pravega.
the class StreamCutsTest method testReaderGroupCuts.
@Test(timeout = 40000)
public void testReaderGroupCuts() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder()
            .scope("test")
            .streamName("test")
            .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
            .build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope("test").get();
    controller.createStream(config).get();
    @Cleanup ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
    @Cleanup EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(),
            EventWriterConfig.builder().build());
    writer.writeEvent("0", "fpj was here").get();
    writer.writeEvent("0", "fpj was here again").get();
    @Cleanup ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
    ReaderGroup readerGroup = groupManager.createReaderGroup("cuts",
            ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
    @Cleanup EventStreamReader<String> reader = clientFactory.createReader("readerId", "cuts", new JavaSerializer<>(),
            ReaderConfig.builder().build());
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("fpj was here", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("fpj was here again", secondEvent.getEvent());
    Map<Stream, StreamCut> cuts = readerGroup.getStreamCuts();
    validateCuts(readerGroup, cuts, Collections.singleton("test/test/0"));
    // Scale the stream to verify that we get more segments in the cut.
    Stream stream = Stream.of("test", "test");
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.5);
    map.put(0.5, 1.0);
    Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 1st scaling");
    writer.writeEvent("0", "fpj was here again").get();
    writer.writeEvent("1", "fpj was here again").get();
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    HashSet<String> segmentNames = new HashSet<>();
    segmentNames.add("test/test/1");
    segmentNames.add("test/test/2");
    validateCuts(readerGroup, cuts, Collections.unmodifiableSet(segmentNames));
    // Scale down to verify that the number drops back.
    map = new HashMap<>();
    map.put(0.0, 1.0);
    ArrayList<Integer> toSeal = new ArrayList<>();
    toSeal.add(1);
    toSeal.add(2);
    result = controller.scaleStream(stream, Collections.unmodifiableList(toSeal), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 2nd scaling");
    writer.writeEvent("0", "fpj was here again").get();
    reader.readNextEvent(15000);
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    validateCuts(readerGroup, cuts, Collections.singleton("test/test/3"));
    // Scale up to 4 segments again.
    map = new HashMap<>();
    map.put(0.0, 0.25);
    map.put(0.25, 0.5);
    map.put(0.5, 0.75);
    map.put(0.75, 1.0);
    result = controller.scaleStream(stream, Collections.singletonList(3), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 3rd scaling");
    writer.writeEvent("0", "fpj was here again").get();
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    segmentNames = new HashSet<>();
    segmentNames.add("test/test/4");
    segmentNames.add("test/test/5");
    segmentNames.add("test/test/6");
    segmentNames.add("test/test/7");
    validateCuts(readerGroup, cuts, Collections.unmodifiableSet(segmentNames));
}
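The executor passed to scaleStream and the validateCuts helper are defined elsewhere in StreamCutsTest. A plausible executor field, mirroring the auto-scale tests above (an assumption, not the test's actual declaration):

    // Hypothetical field; any ScheduledExecutorService satisfies the scaleStream signature used above.
    private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();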