Use of io.pravega.client.ClientFactory in project pravega by pravega.
From class InProcPravegaClusterTest, method createTestStream.
/**
 * Create the test stream.
 *
 * @throws Exception on any errors.
 */
@Test
public void createTestStream() throws Exception {
    Assert.assertNotNull("Pravega not initialized", localPravega);
    String scope = "Scope";
    String streamName = "Stream";
    int numSegments = 10;
    ClientConfig clientConfig = ClientConfig.builder()
            .controllerURI(URI.create(localPravega.getInProcPravegaCluster().getControllerURI()))
            .credentials(new DefaultCredentials("1111_aaaa", "admin"))
            .trustStore("../config/cert.pem")
            .validateHostName(false)
            .build();
    @Cleanup
    StreamManager streamManager = StreamManager.create(clientConfig);
    streamManager.createScope(scope);
    Assert.assertTrue("Stream creation is not successful",
            streamManager.createStream(scope, streamName, StreamConfiguration.builder()
                    .scope(scope)
                    .streamName(streamName)
                    .scalingPolicy(ScalingPolicy.fixed(numSegments))
                    .build()));
    log.info("Created stream: " + streamName);
    ClientFactory clientFactory = ClientFactory.withScope(scope, clientConfig);
    EventStreamWriter<String> writer = clientFactory.createEventWriter(streamName,
            new JavaSerializer<String>(), EventWriterConfig.builder().build());
    log.info("Created writer for stream: " + streamName);
    writer.writeEvent("hello").get();
    log.info("Wrote data to the stream");
}
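To complement the writer above, here is a minimal sketch of how the event could be read back, reusing the scope, streamName, clientConfig, and clientFactory variables from createTestStream. The reader group name and reader id are illustrative, and the snippet is assumed to be appended to the end of the test method; it only uses client calls that appear elsewhere in this listing.

// Sketch only: read back the event written above. "testReaderGroup" and "readerId" are
// illustrative names, not part of the original test.
@Cleanup
ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(scope, clientConfig);
readerGroupManager.createReaderGroup("testReaderGroup",
        ReaderGroupConfig.builder().stream(Stream.of(scope, streamName)).build());
@Cleanup
EventStreamReader<String> reader = clientFactory.createReader("readerId", "testReaderGroup",
        new JavaSerializer<String>(), ReaderConfig.builder().build());
EventRead<String> event = reader.readNextEvent(10000);
log.info("Read back event: {}", event.getEvent());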

Use of io.pravega.client.ClientFactory in project pravega by pravega.
From class EndToEndAutoScaleUpTest, method main.
public static void main(String[] args) throws Exception {
    try {
        @Cleanup
        TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup
        ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port, false);
        Controller controller = controllerWrapper.getController();
        ClientFactory internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller,
                new ConnectionFactoryImpl(ClientConfig.builder().build()));
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        @Cleanup
        SegmentStatsFactory segmentStatsFactory = new SegmentStatsFactory();
        SegmentStatsRecorder statsRecorder = segmentStatsFactory.createSegmentStatsRecorder(store, internalCF,
                AutoScalerConfig.builder()
                        .with(AutoScalerConfig.MUTE_IN_SECONDS, 0)
                        .with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0)
                        .build());
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, "localhost", 12345, store,
                statsRecorder, null, null, null);
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test").get();
        controller.createStream(CONFIG).get();
        @Cleanup
        MockClientFactory clientFactory = new MockClientFactory("test", controller);
        // Write to the stream continuously so that scale-up requests are generated for it.
        EventStreamWriter<String> test = clientFactory.createEventWriter("test", new JavaSerializer<>(),
                EventWriterConfig.builder().build());
        // Keep writing for three minutes; a scale-up should happen.
        long start = System.currentTimeMillis();
        char[] chars = new char[1];
        Arrays.fill(chars, 'a');
        String str = new String(chars);
        CompletableFuture.runAsync(() -> {
            while (System.currentTimeMillis() - start < Duration.ofMinutes(3).toMillis()) {
                try {
                    test.writeEvent("0", str).get();
                } catch (Throwable e) {
                    System.err.println("test exception writing events " + e.getMessage());
                    break;
                }
            }
        });
        Retry.withExpBackoff(10, 10, 100, 10000)
                .retryingOn(NotDoneException.class)
                .throwingOn(RuntimeException.class)
                .runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
                    if (streamSegments.getSegments().size() > 3) {
                        System.err.println("Success");
                        log.info("Success");
                        System.exit(0);
                    } else {
                        throw new NotDoneException();
                    }
                }), Executors.newSingleThreadScheduledExecutor())
                .exceptionally(e -> {
                    System.err.println("Failure");
                    log.error("Failure");
                    System.exit(1);
                    return null;
                }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        System.exit(-1);
    }
    System.exit(0);
}
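The CONFIG constant passed to createStream above is defined elsewhere in the test class and is not part of this listing. As a rough sketch only, assuming an event-rate-based scaling policy of the kind used in StreamCutsTest further down, it could look something like the following; the actual values are an assumption.

// Assumed shape of the stream configuration; the real values of CONFIG are not shown in this listing.
private static final StreamConfiguration CONFIG = StreamConfiguration.builder()
        .scope("test")
        .streamName("test")
        .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
        .build();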

Use of io.pravega.client.ClientFactory in project pravega by pravega.
From class EndToEndAutoScaleDownTest, method main.
public static void main(String[] args) throws Exception {
    try {
        @Cleanup
        TestingServer zkTestServer = new TestingServerStarter().start();
        int port = Config.SERVICE_PORT;
        @Cleanup
        ControllerWrapper controllerWrapper = new ControllerWrapper(zkTestServer.getConnectString(), port, false);
        Controller controller = controllerWrapper.getController();
        controllerWrapper.getControllerService().createScope(NameUtils.INTERNAL_SCOPE_NAME).get();
        ClientFactory internalCF = new ClientFactoryImpl(NameUtils.INTERNAL_SCOPE_NAME, controller,
                new ConnectionFactoryImpl(ClientConfig.builder().build()));
        ServiceBuilder serviceBuilder = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.getDefaultConfig());
        serviceBuilder.initialize();
        StreamSegmentStore store = serviceBuilder.createStreamSegmentService();
        @Cleanup
        SegmentStatsFactory segmentStatsFactory = new SegmentStatsFactory();
        SegmentStatsRecorder statsRecorder = segmentStatsFactory.createSegmentStatsRecorder(store, internalCF,
                AutoScalerConfig.builder()
                        .with(AutoScalerConfig.MUTE_IN_SECONDS, 0)
                        .with(AutoScalerConfig.COOLDOWN_IN_SECONDS, 0)
                        .with(AutoScalerConfig.CACHE_CLEANUP_IN_SECONDS, 5)
                        .with(AutoScalerConfig.CACHE_EXPIRY_IN_SECONDS, 30)
                        .build());
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, "localhost", 12345, store,
                statsRecorder, null, null, null);
        server.startListening();
        controllerWrapper.awaitRunning();
        controllerWrapper.getControllerService().createScope("test").get();
        controller.createStream(CONFIG).get();
        Stream stream = new StreamImpl("test", "test");
        Map<Double, Double> map = new HashMap<>();
        map.put(0.0, 0.33);
        map.put(0.33, 0.66);
        map.put(0.66, 1.0);
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
        Retry.withExpBackoff(10, 10, 100, 10000)
                .retryingOn(NotDoneException.class)
                .throwingOn(RuntimeException.class)
                .runAsync(() -> controller.getCurrentSegments("test", "test").thenAccept(streamSegments -> {
                    if (streamSegments.getSegments().size() < 3) {
                        System.err.println("Success");
                        log.info("Success");
                        System.exit(0);
                    } else {
                        throw new NotDoneException();
                    }
                }), Executors.newSingleThreadScheduledExecutor())
                .exceptionally(e -> {
                    System.err.println("Failure");
                    log.error("Failure");
                    System.exit(1);
                    return null;
                }).get();
    } catch (Throwable e) {
        System.err.print("Test failed with exception: " + e.getMessage());
        System.exit(-1);
    }
    System.exit(0);
}
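In both auto-scale tests, controller.scaleStream is given a map from the lower to the upper bound of each new key range, and the examples in this listing always make the new ranges exactly cover the key space of the segments being sealed. The following is a hypothetical helper, a sketch only, for sanity-checking such a map before submitting a scale request; it is plain Java and not part of the Pravega API.

// Hypothetical helper (not part of the Pravega API): verify that the proposed key ranges
// form a contiguous, non-overlapping cover of [lo, hi) before calling scaleStream.
private static void checkKeyRanges(Map<Double, Double> newRanges, double lo, double hi) {
    double cursor = lo;
    // Walk the ranges ordered by lower bound and require each to start where the previous one ended.
    for (Map.Entry<Double, Double> range : new TreeMap<>(newRanges).entrySet()) {
        if (Math.abs(range.getKey() - cursor) > 1e-9) {
            throw new IllegalArgumentException("Key ranges have a gap or overlap at " + cursor);
        }
        cursor = range.getValue();
    }
    if (Math.abs(cursor - hi) > 1e-9) {
        throw new IllegalArgumentException("Key ranges stop at " + cursor + " instead of " + hi);
    }
}

For example, checkKeyRanges(map, 0.0, 1.0) would pass for the three-way split above and fail if one of the boundaries were mistyped.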

Use of io.pravega.client.ClientFactory in project pravega by pravega.
From class ControllerRestApiTest, method restApiTests.
@Test
public void restApiTests() {
    Invocation.Builder builder;
    Response response;
    restServerURI = SETUP_UTILS.getControllerRestUri().toString();
    log.info("REST Server URI: {}", restServerURI);

    // TEST REST server status, ping test
    resourceURl = new StringBuilder(restServerURI).append("/ping").toString();
    webTarget = client.target(resourceURl);
    builder = webTarget.request();
    response = builder.get();
    assertEquals("Ping test", OK.getStatusCode(), response.getStatus());
    log.info("REST Server is running. Ping successful.");

    final String scope1 = RandomStringUtils.randomAlphanumeric(10);
    final String stream1 = RandomStringUtils.randomAlphanumeric(10);

    // TEST CreateScope POST http://controllerURI:Port/v1/scopes
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes").toString();
    webTarget = client.target(resourceURl);
    final CreateScopeRequest createScopeRequest = new CreateScopeRequest();
    createScopeRequest.setScopeName(scope1);
    builder = webTarget.request(MediaType.APPLICATION_JSON_TYPE);
    response = builder.post(Entity.json(createScopeRequest));
    assertEquals("Create scope status", CREATED.getStatusCode(), response.getStatus());
    Assert.assertEquals("Create scope response", scope1, response.readEntity(ScopeProperty.class).getScopeName());
    log.info("Create scope: {} successful", scope1);
    // TEST CreateStream POST http://controllerURI:Port/v1/scopes/{scopeName}/streams
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams").toString();
    webTarget = client.target(resourceURl);
    CreateStreamRequest createStreamRequest = new CreateStreamRequest();
    ScalingConfig scalingConfig = new ScalingConfig();
    scalingConfig.setType(ScalingConfig.TypeEnum.FIXED_NUM_SEGMENTS);
    scalingConfig.setTargetRate(2);
    scalingConfig.scaleFactor(2);
    scalingConfig.minSegments(2);
    RetentionConfig retentionConfig = new RetentionConfig();
    retentionConfig.setType(RetentionConfig.TypeEnum.LIMITED_DAYS);
    retentionConfig.setValue(123L);
    createStreamRequest.setStreamName(stream1);
    createStreamRequest.setScalingPolicy(scalingConfig);
    createStreamRequest.setRetentionPolicy(retentionConfig);
    builder = webTarget.request(MediaType.APPLICATION_JSON_TYPE);
    response = builder.post(Entity.json(createStreamRequest));
    assertEquals("Create stream status", CREATED.getStatusCode(), response.getStatus());
    final StreamProperty streamPropertyResponse = response.readEntity(StreamProperty.class);
    assertEquals("Scope name in response", scope1, streamPropertyResponse.getScopeName());
    assertEquals("Stream name in response", stream1, streamPropertyResponse.getStreamName());
    log.info("Create stream: {} successful", stream1);
    // Test listScopes GET http://controllerURI:Port/v1/scopes
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes").toString();
    webTarget = client.target(resourceURl);
    builder = webTarget.request();
    response = builder.get();
    assertEquals("List scopes", OK.getStatusCode(), response.getStatus());
    log.info("List scopes successful");

    // Test listStreams GET /v1/scopes/{scopeName}/streams
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams").toString();
    webTarget = client.target(resourceURl);
    builder = webTarget.request();
    response = builder.get();
    assertEquals("List streams", OK.getStatusCode(), response.getStatus());
    Assert.assertEquals("List streams size", 1, response.readEntity(StreamsList.class).getStreams().size());
    log.info("List streams successful");

    // Test getScope GET /v1/scopes/{scopeName}
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1).toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get scope status", OK.getStatusCode(), response.getStatus());
    assertEquals("Get scope scope1 response", scope1, response.readEntity(ScopeProperty.class).getScopeName());
    log.info("Get scope successful");
    // Test updateStream PUT /v1/scopes/{scopeName}/streams/{streamName}
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams/" + stream1).toString();
    UpdateStreamRequest updateStreamRequest = new UpdateStreamRequest();
    ScalingConfig scalingConfig1 = new ScalingConfig();
    scalingConfig1.setType(ScalingConfig.TypeEnum.FIXED_NUM_SEGMENTS);
    scalingConfig1.setTargetRate(2);
    // Update the existing scaleFactor from 2 to 3.
    scalingConfig1.scaleFactor(3);
    // Update the existing minSegments from 2 to 4.
    scalingConfig1.minSegments(4);
    updateStreamRequest.setScalingPolicy(scalingConfig1);
    updateStreamRequest.setRetentionPolicy(retentionConfig);
    response = client.target(resourceURl).request(MediaType.APPLICATION_JSON_TYPE).put(Entity.json(updateStreamRequest));
    assertEquals("Update stream status", OK.getStatusCode(), response.getStatus());
    assertEquals("Verify updated property", 4,
            response.readEntity(StreamProperty.class).getScalingPolicy().getMinSegments().intValue());
    log.info("Update stream successful");

    // Test getStream GET /v1/scopes/{scopeName}/streams/{streamName}
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams/" + stream1).toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get stream status", OK.getStatusCode(), response.getStatus());
    assertEquals("Get stream stream1 response", stream1, response.readEntity(StreamProperty.class).getStreamName());
    log.info("Get stream successful");

    // Test updateStreamState: seal the stream.
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams/" + stream1 + "/state").toString();
    StreamState streamState = new StreamState();
    streamState.setStreamState(StreamState.StreamStateEnum.SEALED);
    response = client.target(resourceURl).request(MediaType.APPLICATION_JSON_TYPE).put(Entity.json(streamState));
    assertEquals("UpdateStreamState status", OK.getStatusCode(), response.getStatus());
    assertEquals("UpdateStreamState status in response", streamState.getStreamState(),
            response.readEntity(StreamState.class).getStreamState());
    log.info("Update stream state successful");

    // Test deleteStream
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1 + "/streams/" + stream1).toString();
    response = client.target(resourceURl).request().delete();
    assertEquals("DeleteStream status", NO_CONTENT.getStatusCode(), response.getStatus());
    log.info("Delete stream successful");

    // Test deleteScope
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + scope1).toString();
    response = client.target(resourceURl).request().delete();
    assertEquals("Delete scope status", NO_CONTENT.getStatusCode(), response.getStatus());
    log.info("Delete Scope successful");
    // Test reader group APIs.
    // Prepare the streams and readers using the admin client.
    final String testScope = RandomStringUtils.randomAlphanumeric(10);
    final String testStream1 = RandomStringUtils.randomAlphanumeric(10);
    final String testStream2 = RandomStringUtils.randomAlphanumeric(10);
    URI controllerUri = SETUP_UTILS.getControllerUri();
    @Cleanup("shutdown")
    InlineExecutor inlineExecutor = new InlineExecutor();
    try (StreamManager streamManager = new StreamManagerImpl(createController(controllerUri, inlineExecutor))) {
        log.info("Creating scope: {}", testScope);
        streamManager.createScope(testScope);
        log.info("Creating stream: {}", testStream1);
        StreamConfiguration streamConf1 = StreamConfiguration.builder()
                .scope(testScope)
                .streamName(testStream1)
                .scalingPolicy(ScalingPolicy.fixed(1))
                .build();
        streamManager.createStream(testScope, testStream1, streamConf1);
        log.info("Creating stream: {}", testStream2);
        StreamConfiguration streamConf2 = StreamConfiguration.builder()
                .scope(testScope)
                .streamName(testStream2)
                .scalingPolicy(ScalingPolicy.fixed(1))
                .build();
        streamManager.createStream(testScope, testStream2, streamConf2);
    }
    final String readerGroupName1 = RandomStringUtils.randomAlphanumeric(10);
    final String readerGroupName2 = RandomStringUtils.randomAlphanumeric(10);
    final String reader1 = RandomStringUtils.randomAlphanumeric(10);
    final String reader2 = RandomStringUtils.randomAlphanumeric(10);
    try (ClientFactory clientFactory = new ClientFactoryImpl(testScope, createController(controllerUri, inlineExecutor));
         ReaderGroupManager readerGroupManager = ReaderGroupManager.withScope(testScope,
                 ClientConfig.builder().controllerURI(controllerUri).build())) {
        readerGroupManager.createReaderGroup(readerGroupName1, ReaderGroupConfig.builder()
                .stream(Stream.of(testScope, testStream1))
                .stream(Stream.of(testScope, testStream2))
                .build());
        readerGroupManager.createReaderGroup(readerGroupName2, ReaderGroupConfig.builder()
                .stream(Stream.of(testScope, testStream1))
                .stream(Stream.of(testScope, testStream2))
                .build());
        clientFactory.createReader(reader1, readerGroupName1, new JavaSerializer<Long>(), ReaderConfig.builder().build());
        clientFactory.createReader(reader2, readerGroupName1, new JavaSerializer<Long>(), ReaderConfig.builder().build());
    }

    // Test fetching readergroups.
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + testScope + "/readergroups").toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get readergroups status", OK.getStatusCode(), response.getStatus());
    ReaderGroupsList readerGroupsList = response.readEntity(ReaderGroupsList.class);
    assertEquals("Get readergroups size", 2, readerGroupsList.getReaderGroups().size());
    assertTrue(readerGroupsList.getReaderGroups().contains(
            new ReaderGroupsListReaderGroups().readerGroupName(readerGroupName1)));
    assertTrue(readerGroupsList.getReaderGroups().contains(
            new ReaderGroupsListReaderGroups().readerGroupName(readerGroupName2)));
    log.info("Get readergroups successful");

    // Test fetching readergroup info.
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + testScope + "/readergroups/" + readerGroupName1).toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get readergroup properties status", OK.getStatusCode(), response.getStatus());
    ReaderGroupProperty readerGroupProperty = response.readEntity(ReaderGroupProperty.class);
    assertEquals("Get readergroup name", readerGroupName1, readerGroupProperty.getReaderGroupName());
    assertEquals("Get readergroup scope name", testScope, readerGroupProperty.getScopeName());
    assertEquals("Get readergroup streams size", 2, readerGroupProperty.getStreamList().size());
    assertTrue(readerGroupProperty.getStreamList().contains(testStream1));
    assertTrue(readerGroupProperty.getStreamList().contains(testStream2));
    assertEquals("Get readergroup onlinereaders size", 2, readerGroupProperty.getOnlineReaderIds().size());
    assertTrue(readerGroupProperty.getOnlineReaderIds().contains(reader1));
    assertTrue(readerGroupProperty.getOnlineReaderIds().contains(reader2));

    // Test readergroup or scope not found.
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + testScope + "/readergroups/" + "unknownreadergroup").toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get readergroup properties status", NOT_FOUND.getStatusCode(), response.getStatus());
    resourceURl = new StringBuilder(restServerURI).append("/v1/scopes/" + "unknownscope" + "/readergroups/" + readerGroupName1).toString();
    response = client.target(resourceURl).request().get();
    assertEquals("Get readergroup properties status", NOT_FOUND.getStatusCode(), response.getStatus());
    log.info("Get readergroup properties successful");
    log.info("Test restApiTests passed successfully!");
}
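The client, webTarget, restServerURI, and resourceURl fields used throughout restApiTests are initialized outside the portion shown in this listing. The following is a minimal sketch of the kind of setup assumed, using the standard JAX-RS client API; the field modifiers and lifecycle methods below are illustrative, not the test's actual code.

// Illustrative setup only; the real test class wires these fields elsewhere.
private Client client;
private WebTarget webTarget;
private String restServerURI;
private String resourceURl;

@Before
public void setup() {
    client = ClientBuilder.newClient();
}

@After
public void tearDown() {
    client.close();
}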

Use of io.pravega.client.ClientFactory in project pravega by pravega.
From class StreamCutsTest, method testReaderGroupCuts.
@Test(timeout = 40000)
public void testReaderGroupCuts() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder()
            .scope("test")
            .streamName("test")
            .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1))
            .build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope("test").get();
    controller.createStream(config).get();
    @Cleanup
    ConnectionFactory connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    @Cleanup
    ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(),
            EventWriterConfig.builder().build());
    writer.writeEvent("0", "fpj was here").get();
    writer.writeEvent("0", "fpj was here again").get();
    @Cleanup
    ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
    ReaderGroup readerGroup = groupManager.createReaderGroup("cuts",
            ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
    @Cleanup
    EventStreamReader<String> reader = clientFactory.createReader("readerId", "cuts", new JavaSerializer<>(),
            ReaderConfig.builder().build());
    EventRead<String> firstEvent = reader.readNextEvent(15000);
    EventRead<String> secondEvent = reader.readNextEvent(15000);
    assertNotNull(firstEvent);
    assertEquals("fpj was here", firstEvent.getEvent());
    assertNotNull(secondEvent);
    assertEquals("fpj was here again", secondEvent.getEvent());
    Map<Stream, StreamCut> cuts = readerGroup.getStreamCuts();
    validateCuts(readerGroup, cuts, Collections.singleton("test/test/0"));

    // Scale the stream to verify that we get more segments in the cut.
    Stream stream = Stream.of("test", "test");
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.5);
    map.put(0.5, 1.0);
    Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 1st scaling");
    writer.writeEvent("0", "fpj was here again").get();
    writer.writeEvent("1", "fpj was here again").get();
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    HashSet<String> segmentNames = new HashSet<>();
    segmentNames.add("test/test/1");
    segmentNames.add("test/test/2");
    validateCuts(readerGroup, cuts, Collections.unmodifiableSet(segmentNames));

    // Scale down to verify that the number drops back.
    map = new HashMap<>();
    map.put(0.0, 1.0);
    ArrayList<Integer> toSeal = new ArrayList<>();
    toSeal.add(1);
    toSeal.add(2);
    result = controller.scaleStream(stream, Collections.unmodifiableList(toSeal), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 2nd scaling");
    writer.writeEvent("0", "fpj was here again").get();
    reader.readNextEvent(15000);
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    validateCuts(readerGroup, cuts, Collections.singleton("test/test/3"));

    // Scale up to 4 segments again.
    map = new HashMap<>();
    map.put(0.0, 0.25);
    map.put(0.25, 0.5);
    map.put(0.5, 0.75);
    map.put(0.75, 1.0);
    result = controller.scaleStream(stream, Collections.singletonList(3), map, executor).getFuture().get();
    assertTrue(result);
    log.info("Finished 3rd scaling");
    writer.writeEvent("0", "fpj was here again").get();
    reader.readNextEvent(15000);
    cuts = readerGroup.getStreamCuts();
    segmentNames = new HashSet<>();
    segmentNames.add("test/test/4");
    segmentNames.add("test/test/5");
    segmentNames.add("test/test/6");
    segmentNames.add("test/test/7");
    validateCuts(readerGroup, cuts, Collections.unmodifiableSet(segmentNames));
}
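Each scaling step above is verified indirectly through the stream cuts returned by the reader group. As a small additional check, the current segment count could also be queried directly from the Controller after every scale; a sketch only, reusing the getCurrentSegments call seen in the auto-scale tests and assumed to run inside a method that declares throws Exception.

// Sketch only: ask the Controller how many segments are currently open for test/test.
int segmentCount = controller.getCurrentSegments("test", "test").get().getSegments().size();
log.info("Stream test/test currently has {} segments", segmentCount);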