Use of io.pravega.client.connection.impl.ConnectionFactory in project pravega by pravega.
In the class ScopeTest, the method testDeleteScopeRecursive:
@Test
public void testDeleteScopeRecursive() throws Exception {
    final String scope = "testDeleteScope";
    final String testFalseScope = "falseScope";
    final String streamName1 = "test1";
    final String streamName2 = "test2";
    final String streamName3 = "test3";
    final String streamName4 = "test4";
    final String kvtName1 = "kvt1";
    final String kvtName2 = "kvt2";
    final String kvtName3 = "kvt3";
    final String groupName1 = "rg1";
    final String groupName2 = "rg2";
    final String groupName3 = "rg3";
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    @Cleanup
    Controller controller = controllerWrapper.getController();
    ClientConfig clientConfig = ClientConfig.builder().controllerURI(URI.create("tcp://localhost:" + controllerPort)).build();
    @Cleanup
    ConnectionPool cp = new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig));
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(clientConfig);
    // Create the scope.
    controllerWrapper.getControllerService().createScope(scope, 0L).get();
    assertTrue(controller.checkScopeExists(scope).get());
    // Create streams under the scope.
    assertTrue(controller.createStream(scope, streamName1, config).get());
    assertTrue(controller.createStream(scope, streamName2, config).get());
    assertTrue(controller.createStream(scope, streamName3, config).get());
    @Cleanup
    StreamManager streamManager = new StreamManagerImpl(controller, cp);
    @Cleanup
    KeyValueTableManager keyValueTableManager = new KeyValueTableManagerImpl(clientConfig);
    @Cleanup
    ReaderGroupManager readerGroupManager = new ReaderGroupManagerImpl(scope, clientConfig, connectionFactory);
    // 1. Calling deleteScopeRecursive() on a scope that was never created still succeeds.
    assertTrue(streamManager.deleteScopeRecursive(testFalseScope));
    // Create key-value tables under the scope.
    KeyValueTableConfiguration kvtConfig = KeyValueTableConfiguration.builder()
            .partitionCount(2).primaryKeyLength(4).secondaryKeyLength(4).build();
    assertTrue(keyValueTableManager.createKeyValueTable(scope, kvtName1, kvtConfig));
    assertTrue(keyValueTableManager.createKeyValueTable(scope, kvtName2, kvtConfig));
    // Create reader groups under the same scope.
    assertTrue(readerGroupManager.createReaderGroup(groupName1,
            ReaderGroupConfig.builder().stream(getScopedStreamName(scope, streamName1)).build()));
    assertTrue(readerGroupManager.createReaderGroup(groupName2,
            ReaderGroupConfig.builder().stream(getScopedStreamName(scope, streamName2)).build()));
    // Call deleteScopeRecursive() to delete the scope and everything inside it.
    assertTrue(streamManager.deleteScopeRecursive(scope));
    // Validate that the scope is deleted.
    assertFalse(controller.checkScopeExists(scope).get());
    // Creating a Stream/ReaderGroup/KVT in the deleted scope must now throw.
    AssertExtensions.assertThrows("Failed to create Reader Group as Scope does not exist",
            () -> readerGroupManager.createReaderGroup(groupName3,
                    ReaderGroupConfig.builder().stream(getScopedStreamName(scope, streamName2)).build()),
            e -> e instanceof IllegalArgumentException);
    AssertExtensions.assertThrows("Scope does not exist",
            () -> controller.createStream(scope, streamName4, config).get(),
            e -> e instanceof IllegalArgumentException);
    AssertExtensions.assertThrows("Scope does not exist",
            () -> keyValueTableManager.createKeyValueTable(scope, kvtName3, kvtConfig),
            e -> e instanceof IllegalArgumentException);
}
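
The test reaches deleteScopeRecursive through the internal StreamManagerImpl. An application would normally get there through the public StreamManager factory instead. Below is a minimal sketch, assuming a controller listening at tcp://localhost:9090, a Pravega version in which deleteScopeRecursive is part of the StreamManager interface, and an illustrative scope name myScope:

import io.pravega.client.ClientConfig;
import io.pravega.client.admin.StreamManager;
import java.net.URI;

public class DeleteScopeRecursiveSketch {
    public static void main(String[] args) throws Exception {
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090")) // assumed local controller
                .build();
        try (StreamManager streamManager = StreamManager.create(clientConfig)) {
            // Deletes the scope together with any streams, reader groups and
            // key-value tables it still contains, so the contents do not have
            // to be sealed and deleted one by one first.
            boolean deleted = streamManager.deleteScopeRecursive("myScope");
            System.out.println("Scope deleted: " + deleted);
        }
    }
}

As the test shows, the call also returns true for a scope that never existed, which makes it safe to use for idempotent cleanup.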
Use of io.pravega.client.connection.impl.ConnectionFactory in project pravega by pravega.
In the class StreamMetadataTasksTest, the method setup:
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
    StreamMetadataStore streamStore = getStore();
    // Create a partial mock: unstubbed calls fall through to the real store.
    streamStorePartialMock = spy(streamStore);
    ImmutableMap<BucketStore.ServiceType, Integer> map = ImmutableMap.of(
            BucketStore.ServiceType.RetentionService, 1,
            BucketStore.ServiceType.WatermarkingService, 1);
    bucketStore = StreamStoreFactory.createInMemoryBucketStore(map);
    kvtStore = spy(getKvtStore());
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
    EventHelper helper = EventHelperMock.getEventHelperMock(executor, "host",
            ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    streamMetadataTasks = spy(new StreamMetadataTasks(streamStorePartialMock, bucketStore, taskMetadataStore,
            segmentHelperMock, executor, "host", new GrpcAuthHelper(authEnabled, "key", 300), helper));
    EventHelper helperMock = EventHelperMock.getEventHelperMock(executor, "host",
            ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    kvtMetadataTasks = spy(new TableMetadataTasks(kvtStore, segmentHelperMock, executor, executor, "host",
            GrpcAuthHelper.getDisabledAuthHelper(), helperMock));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStorePartialMock, segmentHelperMock,
            executor, "host", new GrpcAuthHelper(authEnabled, "key", 300));
    this.streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStorePartialMock, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStorePartialMock, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStorePartialMock, bucketStore, executor),
            new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStorePartialMock, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStorePartialMock, bucketStore, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new CreateReaderGroupTask(streamMetadataTasks, streamStorePartialMock, executor),
            new DeleteReaderGroupTask(streamMetadataTasks, streamStorePartialMock, executor),
            new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
            streamStorePartialMock,
            new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor),
            executor);
    consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStorePartialMock, bucketStore,
            streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
    commitWriter = new EventStreamWriterMock<>();
    abortWriter = new EventStreamWriterMock<>();
    streamTransactionMetadataTasks.initializeStreamWriters(commitWriter, abortWriter);
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scalingPolicy(policy1).build();
    streamStorePartialMock.createScope(SCOPE, null, executor).join();
    // stream1: create it, then scale it by sealing segment 1 (key range 0.5-1.0)
    // and replacing it with two new segments covering 0.5-0.75 and 0.75-1.0.
    long start = System.currentTimeMillis();
    streamStorePartialMock.createStream(SCOPE, stream1, configuration1, start, null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Long> sealedSegments = Collections.singletonList(1L);
    VersionedMetadata<EpochTransitionRecord> response = streamStorePartialMock.submitScale(SCOPE, stream1,
            sealedSegments, Arrays.asList(segment1, segment2), start + 20, null, null, executor).get();
    VersionedMetadata<State> state = streamStorePartialMock.getVersionedState(SCOPE, stream1, null, executor).join();
    state = streamStorePartialMock.updateVersionedState(SCOPE, stream1, State.SCALING, state, null, executor).join();
    streamStorePartialMock.startScale(SCOPE, stream1, false, response, state, null, executor).join();
    streamStorePartialMock.scaleCreateNewEpochs(SCOPE, stream1, response, null, executor).get();
    streamStorePartialMock.scaleSegmentsSealed(SCOPE, stream1,
            sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), response, null, executor).get();
    streamStorePartialMock.completeScale(SCOPE, stream1, response, null, executor).join();
    streamStorePartialMock.updateVersionedState(SCOPE, stream1, State.ACTIVE, state, null, executor).get();
    // stream2
    streamStorePartialMock.createStream(SCOPE, stream2, configuration1, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream2, State.ACTIVE, null, executor).get();
    // stream3
    streamStorePartialMock.createStream(SCOPE, stream3, configuration1, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream3, State.ACTIVE, null, executor).get();
}
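
The setup leans heavily on Mockito's spy() to build partial mocks: the spied store runs its real logic unless a specific method is stubbed later in a test. A self-contained sketch of that pattern, with a plain List standing in for the metadata store:

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

import java.util.ArrayList;
import java.util.List;

public class PartialMockSketch {
    public static void main(String[] args) {
        List<String> store = spy(new ArrayList<>());
        store.add("value");                  // real add() runs; the element is stored
        System.out.println(store.get(0));    // prints "value"
        // Override a single method; everything else keeps its real behavior.
        doReturn(42).when(store).size();
        System.out.println(store.size());    // prints 42
    }
}

Note the doReturn(...).when(spy) form: with a spy, when(store.size()) would invoke the real size() during stubbing, which is why it is the recommended style for partial mocks.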
Use of io.pravega.client.connection.impl.ConnectionFactory in project pravega by pravega.
In the class StreamTransactionMetadataTasksTest, the method setup:
@Before
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting ZK server", e);
    }
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    streamStore = StreamStoreFactory.createZKStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    bucketStore = StreamStoreFactory.createInMemoryBucketStore();
    connectionFactory = Mockito.mock(ConnectionFactory.class);
    segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelperMock,
            executor, "host", GrpcAuthHelper.getDisabledAuthHelper());
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
}
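
Both test setups above connect to the embedded ZooKeeper through Curator with an ExponentialBackoffRetry(200, 10, 5000): a 200 ms base sleep, at most 10 retries, and a 5000 ms cap on any single sleep. A minimal standalone sketch of the same client construction, assuming a ZooKeeper server at localhost:2181 and that the illustrative path /demo/node does not already exist:

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;

public class CuratorClientSketch {
    public static void main(String[] args) throws Exception {
        // baseSleepTimeMs = 200, maxRetries = 10, maxSleepMs = 5000 -- the same
        // policy the tests pass to CuratorFrameworkFactory.newClient(...).
        ExponentialBackoffRetry retryPolicy = new ExponentialBackoffRetry(200, 10, 5000);
        try (CuratorFramework client = CuratorFrameworkFactory.newClient("localhost:2181", retryPolicy)) {
            client.start();
            client.blockUntilConnected();
            client.create().creatingParentsIfNeeded().forPath("/demo/node", "hello".getBytes());
            System.out.println(new String(client.getData().forPath("/demo/node")));
        }
    }
}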
Use of io.pravega.client.connection.impl.ConnectionFactory in project pravega by pravega.
In the class TimeoutServiceZkStoreTest, the method getSegmentHelper:
@Override
SegmentHelper getSegmentHelper() {
    // Note: hostStore and the mocked connectionFactory are created but never
    // used here; the returned mock helper is self-contained.
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
    return SegmentHelperMock.getSegmentHelperMock();
}
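
Here the ConnectionFactory mock is left unstubbed, so every method returns Mockito defaults. When a test does need the mock to do something, the one call client code typically makes, getInternalExecutor() (see its use in PravegaTest below), can be stubbed. A hedged sketch, assuming Mockito is on the classpath:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.pravega.client.connection.impl.ConnectionFactory;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

public class ConnectionFactoryMockSketch {
    public static void main(String[] args) {
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        ConnectionFactory connectionFactory = mock(ConnectionFactory.class);
        // Stub only what the code under test actually calls; every other
        // method keeps returning Mockito's defaults (null, 0, false, ...).
        when(connectionFactory.getInternalExecutor()).thenReturn(executor);
        System.out.println(connectionFactory.getInternalExecutor() == executor); // prints true
        executor.shutdown();
    }
}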
Use of io.pravega.client.connection.impl.ConnectionFactory in project pravega by pravega.
In the class PravegaTest, the method simpleTest:
/**
 * Invokes the simple test and ensures we are able to produce events.
 * The test fails in case of exceptions while writing to the stream.
 */
@Test
public void simpleTest() {
    Service conService = Utils.createPravegaControllerService(null);
    List<URI> ctlURIs = conService.getServiceDetails();
    URI controllerUri = ctlURIs.get(0);
    log.info("Invoking create stream with Controller URI: {}", controllerUri);
    @Cleanup
    ConnectionFactory connectionFactory = new SocketConnectionFactoryImpl(Utils.buildClientConfig(controllerUri));
    @Cleanup
    ControllerImpl controller = new ControllerImpl(
            ControllerImplConfig.builder().clientConfig(Utils.buildClientConfig(controllerUri)).build(),
            connectionFactory.getInternalExecutor());
    assertTrue(controller.createScope(STREAM_SCOPE).join());
    assertTrue(controller.createStream(STREAM_SCOPE, STREAM_NAME, config).join());
    @Cleanup
    EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope(STREAM_SCOPE, Utils.buildClientConfig(controllerUri));
    log.info("Invoking Writer test with Controller URI: {}", controllerUri);
    @Cleanup
    EventStreamWriter<Serializable> writer = clientFactory.createEventWriter(STREAM_NAME, new JavaSerializer<>(), EventWriterConfig.builder().build());
    for (int i = 0; i < NUM_EVENTS; i++) {
        String event = "Publish " + i + "\n";
        log.debug("Producing event: {} ", event);
        // Any exception while writing an event fails the test.
        writer.writeEvent("", event);
        writer.flush();
    }
    log.info("Invoking Reader test.");
    ReaderGroupManager groupManager = ReaderGroupManager.withScope(STREAM_SCOPE, Utils.buildClientConfig(controllerUri));
    groupManager.createReaderGroup(READER_GROUP, ReaderGroupConfig.builder().stream(Stream.of(STREAM_SCOPE, STREAM_NAME)).build());
    @Cleanup
    EventStreamReader<String> reader = clientFactory.createReader(UUID.randomUUID().toString(), READER_GROUP,
            new JavaSerializer<>(), ReaderConfig.builder().build());
    int readCount = 0;
    EventRead<String> event = null;
    do {
        event = reader.readNextEvent(10_000);
        log.debug("Read event: {}.", event.getEvent());
        if (event.getEvent() != null) {
            readCount++;
        }
        // Keep reading until all written events are read; otherwise the test times out.
    } while ((event.getEvent() != null || event.isCheckpoint()) && readCount < NUM_EVENTS);
    assertEquals("Read count should be equal to write count", NUM_EVENTS, readCount);
}
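
simpleTest wires up the internal ControllerImpl directly because it is a system test; an application would do the same write/read round trip entirely through the public admin and client APIs. A minimal sketch, assuming a standalone Pravega instance at tcp://localhost:9090 (the scope, stream, and reader-group names are illustrative):

import io.pravega.client.ClientConfig;
import io.pravega.client.EventStreamClientFactory;
import io.pravega.client.admin.ReaderGroupManager;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.stream.EventRead;
import io.pravega.client.stream.EventStreamReader;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.ReaderConfig;
import io.pravega.client.stream.ReaderGroupConfig;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.impl.JavaSerializer;
import java.net.URI;
import java.util.UUID;

public class WriteReadSketch {
    public static void main(String[] args) throws Exception {
        ClientConfig clientConfig = ClientConfig.builder()
                .controllerURI(URI.create("tcp://localhost:9090")).build();
        // Admin plane: create the scope, stream, and reader group via public APIs.
        try (StreamManager streamManager = StreamManager.create(clientConfig)) {
            streamManager.createScope("demo");
            streamManager.createStream("demo", "events",
                    StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
        }
        try (ReaderGroupManager groupManager = ReaderGroupManager.withScope("demo", clientConfig)) {
            groupManager.createReaderGroup("demoGroup",
                    ReaderGroupConfig.builder().stream(Stream.of("demo", "events")).build());
        }
        // Data plane: write one event, then read it back.
        try (EventStreamClientFactory clientFactory = EventStreamClientFactory.withScope("demo", clientConfig);
             EventStreamWriter<String> writer = clientFactory.createEventWriter("events",
                     new JavaSerializer<>(), EventWriterConfig.builder().build());
             EventStreamReader<String> reader = clientFactory.createReader(UUID.randomUUID().toString(),
                     "demoGroup", new JavaSerializer<>(), ReaderConfig.builder().build())) {
            writer.writeEvent("routingKey", "hello pravega").join();
            EventRead<String> event = reader.readNextEvent(10_000);
            System.out.println("Read: " + event.getEvent());
        }
    }
}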