Use of io.pravega.test.common.TestingServerStarter in project pravega by pravega.
From the setup method of the class StreamTransactionMetadataTasksTest.
@Before
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting ZK server", e);
    }
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(),
            new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    streamStore = StreamStoreFactory.createZKStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = Mockito.mock(ConnectionFactory.class);
    streamMetadataTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore, segmentHelperMock,
            executor, "host", connectionFactory, this.authEnabled, "secret");
}
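The setup above starts an embedded ZooKeeper server and a Curator client without closing them, so a matching @After hook would normally release them. The sketch below is an assumption rather than the test's actual tearDown; it only closes the two resources whose lifecycles are visible in this snippet (both CuratorFramework and TestingServer implement Closeable).

@After
public void tearDown() throws Exception {
    // Hypothetical cleanup sketch, not taken from the Pravega test source.
    zkClient.close();
    zkServer.close();
}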
Use of io.pravega.test.common.TestingServerStarter in project pravega by pravega.
From the setup method of the class ControllerClusterListenerTest.
@Before
public void setup() throws Exception {
    // 1. Start ZK server.
    zkServer = new TestingServerStarter().start();
    // 2. Start ZK client.
    curatorClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(),
            new ExponentialBackoffRetry(200, 10, 5000));
    curatorClient.start();
    // 3. Start executor service.
    executor = Executors.newScheduledThreadPool(5);
    // 4. Start cluster event listener.
    clusterZK = new ClusterZKImpl(curatorClient, ClusterType.CONTROLLER);
    clusterZK.addListener((eventType, host) -> {
        switch (eventType) {
            case HOST_ADDED:
                nodeAddedQueue.offer(host.getHostId());
                break;
            case HOST_REMOVED:
                nodeRemovedQueue.offer(host.getHostId());
                break;
            case ERROR:
            default:
                break;
        }
    });
}
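A test using this fixture would typically register a host with the cluster and then wait for the listener to push the corresponding host id onto the queue. The sketch below is illustrative only: it assumes ClusterZKImpl exposes registerHost(Host) from the Cluster interface, that nodeAddedQueue is a BlockingQueue<String>, and that the Host constructor arguments shown are placeholders.

@Test(timeout = 30000)
public void clusterListenerSketch() throws Exception {
    // Hypothetical usage of the listener wired up in setup(); arguments are placeholders.
    Host host = new Host("localhost", 9090, "endpoint-1");
    clusterZK.registerHost(host);
    // The HOST_ADDED callback should enqueue the id of the host registered above.
    assertEquals(host.getHostId(), nodeAddedQueue.poll(10, TimeUnit.SECONDS));
}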
Use of io.pravega.test.common.TestingServerStarter in project pravega by pravega.
From the setup method of the class ControllerServiceWithZKStreamTest.
@Before
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting ZK server", e);
    }
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(),
            new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    StreamMetadataStore streamStore = StreamStoreFactory.createZKStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder()
            .controllerURI(URI.create("tcp://localhost"))
            .build());
    streamMetadataTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore, segmentHelperMock,
            executor, "host", connectionFactory, false, "");
    this.streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStore, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStore, executor),
            new SealStreamTask(streamMetadataTasks, streamStore, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStore, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
            executor);
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, hostStore, segmentHelperMock,
            executor, "host", connectionFactory, false, "");
    consumer = new ControllerService(streamStore, hostStore, streamMetadataTasks, streamTransactionMetadataTasks,
            segmentHelperMock, executor, null);
}
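Note the design choice here: installing a ControllerEventStreamWriterMock routes controller events straight into the in-process StreamRequestHandler, so scale, update, seal, truncate and delete requests are handled inside the test instead of flowing through a real Pravega event stream. With the ControllerService assembled, a test body can then call it directly; the line below is only a hedged sketch of that usage, and the exact ControllerService method signature may differ across Pravega versions.

// Illustrative only: exercise the ControllerService built in setup().
CreateScopeStatus status = consumer.createScope("testScope").get();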
Use of io.pravega.test.common.TestingServerStarter in project pravega by pravega.
From the setup method of the class ZKControllerServiceStarterTest.
@Override
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting test zk server", e);
        Assert.fail("Error starting test zk server");
    }
    ZKClientConfig zkClientConfig = ZKClientConfigImpl.builder()
            .connectionString(zkServer.getConnectString())
            .initialSleepInterval(500)
            .maxRetries(10)
            .namespace("pravega/" + UUID.randomUUID())
            .sessionTimeoutMs(10 * 1000)
            .build();
    storeClientConfig = StoreClientConfigImpl.withZKClient(zkClientConfig);
    storeClient = StoreClientFactory.createStoreClient(storeClientConfig);
    Assert.assertNotNull(storeClient);
}
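Two details of this setup are worth calling out. The ZK namespace is "pravega/" plus a random UUID, so each run works in its own ZooKeeper subtree and repeated executions cannot collide on leftover znodes. The store client built at the end is, presumably, what the base ControllerServiceStarterTest hands to the controller service starter under test; that usage is not shown in this snippet.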
Use of io.pravega.test.common.TestingServerStarter in project pravega by pravega.
From the setup method of the class ScaleRequestHandlerTest.
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(),
            new ExponentialBackoffRetry(20, 1, 50));
    zkClient.start();
    String hostId;
    try {
        // On each controller process restart, it gets a fresh hostId,
        // which is a combination of hostname and random GUID.
        hostId = InetAddress.getLocalHost().getHostAddress() + UUID.randomUUID().toString();
    } catch (UnknownHostException e) {
        hostId = UUID.randomUUID().toString();
    }
    streamStore = StreamStoreFactory.createZKStore(zkClient, executor);
    taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    clientFactory = mock(ClientFactory.class);
    streamMetadataTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore, segmentHelper,
            executor, hostId, connectionFactory, false, "");
    streamMetadataTasks.initializeStreamWriters(clientFactory, Config.SCALE_STREAM_NAME);
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, hostStore, segmentHelper,
            executor, hostId, connectionFactory, false, "");
    long createTimestamp = System.currentTimeMillis();
    // add a host in zk
    // mock pravega
    // create a stream
    streamStore.createScope(scope).get();
    streamMetadataTasks.createStream(scope, stream, config, createTimestamp).get();
}