Use of io.pravega.controller.server.SegmentHelper in project pravega by pravega.
The class IntermittentCnxnFailureTest, method setup().
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    streamStore = StreamStoreFactory.createZKStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    segmentHelperMock = spy(new SegmentHelper());
    doReturn(Controller.NodeUri.newBuilder().setEndpoint("localhost").setPort(Config.SERVICE_PORT).build())
            .when(segmentHelperMock).getSegmentUri(anyString(), anyString(), anyInt(), any());
    ConnectionFactoryImpl connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    streamMetadataTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore, segmentHelperMock,
            executor, "host", connectionFactory, false, "");
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, hostStore, segmentHelperMock,
            executor, "host", connectionFactory, false, "");
    controllerService = new ControllerService(streamStore, hostStore, streamMetadataTasks,
            streamTransactionMetadataTasks, segmentHelperMock, executor, null);
    controllerService.createScope(SCOPE).get();
}
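The class name hints at how this spy is used: beyond the stubbed getSegmentUri, individual SegmentHelper calls can later be stubbed to fail, simulating an intermittent connection problem while the rest of the controller stays real. A hedged sketch of such a stub inside a test method, mirroring the createSegment matcher pattern from getFailingSegmentHelperMock below:

    // Hypothetical test snippet: make segment creation fail so that stream creation
    // can exercise its error handling against an otherwise working controller.
    doReturn(Futures.failedFuture(new RuntimeException("simulated connection failure")))
            .when(segmentHelperMock)
            .createSegment(anyString(), anyString(), anyInt(), any(), any(), any(), any());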
Use of io.pravega.controller.server.SegmentHelper in project pravega by pravega.
The class SegmentHelperMock, method getFailingSegmentHelperMock().
public static SegmentHelper getFailingSegmentHelperMock() {
    SegmentHelper helper = spy(new SegmentHelper());
    doReturn(NodeUri.newBuilder().setEndpoint("localhost").setPort(SERVICE_PORT).build()).when(helper).getSegmentUri(anyString(), anyString(), anyInt(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).sealSegment(anyString(), anyString(), anyInt(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).createSegment(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).deleteSegment(anyString(), anyString(), anyInt(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).createTransaction(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).abortTransaction(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).commitTransaction(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    doReturn(Futures.failedFuture(new RuntimeException())).when(helper).updatePolicy(anyString(), anyString(), any(), anyInt(), any(), any(), any());
    return helper;
}
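The failing mock is meant to be wired into the controller's task classes in place of a working SegmentHelper, so tests can drive error paths without a segment store. A hedged sketch, reusing the StreamMetadataTasks constructor arguments that appear in the setup examples of this section:

    // Sketch only: streamStore, hostStore, taskMetadataStore, executor and connectionFactory
    // are assumed to be initialized as in the surrounding test setups.
    SegmentHelper failingHelper = SegmentHelperMock.getFailingSegmentHelperMock();
    StreamMetadataTasks failingTasks = new StreamMetadataTasks(streamStore, hostStore, taskMetadataStore,
            failingHelper, executor, "host", connectionFactory, false, "");
    // Every stubbed segment-store call issued through failingTasks now completes exceptionally.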
Use of io.pravega.controller.server.SegmentHelper in project pravega by pravega.
The class StreamCutServiceTest, method setup().
@Before
public void setup() throws Exception {
    executor = Executors.newScheduledThreadPool(10);
    hostId = UUID.randomUUID().toString();
    streamMetadataStore = createStore(3, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createInMemoryStore(executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    streamMetadataTasks = new StreamMetadataTasks(streamMetadataStore, hostStore, taskMetadataStore, segmentHelper,
            executor, hostId, connectionFactory, false, "");
    service = new StreamCutService(3, hostId, streamMetadataStore, streamMetadataTasks, executor);
    service.startAsync();
    service.awaitRunning();
}
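Once the service is running, a test built on this setup would typically register a stream for automatic stream cuts and check that one of the buckets has picked it up. A rough sketch (the scope and stream names are hypothetical, and a real test may have to wait for the bucket to observe the registration):

    // Hypothetical test body using the fields initialized in setup().
    streamMetadataStore.addUpdateStreamForAutoStreamCut("scope", "stream",
            RetentionPolicy.builder().build(), null, executor).join();
    assertTrue(service.getBuckets().stream()
            .anyMatch(bucket -> bucket.getRetentionFutureMap().size() > 0));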
Use of io.pravega.controller.server.SegmentHelper in project pravega by pravega.
The class ZkStoreRetentionTest, method testOwnershipOfExistingBucket().
@Test(timeout = 10000)
public void testOwnershipOfExistingBucket() throws Exception {
    TestingServer zkServer2 = new TestingServerStarter().start();
    zkServer2.start();
    CuratorFramework zkClient2 = CuratorFrameworkFactory.newClient(zkServer2.getConnectString(), 10000, 1000, (r, e, s) -> false);
    zkClient2.start();
    ScheduledExecutorService executor2 = Executors.newScheduledThreadPool(10);
    String hostId = UUID.randomUUID().toString();
    StreamMetadataStore streamMetadataStore2 = StreamStoreFactory.createZKStore(zkClient2, 1, executor2);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createInMemoryStore(executor2);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    ConnectionFactoryImpl connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    StreamMetadataTasks streamMetadataTasks2 = new StreamMetadataTasks(streamMetadataStore2, hostStore, taskMetadataStore,
            segmentHelper, executor2, hostId, connectionFactory, false, "");
    String scope = "scope1";
    String streamName = "stream1";
    streamMetadataStore2.addUpdateStreamForAutoStreamCut(scope, streamName, RetentionPolicy.builder().build(), null, executor2).join();
    String scope2 = "scope2";
    String streamName2 = "stream2";
    streamMetadataStore2.addUpdateStreamForAutoStreamCut(scope2, streamName2, RetentionPolicy.builder().build(), null, executor2).join();
    StreamCutService service2 = new StreamCutService(1, hostId, streamMetadataStore2, streamMetadataTasks2, executor2);
    service2.startAsync();
    service2.awaitRunning();
    // Both streams were registered before the service started, so its single bucket should own them.
    assertTrue(service2.getBuckets().stream().allMatch(x -> x.getRetentionFutureMap().size() == 2));
    service2.stopAsync();
    service2.awaitTerminated();
    zkClient2.close();
    zkServer2.close();
    streamMetadataTasks2.close();
    connectionFactory.close();
    executor2.shutdown();
}
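A side note on the Curator client created above: the retry policy is a lambda that always returns false, i.e. the client never retries failed ZooKeeper operations. A sketch of an equivalent, more explicit form (behaviour should be the same):

    // Equivalent never-retry policy using org.apache.curator.retry.RetryNTimes.
    CuratorFramework zkClient2 = CuratorFrameworkFactory.newClient(zkServer2.getConnectString(),
            10000, 1000, new RetryNTimes(0, 0));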
Use of io.pravega.controller.server.SegmentHelper in project pravega by pravega.
The class StreamMetadataTasksTest, method setup().
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    StreamMetadataStore streamStore = StreamStoreFactory.createInMemoryStore(1, executor);
    // Create a partial mock and stub only the isTransactionOngoing call.
    streamStorePartialMock = spy(streamStore);
    doReturn(CompletableFuture.completedFuture(false)).when(streamStorePartialMock)
            .isTransactionOngoing(anyString(), anyString(), any(), any());
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    streamMetadataTasks = spy(new StreamMetadataTasks(streamStorePartialMock, hostStore, taskMetadataStore,
            segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key"));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStorePartialMock, hostStore,
            segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key");
    this.streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStorePartialMock, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStorePartialMock, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new SealStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            executor);
    consumer = new ControllerService(streamStorePartialMock, hostStore, streamMetadataTasks,
            streamTransactionMetadataTasks, segmentHelperMock, executor, null);
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
    streamStorePartialMock.createScope(SCOPE).join();
    long start = System.currentTimeMillis();
    streamStorePartialMock.createStream(SCOPE, stream1, configuration1, start, null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments = Collections.singletonList(1);
    StartScaleResponse response = streamStorePartialMock.startScale(SCOPE, stream1, sealedSegments,
            Arrays.asList(segment1, segment2), start + 20, false, null, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    streamStorePartialMock.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    streamStorePartialMock.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentsCreated,
            response.getActiveEpoch(), start + 20, null, executor).get();
    streamStorePartialMock.scaleSegmentsSealed(SCOPE, stream1,
            sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
            segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
}
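This setup opens a ZooKeeper test server, a Curator client, a connection factory and the two task classes; the matching teardown is not part of this excerpt. A minimal sketch of one, assuming the fields shown above and that both task classes are AutoCloseable:

    @After
    public void tearDown() throws Exception {
        // Hypothetical cleanup mirroring the fields initialized in setup().
        streamMetadataTasks.close();
        streamTransactionMetadataTasks.close();
        connectionFactory.close();
        zkClient.close();
        zkServer.close();
    }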