use of io.pravega.controller.store.stream.StartScaleResponse in project pravega by pravega.
the class TaskTest method setUp.
@Before
public void setUp() throws ExecutionException, InterruptedException {
    final String stream2 = "stream2";
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final ScalingPolicy policy2 = ScalingPolicy.fixed(3);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
    final StreamConfiguration configuration2 = StreamConfiguration.builder().scope(SCOPE).streamName(stream2).scalingPolicy(policy2).build();
    // region createStream
    streamStore.createScope(SCOPE).join();
    long start = System.currentTimeMillis();
    streamStore.createStream(SCOPE, stream1, configuration1, start, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).join();
    streamStore.createStream(SCOPE, stream2, configuration2, start, null, executor).join();
    streamStore.setState(SCOPE, stream2, State.ACTIVE, null, executor).join();
    // endregion
    // region scaleSegments
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments = Collections.singletonList(1);
    StartScaleResponse response = streamStore.startScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), start + 20, false, null, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    streamStore.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    streamStore.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment3 = new AbstractMap.SimpleEntry<>(0.0, 0.5);
    AbstractMap.SimpleEntry<Double, Double> segment4 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment5 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments1 = Arrays.asList(0, 1, 2);
    response = streamStore.startScale(SCOPE, stream2, sealedSegments1, Arrays.asList(segment3, segment4, segment5), start + 20, false, null, executor).get();
    segmentsCreated = response.getSegmentsCreated();
    streamStore.setState(SCOPE, stream2, State.SCALING, null, executor).get();
    streamStore.scaleNewSegmentsCreated(SCOPE, stream2, sealedSegments1, segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream2, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    // endregion
}
use of io.pravega.controller.store.stream.StartScaleResponse in project pravega by pravega.
the class StreamMetadataTasksTest method manualScaleTest.
@Test(timeout = 30000)
public void manualScaleTest() throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(1);
    final StreamConfiguration configuration = StreamConfiguration.builder().scope(SCOPE).streamName("test").scalingPolicy(policy).build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    List<AbstractMap.SimpleEntry<Double, Double>> newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.5));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.5, 1.0));
    ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(0), newRanges, 30, null).get();
    assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus());
    OperationContext context = streamStorePartialMock.createContext(SCOPE, "test");
    assertEquals(streamStorePartialMock.getState(SCOPE, "test", false, context, executor).get(), State.ACTIVE);
    // Even after startScale runs for the manual scale request, the stream state should remain ACTIVE.
    StartScaleResponse response = streamStorePartialMock.startScale(SCOPE, "test", Collections.singletonList(0), newRanges, 30, true, null, executor).get();
    assertEquals(response.getActiveEpoch(), 0);
    assertEquals(streamStorePartialMock.getState(SCOPE, "test", true, context, executor).get(), State.ACTIVE);
    AssertExtensions.assertThrows("", () -> streamStorePartialMock.scaleNewSegmentsCreated(SCOPE, "test", Collections.singletonList(0), response.getSegmentsCreated(), response.getActiveEpoch(), 30, context, executor).get(), ex -> Exceptions.unwrap(ex) instanceof StoreException.IllegalStateException);
    List<Segment> segments = streamMetadataTasks.startScale((ScaleOpEvent) requestEventWriter.getEventQueue().take(), true, context, "").get();
    assertTrue(segments.stream().anyMatch(x -> x.getNumber() == 1 && x.getKeyStart() == 0.0 && x.getKeyEnd() == 0.5));
    assertTrue(segments.stream().anyMatch(x -> x.getNumber() == 2 && x.getKeyStart() == 0.5 && x.getKeyEnd() == 1.0));
}
use of io.pravega.controller.store.stream.StartScaleResponse in project pravega by pravega.
the class StreamMetadataTasksTest method setup.
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    StreamMetadataStore streamStore = StreamStoreFactory.createInMemoryStore(1, executor);
    // create a partial mock.
    streamStorePartialMock = spy(streamStore);
    // mock only the isTransactionOngoing call.
    doReturn(CompletableFuture.completedFuture(false)).when(streamStorePartialMock)
            .isTransactionOngoing(anyString(), anyString(), any(), any());
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    streamMetadataTasks = spy(new StreamMetadataTasks(streamStorePartialMock, hostStore, taskMetadataStore, segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key"));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStorePartialMock, hostStore, segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key");
    this.streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStorePartialMock, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStorePartialMock, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new SealStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            executor);
    consumer = new ControllerService(streamStorePartialMock, hostStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null);
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
    streamStorePartialMock.createScope(SCOPE).join();
    long start = System.currentTimeMillis();
    streamStorePartialMock.createStream(SCOPE, stream1, configuration1, start, null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments = Collections.singletonList(1);
    StartScaleResponse response = streamStorePartialMock.startScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), start + 20, false, null, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    streamStorePartialMock.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    streamStorePartialMock.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    streamStorePartialMock.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
}
use of io.pravega.controller.store.stream.StartScaleResponse in project pravega by pravega.
the class StreamMetadataTasksTest method scale.
private void scale(String scope, String stream, Map<Integer, Long> sealedSegmentsWithSize, List<AbstractMap.SimpleEntry<Double, Double>> newSegments) {
    long scaleTs = System.currentTimeMillis();
    ArrayList<Integer> sealedSegments = Lists.newArrayList(sealedSegmentsWithSize.keySet());
    StartScaleResponse response = streamStorePartialMock.startScale(scope, stream, sealedSegments, newSegments, scaleTs, false, null, executor).join();
    final List<Segment> scale1SegmentsCreated = response.getSegmentsCreated();
    streamStorePartialMock.setState(scope, stream, State.SCALING, null, executor).join();
    streamStorePartialMock.scaleNewSegmentsCreated(scope, stream, sealedSegments, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    streamStorePartialMock.scaleSegmentsSealed(scope, stream, sealedSegmentsWithSize, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
    streamStorePartialMock.setState(scope, stream, State.ACTIVE, null, executor).join();
}
use of io.pravega.controller.store.stream.StartScaleResponse in project pravega by pravega.
the class ControllerServiceTest method setup.
@Before
public void setup() throws ExecutionException, InterruptedException {
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final ScalingPolicy policy2 = ScalingPolicy.fixed(3);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
    final StreamConfiguration configuration2 = StreamConfiguration.builder().scope(SCOPE).streamName(stream2).scalingPolicy(policy2).build();
    // createScope
    streamStore.createScope(SCOPE).get();
    // region createStream
    startTs = System.currentTimeMillis();
    OperationContext context = streamStore.createContext(SCOPE, stream1);
    streamStore.createStream(SCOPE, stream1, configuration1, startTs, context, executor).get();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, context, executor);
    OperationContext context2 = streamStore.createContext(SCOPE, stream2);
    streamStore.createStream(SCOPE, stream2, configuration2, startTs, context2, executor).get();
    streamStore.setState(SCOPE, stream2, State.ACTIVE, context2, executor);
    // endregion
    // region scaleSegments
    SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.5, 0.75);
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments = Collections.singletonList(1);
    scaleTs = System.currentTimeMillis();
    StartScaleResponse startScaleResponse = streamStore.startScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), startTs + 20, false, null, executor).get();
    List<Segment> segmentCreated = startScaleResponse.getSegmentsCreated();
    streamStore.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    streamStore.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
    SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> segment4 = new SimpleEntry<>(0.5, 0.75);
    SimpleEntry<Double, Double> segment5 = new SimpleEntry<>(0.75, 1.0);
    sealedSegments = Arrays.asList(0, 1, 2);
    startScaleResponse = streamStore.startScale(SCOPE, stream2, sealedSegments, Arrays.asList(segment3, segment4, segment5), startTs + 20, false, null, executor).get();
    segmentCreated = startScaleResponse.getSegmentsCreated();
    streamStore.setState(SCOPE, stream2, State.SCALING, null, executor).get();
    streamStore.scaleNewSegmentsCreated(SCOPE, stream2, sealedSegments, segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream2, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
    // endregion
}
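Taken together, the snippets consume StartScaleResponse in the same way: startScale returns the newly created segments and the active epoch, and both values are then passed to scaleNewSegmentsCreated and scaleSegmentsSealed. A minimal sketch of that shared sequence, assuming a streamStore, executor, scope, stream, sealed-segment list, and new key ranges already set up as in the snippets above:

// 1. Start the scale; the response carries the created segments and the active epoch.
StartScaleResponse resp = streamStore.startScale(scope, stream, sealedSegments, newRanges,
        scaleTs, false, null, executor).join();
List<Segment> created = resp.getSegmentsCreated();
// 2. Mark the stream SCALING, then record the new segments against the active epoch.
streamStore.setState(scope, stream, State.SCALING, null, executor).join();
streamStore.scaleNewSegmentsCreated(scope, stream, sealedSegments, created,
        resp.getActiveEpoch(), scaleTs, null, executor).join();
// 3. Seal the old segments (size 0 here, as in the setup methods above) and return to ACTIVE.
streamStore.scaleSegmentsSealed(scope, stream,
        sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
        created, resp.getActiveEpoch(), scaleTs, null, executor).join();
streamStore.setState(scope, stream, State.ACTIVE, null, executor).join();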