Search in sources :

Example 6 with EpochTransitionRecord

use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.

From the class StreamMetadataStoreTest, the method sizeTest:

@Test(timeout = 30000)
public void sizeTest() throws Exception {
    // Validates getSizeTillStreamCut: the size up to a stream cut must equal the sum of
    // the per-segment offsets in the cut (plus the sealed sizes of predecessor segments),
    // both within a single epoch and across a scale boundary. Also checks that the
    // computed cuts can be added to the retention set for a SIZE-based retention policy.
    final String scope = "ScopeSize";
    final String stream = "StreamSize";
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final RetentionPolicy retentionPolicy = RetentionPolicy.builder().retentionType(RetentionPolicy.RetentionType.SIZE).retentionParam(100L).build();
    final StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(policy).retentionPolicy(retentionPolicy).build();
    long start = System.currentTimeMillis();
    store.createScope(scope, null, executor).get();
    store.createStream(scope, stream, configuration, start, null, executor).get();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    // Register the stream with the retention bucket service and confirm it is listed.
    bucketStore.addStreamToBucketStore(BucketStore.ServiceType.RetentionService, scope, stream, executor).get();
    Set<String> streams = bucketStore.getStreamsForBucket(BucketStore.ServiceType.RetentionService, 0, executor).get();
    assertTrue(streams.contains(String.format("%s/%s", scope, stream)));
    // region Size Computation on stream cuts on epoch 0
    // Cut at offset 10 on both segments of epoch 0 => total size 20.
    Map<Long, Long> map1 = new HashMap<>();
    map1.put(0L, 10L);
    map1.put(1L, 10L);
    Long size = store.getSizeTillStreamCut(scope, stream, map1, Optional.empty(), null, executor).join();
    assertEquals(20L, (long) size);
    long recordingTime = System.currentTimeMillis();
    StreamCutRecord streamCut1 = new StreamCutRecord(recordingTime, size, ImmutableMap.copyOf(map1));
    store.addStreamCutToRetentionSet(scope, stream, streamCut1, null, executor).get();
    // Cut at offset 20 on both segments => total size 40.
    Map<Long, Long> map2 = new HashMap<>();
    map2.put(0L, 20L);
    map2.put(1L, 20L);
    size = store.getSizeTillStreamCut(scope, stream, map2, Optional.empty(), null, executor).join();
    assertEquals(40L, (long) size);
    StreamCutRecord streamCut2 = new StreamCutRecord(recordingTime + 10, size, ImmutableMap.copyOf(map2));
    store.addStreamCutToRetentionSet(scope, stream, streamCut2, null, executor).get();
    // Cut at offset 30 on both segments => total size 60.
    Map<Long, Long> map3 = new HashMap<>();
    map3.put(0L, 30L);
    map3.put(1L, 30L);
    size = store.getSizeTillStreamCut(scope, stream, map3, Optional.empty(), null, executor).join();
    assertEquals(60L, (long) size);
    // Use the just-asserted computed size instead of a second hard-coded 60L literal.
    StreamCutRecord streamCut3 = new StreamCutRecord(recordingTime + 20, size, ImmutableMap.copyOf(map3));
    store.addStreamCutToRetentionSet(scope, stream, streamCut3, null, executor).get();
    // endregion
    // region Size Computation on multiple epochs
    // Scale: seal segments 0 and 1 (reported at 40 bytes each) and replace them with two new ranges.
    long scaleTs = System.currentTimeMillis();
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.5, 1.0);
    List<Long> scale1SealedSegments = Lists.newArrayList(0L, 1L);
    VersionedMetadata<EpochTransitionRecord> versioned = store.submitScale(scope, stream, scale1SealedSegments, Arrays.asList(segment2, segment3), scaleTs, null, null, executor).join();
    VersionedMetadata<State> state = store.getVersionedState(scope, stream, null, executor).get();
    state = store.updateVersionedState(scope, stream, State.SCALING, state, null, executor).get();
    store.startScale(scope, stream, false, versioned, state, null, executor).join();
    store.scaleCreateNewEpochs(scope, stream, versioned, null, executor).join();
    store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 40L)), versioned, null, executor).join();
    store.completeScale(scope, stream, versioned, null, executor).join();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    // complex stream cut - across two epochs: 40 on old segment 0 + sealed 40 of
    // segment 1 + 10 on new segment 3 => 90.
    Map<Long, Long> map4 = new HashMap<>();
    map4.put(0L, 40L);
    map4.put(computeSegmentId(3, 1), 10L);
    size = store.getSizeTillStreamCut(scope, stream, map4, Optional.empty(), null, executor).join();
    // Consistent primitive-style assertion (was assertEquals(Long.valueOf(90L), size)).
    assertEquals(90L, (long) size);
    StreamCutRecord streamCut4 = new StreamCutRecord(recordingTime + 30, size, ImmutableMap.copyOf(map4));
    store.addStreamCutToRetentionSet(scope, stream, streamCut4, null, executor).get();
    // simple stream cut on epoch 2: sealed 40 + 40 of old segments + 10 + 10 on the
    // new segments => 100.
    Map<Long, Long> map5 = new HashMap<>();
    map5.put(computeSegmentId(2, 1), 10L);
    map5.put(computeSegmentId(3, 1), 10L);
    size = store.getSizeTillStreamCut(scope, stream, map5, Optional.empty(), null, executor).join();
    // assertEquals reports expected/actual on failure, unlike assertTrue on a boxed comparison.
    assertEquals(100L, (long) size);
    // NOTE(review): streamCut5 reuses recordingTime + 30, the same timestamp as streamCut4 —
    // confirm the retention set is meant to accept duplicate recording times here.
    StreamCutRecord streamCut5 = new StreamCutRecord(recordingTime + 30, size, ImmutableMap.copyOf(map5));
    store.addStreamCutToRetentionSet(scope, stream, streamCut5, null, executor).get();
// endregion
}
Also used : Arrays(java.util.Arrays) StreamCut(io.pravega.client.stream.StreamCut) ArgumentMatchers(org.mockito.ArgumentMatchers) StreamSegmentRecord(io.pravega.controller.store.stream.records.StreamSegmentRecord) AssertExtensions(io.pravega.test.common.AssertExtensions) Random(java.util.Random) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) Pair(org.apache.commons.lang3.tuple.Pair) Stream(io.pravega.client.stream.Stream) Duration(java.time.Duration) Map(java.util.Map) After(org.junit.After) Mockito.doAnswer(org.mockito.Mockito.doAnswer) Controller(io.pravega.controller.stream.api.grpc.v1.Controller) ReaderGroupConfig(io.pravega.client.stream.ReaderGroupConfig) DeleteScopeStatus(io.pravega.controller.stream.api.grpc.v1.Controller.DeleteScopeStatus) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) StreamCutReferenceRecord(io.pravega.controller.store.stream.records.StreamCutReferenceRecord) StreamTruncationRecord(io.pravega.controller.store.stream.records.StreamTruncationRecord) ImmutableMap(com.google.common.collect.ImmutableMap) Set(java.util.Set) CompletionException(java.util.concurrent.CompletionException) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) List(java.util.List) Assert.assertFalse(org.junit.Assert.assertFalse) Optional(java.util.Optional) HistoryTimeSeries(io.pravega.controller.store.stream.records.HistoryTimeSeries) Futures(io.pravega.common.concurrent.Futures) Segment(io.pravega.client.segment.impl.Segment) CommittingTransactionsRecord(io.pravega.controller.store.stream.records.CommittingTransactionsRecord) NameUtils.computeSegmentId(io.pravega.shared.NameUtils.computeSegmentId) RetentionPolicy(io.pravega.client.stream.RetentionPolicy) Exceptions(io.pravega.common.Exceptions) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) 
RetentionSet(io.pravega.controller.store.stream.records.RetentionSet) Mockito.spy(org.mockito.Mockito.spy) ArrayList(java.util.ArrayList) Strings(com.google.common.base.Strings) ReaderGroupConfigRecord(io.pravega.controller.store.stream.records.ReaderGroupConfigRecord) Lists(com.google.common.collect.Lists) StreamCutImpl(io.pravega.client.stream.impl.StreamCutImpl) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) RecordHelper(io.pravega.controller.store.stream.records.RecordHelper) SimpleEntry(java.util.AbstractMap.SimpleEntry) SealedSegmentsMapShard(io.pravega.controller.store.stream.records.SealedSegmentsMapShard) Before(org.junit.Before) NameUtils(io.pravega.shared.NameUtils) Assert.assertNotNull(org.junit.Assert.assertNotNull) WriterMark(io.pravega.controller.store.stream.records.WriterMark) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) StreamCutRecord(io.pravega.controller.store.stream.records.StreamCutRecord) TxnResource(io.pravega.controller.store.task.TxnResource) Assert.assertNotEquals(org.junit.Assert.assertNotEquals) ExecutionException(java.util.concurrent.ExecutionException) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord) AbstractMap(java.util.AbstractMap) EpochRecord(io.pravega.controller.store.stream.records.EpochRecord) Assert.assertNull(org.junit.Assert.assertNull) Version(io.pravega.controller.store.Version) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Assert(org.junit.Assert) Collections(java.util.Collections) Mockito.reset(org.mockito.Mockito.reset) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Assert.assertEquals(org.junit.Assert.assertEquals) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) HashMap(java.util.HashMap) SimpleEntry(java.util.AbstractMap.SimpleEntry) RetentionPolicy(io.pravega.client.stream.RetentionPolicy) 
StreamConfiguration(io.pravega.client.stream.StreamConfiguration) StreamCutRecord(io.pravega.controller.store.stream.records.StreamCutRecord) Test(org.junit.Test)

Example 7 with EpochTransitionRecord

use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.

From the class StreamTest, the method testConcurrentGetSuccessorScale:

// Verifies that getSuccessorsWithPredecessors answers correctly while a scale is only
// partially applied (new epoch created, old segments not yet sealed), and again after
// the scale completes. The history has a partial record between those two queries.
private void testConcurrentGetSuccessorScale(StreamMetadataStore store, BiFunction<String, String, Stream> createStream) throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(1);
    final String streamName = "test";
    String scopeName = "test";
    store.createScope(scopeName, null, executorService()).join();
    // Spy on the stream so individual store operations could be intercepted if needed.
    Stream stream = spy(createStream.apply(scopeName, streamName));
    StreamConfiguration streamConfig = StreamConfiguration.builder().scalingPolicy(policy).build();
    store.createStream(scopeName, streamName, streamConfig, System.currentTimeMillis(), null, executorService()).join();
    store.setState(scopeName, streamName, State.ACTIVE, null, executorService()).join();
    // Split the single segment [0.0, 1.0) into two halves.
    List<Map.Entry<Double, Double>> newRanges;
    newRanges = Arrays.asList(new AbstractMap.SimpleEntry<>(0.0, 0.5), new AbstractMap.SimpleEntry<>(0.5, 1.0));
    long scale = System.currentTimeMillis();
    ArrayList<Long> sealedSegments = Lists.newArrayList(0L);
    // Segment ids of the two replacement segments created in epoch 1.
    long one = NameUtils.computeSegmentId(1, 1);
    long two = NameUtils.computeSegmentId(2, 1);
    OperationContext context = getContext();
    // Drive the scale workflow step by step: submit -> SCALING -> start -> create new epoch.
    VersionedMetadata<EpochTransitionRecord> response = stream.submitScale(sealedSegments, newRanges, scale, null, context).join();
    Map<Long, Map.Entry<Double, Double>> newSegments = response.getObject().getNewSegmentsWithRange();
    VersionedMetadata<State> state = stream.getVersionedState(context).join();
    state = stream.updateVersionedState(state, State.SCALING, context).join();
    stream.startScale(false, response, state, context).join();
    stream.scaleCreateNewEpoch(response, context).join();
    // history table has a partial record at this point.
    // now we could have sealed the segments so get successors could be called.
    Map<Long, List<Long>> successors = stream.getSuccessorsWithPredecessors(0, context).join().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), Map.Entry::getValue));
    // Even mid-scale, segment 0's successors must be the two new segments.
    assertTrue(successors.containsKey(one) && successors.containsKey(two));
    // reset mock so that we can resume scale operation
    // NOTE(review): the comment above mentions resetting a mock, but no reset(...) call
    // is present — confirm whether a reset was intended here.
    stream.scaleOldSegmentsSealed(sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), response, context).join();
    stream.completeScale(response, context).join();
    // After the scale completes, the answer must be unchanged.
    successors = stream.getSuccessorsWithPredecessors(0, context).join().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), Map.Entry::getValue));
    assertTrue(successors.containsKey(one) && successors.containsKey(two));
}
Also used : TestOperationContext(io.pravega.controller.store.TestOperationContext) Arrays(java.util.Arrays) SegmentHelper(io.pravega.controller.server.SegmentHelper) BiFunction(java.util.function.BiFunction) Exceptions(io.pravega.common.Exceptions) Cleanup(lombok.Cleanup) Mockito.spy(org.mockito.Mockito.spy) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ArrayList(java.util.ArrayList) RetryOneTime(org.apache.curator.retry.RetryOneTime) Lists(com.google.common.collect.Lists) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) StateRecord(io.pravega.controller.store.stream.records.StateRecord) Map(java.util.Map) After(org.junit.After) ClassRule(org.junit.ClassRule) Before(org.junit.Before) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) NameUtils(io.pravega.shared.NameUtils) SegmentHelperMock(io.pravega.controller.mocks.SegmentHelperMock) Assert.assertTrue(org.junit.Assert.assertTrue) PravegaZkCuratorResource(io.pravega.controller.PravegaZkCuratorResource) Test(org.junit.Test) Collectors(java.util.stream.Collectors) PravegaTablesScope(io.pravega.controller.store.PravegaTablesScope) ExecutionException(java.util.concurrent.ExecutionException) ZKStoreHelper(io.pravega.controller.store.ZKStoreHelper) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord) AbstractMap(java.util.AbstractMap) List(java.util.List) ThreadPooledTestSuite(io.pravega.test.common.ThreadPooledTestSuite) TestOperationContext(io.pravega.controller.store.TestOperationContext) RetryPolicy(org.apache.curator.RetryPolicy) PravegaTablesStoreHelper(io.pravega.controller.store.PravegaTablesStoreHelper) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Futures(io.pravega.common.concurrent.Futures) Assert.assertEquals(org.junit.Assert.assertEquals) GrpcAuthHelper(io.pravega.controller.server.security.auth.GrpcAuthHelper) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) 
EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ArrayList(java.util.ArrayList) List(java.util.List) Map(java.util.Map) AbstractMap(java.util.AbstractMap)

Example 8 with EpochTransitionRecord

use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.

From the class TaskTest, the method setUp:

@Before
public void setUp() throws Exception {
    // Starts an embedded ZooKeeper server, wires up the stores and metadata tasks, then
    // creates two streams and runs one complete scale workflow on each so that tests
    // start against streams that are past epoch 0.
    zkServer = new TestingServerStarter().start();
    // NOTE(review): starting zkServer again right after TestingServerStarter().start()
    // looks redundant — confirm the starter returns an unstarted server.
    zkServer.start();
    cli = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new RetryOneTime(2000));
    cli.start();
    streamStore = getStream();
    taskMetadataStore = TaskStoreFactory.createZKStore(cli, executor);
    segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    streamMetadataTasks = new StreamMetadataTasks(streamStore, StreamStoreFactory.createInMemoryBucketStore(), taskMetadataStore, segmentHelperMock, executor, HOSTNAME, GrpcAuthHelper.getDisabledAuthHelper());
    final String stream2 = "stream2";
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final ScalingPolicy policy2 = ScalingPolicy.fixed(3);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scalingPolicy(policy1).build();
    final StreamConfiguration configuration2 = StreamConfiguration.builder().scalingPolicy(policy2).build();
    // region createStream
    streamStore.createScope(SCOPE, null, executor).join();
    long start = System.currentTimeMillis();
    streamStore.createStream(SCOPE, stream1, configuration1, start, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).join();
    streamStore.createStream(SCOPE, stream2, configuration2, start, null, executor).join();
    streamStore.setState(SCOPE, stream2, State.ACTIVE, null, executor).join();
    // endregion
    // region scaleSegments
    // Scale stream1: seal segment 1 and split its range [0.5, 1.0) into two halves.
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Long> sealedSegments = Collections.singletonList(1L);
    VersionedMetadata<EpochTransitionRecord> versioned = streamStore.submitScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), start + 20, null, null, executor).get();
    VersionedMetadata<State> state = streamStore.getVersionedState(SCOPE, stream1, null, executor).join();
    state = streamStore.updateVersionedState(SCOPE, stream1, State.SCALING, state, null, executor).get();
    versioned = streamStore.startScale(SCOPE, stream1, false, versioned, state, null, executor).join();
    streamStore.scaleCreateNewEpochs(SCOPE, stream1, versioned, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, null, executor).get();
    streamStore.completeScale(SCOPE, stream1, versioned, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    // Scale stream2: seal all three segments and replace them with three new ranges.
    AbstractMap.SimpleEntry<Double, Double> segment3 = new AbstractMap.SimpleEntry<>(0.0, 0.5);
    AbstractMap.SimpleEntry<Double, Double> segment4 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment5 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Long> sealedSegments1 = Arrays.asList(0L, 1L, 2L);
    versioned = streamStore.submitScale(SCOPE, stream2, sealedSegments1, Arrays.asList(segment3, segment4, segment5), start + 20, null, null, executor).get();
    state = streamStore.getVersionedState(SCOPE, stream2, null, executor).join();
    state = streamStore.updateVersionedState(SCOPE, stream2, State.SCALING, state, null, executor).get();
    versioned = streamStore.startScale(SCOPE, stream2, false, versioned, state, null, executor).join();
    streamStore.scaleCreateNewEpochs(SCOPE, stream2, versioned, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream2, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, null, executor).get();
    streamStore.completeScale(SCOPE, stream2, versioned, null, executor).join();
    // Fix: this second scale workflow operates on stream2, so restore stream2 (not
    // stream1, which was already set ACTIVE above) to the ACTIVE state; otherwise
    // stream2 is left in SCALING.
    streamStore.setState(SCOPE, stream2, State.ACTIVE, null, executor).get();
// endregion
}
Also used : Arrays(java.util.Arrays) AssertExtensions(io.pravega.test.common.AssertExtensions) Cleanup(lombok.Cleanup) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) TaskMetadataStore(io.pravega.controller.store.task.TaskMetadataStore) Map(java.util.Map) After(org.junit.After) LockFailedException(io.pravega.controller.store.task.LockFailedException) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) CreateStreamStatus(io.pravega.controller.stream.api.grpc.v1.Controller.CreateStreamStatus) CompletionException(java.util.concurrent.CompletionException) UUID(java.util.UUID) EqualsAndHashCode(lombok.EqualsAndHashCode) Collectors(java.util.stream.Collectors) TaggedResource(io.pravega.controller.store.task.TaggedResource) Serializable(java.io.Serializable) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) CuratorFramework(org.apache.curator.framework.CuratorFramework) Assert.assertFalse(org.junit.Assert.assertFalse) Optional(java.util.Optional) Resource(io.pravega.controller.store.task.Resource) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) GrpcAuthHelper(io.pravega.controller.server.security.auth.GrpcAuthHelper) CuratorFrameworkFactory(org.apache.curator.framework.CuratorFrameworkFactory) StreamStoreFactory(io.pravega.controller.store.stream.StreamStoreFactory) SegmentHelper(io.pravega.controller.server.SegmentHelper) CompletableFuture(java.util.concurrent.CompletableFuture) ArrayList(java.util.ArrayList) RetryOneTime(org.apache.curator.retry.RetryOneTime) TestingServerStarter(io.pravega.test.common.TestingServerStarter) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) TestingServer(org.apache.curator.test.TestingServer) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) HostMonitorConfigImpl(io.pravega.controller.store.host.impl.HostMonitorConfigImpl) 
Before(org.junit.Before) SegmentHelperMock(io.pravega.controller.mocks.SegmentHelperMock) TestTasks(io.pravega.controller.task.Stream.TestTasks) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) HostStoreFactory(io.pravega.controller.store.host.HostStoreFactory) ExecutionException(java.util.concurrent.ExecutionException) AbstractMap(java.util.AbstractMap) TaskStoreFactory(io.pravega.controller.store.task.TaskStoreFactory) HostControllerStore(io.pravega.controller.store.host.HostControllerStore) Data(lombok.Data) State(io.pravega.controller.store.stream.State) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Assert(org.junit.Assert) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Assert.assertEquals(org.junit.Assert.assertEquals) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) RetryOneTime(org.apache.curator.retry.RetryOneTime) TestingServerStarter(io.pravega.test.common.TestingServerStarter) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) AbstractMap(java.util.AbstractMap) State(io.pravega.controller.store.stream.State) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) Before(org.junit.Before)

Example 9 with EpochTransitionRecord

use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.

From the class ScaleRequestHandlerTest, the method concurrentDistinctScaleRun:

// concurrent run of scale 1 intermixed with scale 2
// Runs the same first scale event through two handlers: handler1 uses a spied store that
// blocks inside funcToWaitOn (via setMockLatch), while handler2 completes that scale and
// then a second, distinct scale. After the latch is released, handler1's run must fail
// with firstExceptionPredicate and the spied store methods must have been invoked the
// number of times given in invocationCount.
private void concurrentDistinctScaleRun(String stream, String funcToWaitOn, boolean isManual, Predicate<Throwable> firstExceptionPredicate, Map<String, Integer> invocationCount) throws Exception {
    StreamMetadataStore streamStore1 = getStore();
    StreamMetadataStore streamStore1Spied = spy(getStore());
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(1, 2, 1)).build();
    streamStore1.createStream(scope, stream, config, System.currentTimeMillis(), null, executor).join();
    streamStore1.setState(scope, stream, State.ACTIVE, null, executor).join();
    // "wait" blocks the spied store inside funcToWaitOn; "signal" tells us it got there.
    CompletableFuture<Void> wait = new CompletableFuture<>();
    CompletableFuture<Void> signal = new CompletableFuture<>();
    ScaleOpEvent event = new ScaleOpEvent(scope, stream, Lists.newArrayList(0L), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), isManual, System.currentTimeMillis(), System.currentTimeMillis());
    if (isManual) {
        // Manual scales must be submitted to the store before the event is processed.
        streamStore1.submitScale(scope, stream, Lists.newArrayList(0L), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), System.currentTimeMillis(), null, null, executor).join();
    }
    StreamMetadataStore streamStore2 = getStore();
    ScaleOperationTask scaleRequestHandler1 = new ScaleOperationTask(streamMetadataTasks, streamStore1Spied, executor);
    ScaleOperationTask scaleRequestHandler2 = new ScaleOperationTask(streamMetadataTasks, streamStore2, executor);
    setMockLatch(streamStore1, streamStore1Spied, funcToWaitOn, signal, wait);
    // Kick off handler1 asynchronously; it will stall inside funcToWaitOn.
    CompletableFuture<Void> future1 = CompletableFuture.completedFuture(null).thenComposeAsync(v -> scaleRequestHandler1.execute(event), executor);
    signal.join();
    // let this run to completion. this should succeed
    scaleRequestHandler2.execute(event).join();
    // Second, distinct scale on the segment created by the first scale.
    long one = NameUtils.computeSegmentId(1, 1);
    ScaleOpEvent event2 = new ScaleOpEvent(scope, stream, Lists.newArrayList(one), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), isManual, System.currentTimeMillis(), System.currentTimeMillis());
    if (isManual) {
        streamStore1.submitScale(scope, stream, Lists.newArrayList(one), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), System.currentTimeMillis(), null, null, executor).join();
    }
    scaleRequestHandler2.execute(event2).join();
    // now complete wait latch.
    wait.complete(null);
    AssertExtensions.assertSuppliedFutureThrows("first scale should fail", () -> future1, firstExceptionPredicate);
    // Verify the spied store saw exactly the expected number of workflow invocations.
    verify(streamStore1Spied, times(invocationCount.get("startScale"))).startScale(anyString(), anyString(), anyBoolean(), any(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("scaleCreateNewEpochs"))).scaleCreateNewEpochs(anyString(), anyString(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("scaleSegmentsSealed"))).scaleSegmentsSealed(anyString(), anyString(), any(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("completeScale"))).completeScale(anyString(), anyString(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("updateVersionedState"))).updateVersionedState(anyString(), anyString(), any(), any(), any(), any());
    // validate scale done
    VersionedMetadata<EpochTransitionRecord> versioned = streamStore1.getEpochTransition(scope, stream, null, executor).join();
    assertEquals(EpochTransitionRecord.EMPTY, versioned.getObject());
    assertEquals(4, getVersionNumber(versioned));
    assertEquals(2, streamStore1.getActiveEpoch(scope, stream, null, true, executor).join().getEpoch());
    assertEquals(State.ACTIVE, streamStore1.getState(scope, stream, true, null, executor).join());
    streamStore1.close();
    streamStore2.close();
}
Also used : EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) ScaleOperationTask(io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask) ScaleOpEvent(io.pravega.shared.controller.event.ScaleOpEvent) AbstractMap(java.util.AbstractMap) CompletableFuture(java.util.concurrent.CompletableFuture) StreamConfiguration(io.pravega.client.stream.StreamConfiguration)

Example 10 with EpochTransitionRecord

use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.

From the class ControllerMetadataJsonSerializerTest, the method testEpochTransitionRecord:

@Test
public void testEpochTransitionRecord() {
    // Round-trips a populated EpochTransitionRecord through the JSON serializer:
    // two segments being sealed, two replacement segments with their key ranges.
    ImmutableSet<Long> sealed = ImmutableSet.of(1L, 2L);
    ImmutableMap<Long, Map.Entry<Double, Double>> created =
            ImmutableMap.of(3L, Map.entry(0.1, 0.2), 4L, Map.entry(0.3, 0.4));
    EpochTransitionRecord record = new EpochTransitionRecord(10, 100L, sealed, created);
    testRecordSerialization(record, EpochTransitionRecord.class);
}
Also used : EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) Test(org.junit.Test)

Aggregations

EpochTransitionRecord (io.pravega.controller.store.stream.records.EpochTransitionRecord)24 StreamConfiguration (io.pravega.client.stream.StreamConfiguration)20 VersionedMetadata (io.pravega.controller.store.VersionedMetadata)18 Test (org.junit.Test)18 AbstractMap (java.util.AbstractMap)17 ArrayList (java.util.ArrayList)17 List (java.util.List)17 Map (java.util.Map)17 CompletableFuture (java.util.concurrent.CompletableFuture)16 Collectors (java.util.stream.Collectors)16 Exceptions (io.pravega.common.Exceptions)15 Futures (io.pravega.common.concurrent.Futures)15 EpochRecord (io.pravega.controller.store.stream.records.EpochRecord)15 StreamConfigurationRecord (io.pravega.controller.store.stream.records.StreamConfigurationRecord)15 Collections (java.util.Collections)15 UUID (java.util.UUID)15 Lists (com.google.common.collect.Lists)14 ScalingPolicy (io.pravega.client.stream.ScalingPolicy)14 Optional (java.util.Optional)14 ImmutableMap (com.google.common.collect.ImmutableMap)13