Search in sources :

Example 26 with StreamConfigurationRecord

Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.

Source: class StreamTestBase, method scaleInputValidityTest.

// Validates the input checking performed by PersistentStreamBase#submitScale.
// The stream is created with 5 fixed segments covering [0.0, 1.0) in equal 0.2-wide
// key-range chunks; each numbered case below exercises one class of valid or invalid
// (segmentsToSeal, newRanges) combination. Valid submissions are rolled back via
// resetScale so the next case starts from a clean epoch transition.
@Test(timeout = 30000L)
public void scaleInputValidityTest() {
    OperationContext context = getContext();
    // random starting segment number so repeated runs produce distinct stream names
    int startingSegmentNumber = new Random().nextInt(2000);
    String name = "stream" + startingSegmentNumber;
    PersistentStreamBase stream = createStream("scope", name, System.currentTimeMillis(), 5, startingSegmentNumber);
    long timestamp = System.currentTimeMillis();
    // width of each of the 5 initial segments' key ranges
    final double keyRangeChunk = 1.0 / 5;
    // segment ids of the 5 initial segments: s0 -> [0.0,0.2), ... s4 -> [0.8,1.0)
    long s0 = startingSegmentNumber;
    long s1 = 1L + startingSegmentNumber;
    long s2 = 2L + startingSegmentNumber;
    long s3 = 3L + startingSegmentNumber;
    long s4 = 4L + startingSegmentNumber;
    VersionedMetadata<EpochTransitionRecord> etr = stream.getEpochTransition(context).join();
    List<Map.Entry<Double, Double>> newRanges = new ArrayList<>();
    // refs so the lambdas passed to assertSuppliedFutureThrows can see the
    // latest list/record while the outer locals are reassigned between cases
    AtomicReference<List<Map.Entry<Double, Double>>> newRangesRef = new AtomicReference<>(newRanges);
    AtomicReference<VersionedMetadata<EpochTransitionRecord>> etrRef = new AtomicReference<>(etr);
    // 1. empty newRanges: rejected as invalid input
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 2. simple mismatch: sealed range [0.0,0.4) but replacement only covers [0.0,0.2)
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, keyRangeChunk));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 3. simple valid match: merge s0+s1 into one segment covering [0.0,0.4)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    etr = stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etr, context).join();
    // roll back the accepted scale so the next case starts clean
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 4. valid 2 disjoint merges: s0+s1 -> [0.0,0.4) and s3+s4 -> [0.6,1.0)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 5. valid 1 merge and 1 disjoint replacement
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(keyRangeChunk, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 6. valid 1 merge, 2 splits: new ranges exactly tile [0.0,0.4) and [0.6,1.0)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 7. 1 merge, 1 split and 1 invalid split: top range stops at 0.99, leaving a gap to 1.0
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 0.99));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 8. valid: segments to seal need not be supplied in sorted order
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 9. valid: new ranges need not be supplied in sorted order either
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 10. invalid input range where low == high (empty range [0.2,0.2))
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 11. invalid input range where low > high ([0.3,0.2))
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 12. invalid overlapping key ranges: [0.2,0.4) overlaps [0.3,0.6)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 3 * keyRangeChunk));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1, s2), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 13. invalid overlapping key ranges -- a contains b
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.33));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 14. invalid overlapping key ranges -- b contains a (with b.low == a.low)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.33));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 15. invalid overlapping key ranges: b.low < a.high
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.35));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 16. invalid: gap between ranges (a.high < b.low) so sealed span is not fully covered
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.25));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // actually scale the stream so the epoch transition fetched below becomes stale/inconsistent
    newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.4));
    scaleStream(stream, System.currentTimeMillis(), Lists.newArrayList(s0, s1), newRanges, Collections.emptyMap());
    // 17. precondition failure: s1 was sealed by the scale above, so it cannot be sealed again
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    etrRef.set(stream.getEpochTransition(context).join());
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
    etrRef.set(stream.getEpochTransition(context).join());
    // get current number of segments.
    List<Long> segments = stream.getActiveSegments(context).join().stream().map(StreamSegmentRecord::segmentId).collect(Collectors.toList());
    // set minimum number of segments to segments.size so any scale-down violates the policy.
    stream.startUpdateConfiguration(StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(segments.size())).build(), context).join();
    VersionedMetadata<StreamConfigurationRecord> configRecord = stream.getVersionedConfigurationRecord(context).join();
    stream.completeUpdateConfiguration(configRecord, context).join();
    // attempt a scale down (all active segments -> one), which should be rejected in submit scale.
    newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 1.0));
    newRangesRef.set(newRanges);
    AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(segments, newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
}
Also used : ArrayList(java.util.ArrayList) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) AbstractMap(java.util.AbstractMap) Random(java.util.Random) List(java.util.List) ArrayList(java.util.ArrayList) LinkedList(java.util.LinkedList) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) TestOperationContext(io.pravega.controller.store.TestOperationContext) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArgumentMatchers.anyLong(org.mockito.ArgumentMatchers.anyLong) AtomicLong(java.util.concurrent.atomic.AtomicLong) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashMap(java.util.HashMap) AbstractMap(java.util.AbstractMap) Test(org.junit.Test)

Example 27 with StreamConfigurationRecord

Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.

Source: class StreamTestBase, method createStream.

/**
 * Creates and activates a stream with {@code numOfSegments} fixed segments, then
 * lowers the configured minimum segment count to 1 so tests can exercise scale-downs.
 *
 * @param scope                  scope to create the stream in (created if absent)
 * @param name                   stream name
 * @param time                   creation timestamp
 * @param numOfSegments          number of initial fixed segments
 * @param startingSegmentNumber  segment number to start numbering from
 * @param chunkSize              history chunk size for the underlying store
 * @param shardSize              sealed-segments shard size for the underlying store
 * @return the created, ACTIVE stream
 */
private PersistentStreamBase createStream(String scope, String name, long time, int numOfSegments, int startingSegmentNumber, int chunkSize, int shardSize) {
    OperationContext ctx = getContext();
    createScope(scope, ctx);
    PersistentStreamBase created = getStream(scope, name, chunkSize, shardSize);
    // create the stream with a fixed scaling policy and move it to ACTIVE
    StreamConfiguration initialConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(numOfSegments))
            .build();
    created.create(initialConfig, time, startingSegmentNumber, ctx)
            .thenCompose(v -> created.updateState(State.ACTIVE, ctx))
            .join();
    // relax the minimum segment count to 1 so that scale-down operations are permitted
    StreamConfiguration relaxedConfig = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(1))
            .build();
    created.startUpdateConfiguration(relaxedConfig, ctx).join();
    VersionedMetadata<StreamConfigurationRecord> pendingConfig = created.getVersionedConfigurationRecord(ctx).join();
    created.completeUpdateConfiguration(pendingConfig, ctx).join();
    return created;
}
Also used : TestOperationContext(io.pravega.controller.store.TestOperationContext) StreamSegmentRecord(io.pravega.controller.store.stream.records.StreamSegmentRecord) AssertExtensions(io.pravega.test.common.AssertExtensions) Random(java.util.Random) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) Mockito.anyBoolean(org.mockito.Mockito.anyBoolean) Map(java.util.Map) After(org.junit.After) Mockito.doAnswer(org.mockito.Mockito.doAnswer) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) StreamTruncationRecord(io.pravega.controller.store.stream.records.StreamTruncationRecord) ImmutableMap(com.google.common.collect.ImmutableMap) Set(java.util.Set) ActiveTxnRecord(io.pravega.controller.store.stream.records.ActiveTxnRecord) UUID(java.util.UUID) Collectors(java.util.stream.Collectors) List(java.util.List) Optional(java.util.Optional) HistoryTimeSeries(io.pravega.controller.store.stream.records.HistoryTimeSeries) Futures(io.pravega.common.concurrent.Futures) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) ArgumentMatchers.anyLong(org.mockito.ArgumentMatchers.anyLong) CommittingTransactionsRecord(io.pravega.controller.store.stream.records.CommittingTransactionsRecord) NameUtils.computeSegmentId(io.pravega.shared.NameUtils.computeSegmentId) Exceptions(io.pravega.common.Exceptions) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashMap(java.util.HashMap) Mockito.spy(org.mockito.Mockito.spy) AtomicReference(java.util.concurrent.atomic.AtomicReference) Supplier(java.util.function.Supplier) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) Lists(com.google.common.collect.Lists) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) LinkedList(java.util.LinkedList) SealedSegmentsMapShard(io.pravega.controller.store.stream.records.SealedSegmentsMapShard) Before(org.junit.Before) 
NameUtils.getEpoch(io.pravega.shared.NameUtils.getEpoch) NameUtils(io.pravega.shared.NameUtils) WriterMark(io.pravega.controller.store.stream.records.WriterMark) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) Mockito.times(org.mockito.Mockito.times) Assert.assertNotEquals(org.junit.Assert.assertNotEquals) Mockito.verify(org.mockito.Mockito.verify) AtomicLong(java.util.concurrent.atomic.AtomicLong) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord) AbstractMap(java.util.AbstractMap) EpochRecord(io.pravega.controller.store.stream.records.EpochRecord) Version(io.pravega.controller.store.Version) TestOperationContext(io.pravega.controller.store.TestOperationContext) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Assert.assertEquals(org.junit.Assert.assertEquals) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord)

Example 28 with StreamConfigurationRecord

Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.

Source: class PravegaTablesStreamMetadataStoreTest, method testScaleMetadata.

/**
 * Verifies that getScaleMetadata reports one incident per epoch: the creation
 * epoch (3 segments) followed by one 2-segment incident per scale performed.
 */
@Test
public void testScaleMetadata() throws Exception {
    final String scope = "testScopeScale";
    final String streamName = "testStreamScale1";
    StreamConfiguration configuration = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(3))
            .build();
    // every scale replaces the active segments with these two halves of the key space
    SimpleEntry<Double, Double> lowerHalf = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> upperHalf = new SimpleEntry<>(0.5, 1.0);
    List<Map.Entry<Double, Double>> newRanges = Arrays.asList(lowerHalf, upperHalf);
    store.createScope(scope, null, executor).get();
    store.createStream(scope, streamName, configuration, System.currentTimeMillis(), null, executor).get();
    store.setState(scope, streamName, State.ACTIVE, null, executor).get();
    // set minimum number of segments to 1 so that we can also test scale downs
    configuration = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    store.startUpdateConfiguration(scope, streamName, configuration, null, executor).join();
    VersionedMetadata<StreamConfigurationRecord> configRecord = store.getConfigurationRecord(scope, streamName, null, executor).join();
    store.completeUpdateConfiguration(scope, streamName, configRecord, null, executor).join();
    // before any scale there is exactly one incident: the 3-segment creation epoch
    List<ScaleMetadata> scaleIncidents = store.getScaleMetadata(scope, streamName, 0, Long.MAX_VALUE, null, executor).get();
    assertTrue(scaleIncidents.size() == 1);
    assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
    // scale three times; after each, a new 2-segment incident must be appended
    for (int round = 1; round <= 3; round++) {
        scale(scope, streamName, scaleIncidents.get(round - 1).getSegments(), newRanges);
        scaleIncidents = store.getScaleMetadata(scope, streamName, 0, Long.MAX_VALUE, null, executor).get();
        assertTrue(scaleIncidents.size() == round + 1);
        assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
        for (int i = 1; i < scaleIncidents.size(); i++) {
            assertTrue(scaleIncidents.get(i).getSegments().size() == 2);
        }
    }
}
Also used : ScalingPolicy(io.pravega.client.stream.ScalingPolicy) SimpleEntry(java.util.AbstractMap.SimpleEntry) SimpleEntry(java.util.AbstractMap.SimpleEntry) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) Mockito.anyString(org.mockito.Mockito.anyString) StreamConfigurationRecord(io.pravega.controller.store.stream.records.StreamConfigurationRecord) Test(org.junit.Test)

Aggregations

StreamConfigurationRecord (io.pravega.controller.store.stream.records.StreamConfigurationRecord)28 StreamConfiguration (io.pravega.client.stream.StreamConfiguration)23 Test (org.junit.Test)22 ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString)16 ScalingPolicy (io.pravega.client.stream.ScalingPolicy)14 VersionedMetadata (io.pravega.controller.store.VersionedMetadata)10 HashMap (java.util.HashMap)10 CompletableFuture (java.util.concurrent.CompletableFuture)10 StreamTruncationRecord (io.pravega.controller.store.stream.records.StreamTruncationRecord)9 ArgumentMatchers.anyLong (org.mockito.ArgumentMatchers.anyLong)9 AtomicLong (java.util.concurrent.atomic.AtomicLong)8 RetentionPolicy (io.pravega.client.stream.RetentionPolicy)7 Lists (com.google.common.collect.Lists)6 Segment (io.pravega.client.segment.impl.Segment)6 ReaderGroupConfig (io.pravega.client.stream.ReaderGroupConfig)6 Stream (io.pravega.client.stream.Stream)6 StreamCut (io.pravega.client.stream.StreamCut)6 StreamCutImpl (io.pravega.client.stream.impl.StreamCutImpl)6 Exceptions (io.pravega.common.Exceptions)6 ExecutorServiceHelpers (io.pravega.common.concurrent.ExecutorServiceHelpers)6