Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.
In class StreamTestBase, method scaleInputValidityTest:
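This test drives PersistentStreamBase.submitScale with a series of valid and invalid combinations of segments-to-seal and new key ranges, verifying that malformed input is rejected with InputInvalidException and that stale or configuration-violating requests fail with PreConditionFailureException.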
@Test(timeout = 30000L)
public void scaleInputValidityTest() {
    OperationContext context = getContext();
    int startingSegmentNumber = new Random().nextInt(2000);
    String name = "stream" + startingSegmentNumber;
    PersistentStreamBase stream = createStream("scope", name, System.currentTimeMillis(), 5, startingSegmentNumber);
    long timestamp = System.currentTimeMillis();
    final double keyRangeChunk = 1.0 / 5;
    long s0 = startingSegmentNumber;
    long s1 = 1L + startingSegmentNumber;
    long s2 = 2L + startingSegmentNumber;
    long s3 = 3L + startingSegmentNumber;
    long s4 = 4L + startingSegmentNumber;
    VersionedMetadata<EpochTransitionRecord> etr = stream.getEpochTransition(context).join();
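    // The versioned EpochTransitionRecord is passed to every submitScale call below;
    // submitScale validates the requested scale against this record and the current epoch.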
    List<Map.Entry<Double, Double>> newRanges = new ArrayList<>();
    AtomicReference<List<Map.Entry<Double, Double>>> newRangesRef = new AtomicReference<>(newRanges);
    AtomicReference<VersionedMetadata<EpochTransitionRecord>> etrRef = new AtomicReference<>(etr);
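    // The AtomicReference wrappers exist so the lambdas below can observe the latest
    // list and record: Java lambdas may only capture effectively final locals.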
    // 1. empty newRanges
    AssertExtensions.assertSuppliedFutureThrows("empty newRanges should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s0), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 2. simple mismatch: the new range covers only part of the sealed segments' key space
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, keyRangeChunk));
    AssertExtensions.assertSuppliedFutureThrows("mismatched key-space coverage should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 3. simple valid match
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    etr = stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etr, context).join();
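    // resetScale is a test helper that clears the submitted epoch transition so the
    // next case can start from a clean record.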
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 4. valid: two disjoint merges
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 5. valid: one merge and one disjoint range
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(keyRangeChunk, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 6. valid: one merge, two splits
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 7. one merge, one split and one invalid split
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 0.99));
    AssertExtensions.assertSuppliedFutureThrows("incomplete key-space coverage should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
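    // Invalid: the ranges above stop at 0.99, so the sealed segments' key space
    // ([0.0, 0.4) and [0.6, 1.0)) is not exactly covered.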
    // 8. valid: segments to seal supplied unsorted
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 9. valid: new ranges supplied unsorted
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
    newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
    etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
    etr = resetScale(etr, stream);
    etrRef.set(etr);
    // 10. invalid input range: low == high
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("range with low == high should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 11. invalid input range: low > high
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.2));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("range with low > high should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 12. invalid overlapping key ranges
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 3 * keyRangeChunk));
    AssertExtensions.assertSuppliedFutureThrows("overlapping ranges should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s1, s2), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 13. invalid overlapping key ranges -- a contains b
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.33));
    AssertExtensions.assertSuppliedFutureThrows("range containing another should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 14. invalid overlapping key ranges -- b contains a (with b.low == a.low)
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.33));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("range contained by another should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 15. invalid overlapping key ranges: b.low < a.high
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.35));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("partially overlapping ranges should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // 16. invalid key ranges with a gap: a.high < b.low
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.25));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
    AssertExtensions.assertSuppliedFutureThrows("ranges with a gap should be rejected",
            () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
    // scale the stream so the epoch transition becomes inconsistent with later input
    newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.4));
    scaleStream(stream, System.currentTimeMillis(), Lists.newArrayList(s0, s1), newRanges, Collections.emptyMap());
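    // scaleStream (test helper) performs a full scale that seals s0 and s1 and
    // creates a new epoch with a segment covering [0.0, 0.4).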
    // 17. precondition failure
    newRanges = new ArrayList<>();
    newRangesRef.set(newRanges);
    newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
    etrRef.set(stream.getEpochTransition(context).join());
    AssertExtensions.assertSuppliedFutureThrows("scale on a sealed segment should fail the precondition",
            () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
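    // s1 was sealed by the scale above, so offering it again as a segment to seal
    // violates submitScale's precondition rather than its input validation.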
    etrRef.set(stream.getEpochTransition(context).join());
    // get the current active segments.
    List<Long> segments = stream.getActiveSegments(context).join().stream()
            .map(StreamSegmentRecord::segmentId)
            .collect(Collectors.toList());
    // set the minimum number of segments to segments.size().
    stream.startUpdateConfiguration(StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(segments.size())).build(), context).join();
    VersionedMetadata<StreamConfigurationRecord> configRecord = stream.getVersionedConfigurationRecord(context).join();
    stream.completeUpdateConfiguration(configRecord, context).join();
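    // With the minimum segment count now pinned to the current count, any scale-down
    // violates the stream's configured minimum and must be rejected up front.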
    // attempt a scale-down, which should be rejected in submitScale.
    newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 1.0));
    newRangesRef.set(newRanges);
    AssertExtensions.assertSuppliedFutureThrows("scale-down below the minimum segment count should be rejected",
            () -> stream.submitScale(segments, newRangesRef.get(), timestamp, etrRef.get(), context),
            e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
}
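All of the invalid cases above trip the same input validation: every proposed range must satisfy low < high, the ranges must not overlap one another, and together they must exactly cover the key space of the segments being sealed. Below is a minimal standalone sketch of that invariant for a single contiguous sealed span. It is an illustration only, not Pravega's actual validation code; the class and method names are hypothetical, and the real check also accepts disjoint sealed spans, as cases 4 through 6 show.

import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

final class ScaleInputSketch {
    // Returns true iff `ranges` are well formed, non-overlapping, and contiguous
    // from `low` to `high` (the combined span of the segments being sealed).
    static boolean coversExactly(List<Map.Entry<Double, Double>> ranges, double low, double high) {
        if (ranges.isEmpty()) {
            return false;                        // case 1: empty input
        }
        List<Map.Entry<Double, Double>> sorted = new ArrayList<>(ranges);
        sorted.sort(Map.Entry.comparingByKey()); // cases 8 and 9: input order must not matter
        double cursor = low;
        for (Map.Entry<Double, Double> r : sorted) {
            if (r.getKey() >= r.getValue()) {
                return false;                    // cases 10 and 11: low >= high
            }
            // Real code would compare with a tolerance rather than exact equality.
            if (r.getKey() != cursor) {
                return false;                    // overlap (cases 12-15) or gap (case 16)
            }
            cursor = r.getValue();
        }
        return cursor == high;                   // cases 2 and 7: coverage mismatch
    }

    public static void main(String[] args) {
        List<Map.Entry<Double, Double>> ok = List.of(new SimpleEntry<>(0.0, 0.4));
        List<Map.Entry<Double, Double>> gap = List.of(new SimpleEntry<>(0.0, 0.2), new SimpleEntry<>(0.3, 0.4));
        System.out.println(coversExactly(ok, 0.0, 0.4));  // true
        System.out.println(coversExactly(gap, 0.0, 0.4)); // false
    }
}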
Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.
In class StreamTestBase, method createStream:
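This helper creates the scope and the stream with a fixed number of segments, then immediately lowers the configured minimum segment count to 1 so that the scale-down cases in the test above can run.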
private PersistentStreamBase createStream(String scope, String name, long time, int numOfSegments,
                                          int startingSegmentNumber, int chunkSize, int shardSize) {
    OperationContext context = getContext();
    createScope(scope, context);
    PersistentStreamBase stream = getStream(scope, name, chunkSize, shardSize);
    StreamConfiguration config = StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(numOfSegments)).build();
    stream.create(config, time, startingSegmentNumber, context)
          .thenCompose(x -> stream.updateState(State.ACTIVE, context)).join();
    // set minimum number of segments to 1 so that we can also test scale downs
    stream.startUpdateConfiguration(StreamConfiguration.builder()
            .scalingPolicy(ScalingPolicy.fixed(1)).build(), context).join();
    VersionedMetadata<StreamConfigurationRecord> configRecord = stream.getVersionedConfigurationRecord(context).join();
    stream.completeUpdateConfiguration(configRecord, context).join();
    return stream;
}
Use of io.pravega.controller.store.stream.records.StreamConfigurationRecord in project pravega by pravega.
In class PravegaTablesStreamMetadataStoreTest, method testScaleMetadata:
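This test checks that getScaleMetadata reports one ScaleMetadata entry per epoch: the creation epoch with three segments, then one two-segment entry for each subsequent scale.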
@Test
public void testScaleMetadata() throws Exception {
    String scope = "testScopeScale";
    String stream = "testStreamScale1";
    ScalingPolicy policy = ScalingPolicy.fixed(3);
    StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(policy).build();
    SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.0, 0.5);
    SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.5, 1.0);
    List<Map.Entry<Double, Double>> newRanges = Arrays.asList(segment1, segment2);
    store.createScope(scope, null, executor).get();
    store.createStream(scope, stream, configuration, System.currentTimeMillis(), null, executor).get();
    store.setState(scope, stream, State.ACTIVE, null, executor).get();
    // set minimum number of segments to 1 so that we can also test scale downs
    configuration = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
    store.startUpdateConfiguration(scope, stream, configuration, null, executor).join();
    VersionedMetadata<StreamConfigurationRecord> configRecord = store.getConfigurationRecord(scope, stream, null, executor).join();
    store.completeUpdateConfiguration(scope, stream, configRecord, null, executor).join();
    List<ScaleMetadata> scaleIncidents = store.getScaleMetadata(scope, stream, 0, Long.MAX_VALUE, null, executor).get();
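    // One ScaleMetadata record is expected per epoch; so far only the creation
    // epoch, with its three segments, exists.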
    assertTrue(scaleIncidents.size() == 1);
    assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
    // scale
    scale(scope, stream, scaleIncidents.get(0).getSegments(), newRanges);
    scaleIncidents = store.getScaleMetadata(scope, stream, 0, Long.MAX_VALUE, null, executor).get();
    assertTrue(scaleIncidents.size() == 2);
    assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
    assertTrue(scaleIncidents.get(1).getSegments().size() == 2);
    // scale again
    scale(scope, stream, scaleIncidents.get(1).getSegments(), newRanges);
    scaleIncidents = store.getScaleMetadata(scope, stream, 0, Long.MAX_VALUE, null, executor).get();
    assertTrue(scaleIncidents.size() == 3);
    assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
    assertTrue(scaleIncidents.get(1).getSegments().size() == 2);
    assertTrue(scaleIncidents.get(2).getSegments().size() == 2);
    // scale again
    scale(scope, stream, scaleIncidents.get(2).getSegments(), newRanges);
    scaleIncidents = store.getScaleMetadata(scope, stream, 0, Long.MAX_VALUE, null, executor).get();
    assertTrue(scaleIncidents.size() == 4);
    assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
    assertTrue(scaleIncidents.get(1).getSegments().size() == 2);
    assertTrue(scaleIncidents.get(2).getSegments().size() == 2);
    assertTrue(scaleIncidents.get(3).getSegments().size() == 2);
}
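For context, reading a stream's scale history through the same API the assertions above exercise might look like the sketch below. It assumes the store and executor fields set up in this test class; getTimestamp, getSplits, and getMerges are assumed accessors on ScaleMetadata (only getSegments appears in the test itself).

    List<ScaleMetadata> history = store.getScaleMetadata(scope, stream, 0, Long.MAX_VALUE, null, executor).join();
    for (ScaleMetadata incident : history) {
        // Each entry describes one epoch: when it began and which segments were active.
        System.out.printf("epoch at %d: %d segments (splits=%d, merges=%d)%n",
                incident.getTimestamp(), incident.getSegments().size(),
                incident.getSplits(), incident.getMerges());
    }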