Use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.
The class ZkStreamTest, method testZkStream:
@Test(timeout = 30000)
public void testZkStream() throws Exception {
double keyChunk = 1.0 / 5;
final ScalingPolicy policy = ScalingPolicy.fixed(5);
@Cleanup final StreamMetadataStore store = new ZKStreamMetadataStore(cli, executor);
final String streamName = "test";
store.createScope(SCOPE, null, executor).get();
StreamConfiguration streamConfig = StreamConfiguration.builder().scalingPolicy(policy).build();
store.createStream(SCOPE, streamName, streamConfig, System.currentTimeMillis(), null, executor).get();
store.setState(SCOPE, streamName, State.ACTIVE, null, executor).get();
OperationContext context = store.createStreamContext(SCOPE, streamName, 0L);
// set the minimum number of segments to 1 so that we can also test scale-downs
streamConfig = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build();
store.startUpdateConfiguration(SCOPE, streamName, streamConfig, null, executor).join();
VersionedMetadata<StreamConfigurationRecord> configRecord = store.getConfigurationRecord(SCOPE, streamName, null, executor).join();
store.completeUpdateConfiguration(SCOPE, streamName, configRecord, null, executor).join();
List<StreamSegmentRecord> segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
assertEquals(segments.size(), 5);
assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0L, 1L, 2L, 3L, 4L).contains(x.segmentId())));
long start = segments.get(0).getCreationTime();
assertEquals(store.getConfiguration(SCOPE, streamName, context, executor).get(), streamConfig);
List<Map.Entry<Double, Double>> newRanges;
// existing ranges: 0 = [0.0, 0.2), 1 = [0.2, 0.4), 2 = [0.4, 0.6), 3 = [0.6, 0.8), 4 = [0.8, 1.0)
// merge 3, 4 -> 5 = [0.6, 1.0)
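// scale workflow, repeated for each of the three scales in this test:
// submitScale -> set state SCALING -> startScale -> scaleCreateNewEpochs
// -> scaleSegmentsSealed -> completeScale -> set state ACTIVE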
newRanges = Collections.singletonList(new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
long scale1 = start + 10000;
ArrayList<Long> sealedSegments = Lists.newArrayList(3L, 4L);
long five = computeSegmentId(5, 1);
VersionedMetadata<EpochTransitionRecord> versioned = store.submitScale(SCOPE, streamName, sealedSegments, newRanges, scale1, null, context, executor).get();
VersionedMetadata<State> state = store.getVersionedState(SCOPE, streamName, null, executor).join();
state = store.updateVersionedState(SCOPE, streamName, State.SCALING, state, null, executor).join();
versioned = store.startScale(SCOPE, streamName, false, versioned, state, null, executor).join();
store.scaleCreateNewEpochs(SCOPE, streamName, versioned, context, executor).get();
store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, context, executor).get();
store.completeScale(SCOPE, streamName, versioned, null, executor).join();
store.setState(SCOPE, streamName, State.ACTIVE, null, executor).join();
segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
assertEquals(segments.size(), 4);
assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0L, 1L, 2L, five).contains(x.segmentId())));
// split 1 -> 6 = [0.2, 0.3), 7 = [0.3, 0.4)
// merge 2, 5 -> 8 = [0.4, 1.0)
newRanges = Arrays.asList(new AbstractMap.SimpleEntry<>(keyChunk, 0.3), new AbstractMap.SimpleEntry<>(0.3, 2 * keyChunk), new AbstractMap.SimpleEntry<>(2 * keyChunk, 1.0));
long scale2 = scale1 + 10000;
ArrayList<Long> sealedSegments1 = Lists.newArrayList(1L, 2L, five);
long six = computeSegmentId(6, 2);
long seven = computeSegmentId(7, 2);
long eight = computeSegmentId(8, 2);
versioned = store.submitScale(SCOPE, streamName, sealedSegments1, newRanges, scale2, null, context, executor).get();
EpochTransitionRecord response = versioned.getObject();
state = store.getVersionedState(SCOPE, streamName, null, executor).join();
state = store.updateVersionedState(SCOPE, streamName, State.SCALING, state, null, executor).join();
versioned = store.startScale(SCOPE, streamName, false, versioned, state, null, executor).join();
store.scaleCreateNewEpochs(SCOPE, streamName, versioned, context, executor).get();
store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, context, executor).get();
store.completeScale(SCOPE, streamName, versioned, null, executor).join();
store.setState(SCOPE, streamName, State.ACTIVE, null, executor).join();
segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
assertEquals(segments.size(), 4);
assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0L, six, seven, eight).contains(x.segmentId())));
// 7 -> 9 = [0.3, 0.35), 10 = [0.35, 0.6)
// 8 -> 10 = [0.35, 0.6), 11 = [0.6, 1.0)
newRanges = Arrays.asList(new AbstractMap.SimpleEntry<>(0.3, 0.35), new AbstractMap.SimpleEntry<>(0.35, 3 * keyChunk), new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
long scale3 = scale2 + 10000;
long nine = computeSegmentId(9, 3);
long ten = computeSegmentId(10, 3);
long eleven = computeSegmentId(11, 3);
ArrayList<Long> sealedSegments2 = Lists.newArrayList(seven, eight);
versioned = store.submitScale(SCOPE, streamName, sealedSegments2, newRanges, scale3, null, context, executor).get();
response = versioned.getObject();
state = store.getVersionedState(SCOPE, streamName, null, executor).join();
state = store.updateVersionedState(SCOPE, streamName, State.SCALING, state, null, executor).join();
store.startScale(SCOPE, streamName, false, versioned, state, null, executor).join();
store.scaleCreateNewEpochs(SCOPE, streamName, versioned, context, executor).get();
store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments2.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, context, executor).get();
store.completeScale(SCOPE, streamName, versioned, null, executor).join();
store.setState(SCOPE, streamName, State.ACTIVE, null, executor).join();
segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
assertEquals(segments.size(), 5);
assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0L, six, nine, ten, eleven).contains(x.segmentId())));
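// getSuccessors returns, for each successor of the queried segment, the list of that successor's predecessors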
Map<Long, List<Long>> successors = store.getSuccessors(SCOPE, streamName, 0L, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.isEmpty());
successors = store.getSuccessors(SCOPE, streamName, 1L, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 2 && successors.containsKey(six) && successors.get(six).containsAll(Collections.singleton(1L)) && successors.containsKey(seven) && successors.get(seven).containsAll(Collections.singleton(1L)));
successors = store.getSuccessors(SCOPE, streamName, 2L, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 1 && successors.containsKey(eight) && successors.get(eight).containsAll(Lists.newArrayList(2L, five)));
successors = store.getSuccessors(SCOPE, streamName, 3L, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 1 && successors.containsKey(five) && successors.get(five).containsAll(Lists.newArrayList(3L, 4L)));
successors = store.getSuccessors(SCOPE, streamName, 4L, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 1 && successors.containsKey(five) && successors.get(five).containsAll(Lists.newArrayList(3L, 4L)));
successors = store.getSuccessors(SCOPE, streamName, five, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 1 && successors.containsKey(eight) && successors.get(eight).containsAll(Lists.newArrayList(2L, five)));
successors = store.getSuccessors(SCOPE, streamName, six, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.isEmpty());
successors = store.getSuccessors(SCOPE, streamName, seven, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 2 && successors.containsKey(nine) && successors.get(nine).containsAll(Collections.singleton(seven)) && successors.containsKey(ten) && successors.get(ten).containsAll(Lists.newArrayList(seven, eight)));
successors = store.getSuccessors(SCOPE, streamName, eight, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.size() == 2 && successors.containsKey(eleven) && successors.get(eleven).containsAll(Collections.singleton(eight)) && successors.containsKey(ten) && successors.get(ten).containsAll(Lists.newArrayList(seven, eight)));
successors = store.getSuccessors(SCOPE, streamName, nine, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.isEmpty());
successors = store.getSuccessors(SCOPE, streamName, ten, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.isEmpty());
successors = store.getSuccessors(SCOPE, streamName, eleven, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(successors.isEmpty());
// segments at the head of the stream (before the first scale)
Map<Long, Long> historicalSegments = store.getSegmentsAtHead(SCOPE, streamName, context, executor).get().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertEquals(historicalSegments.size(), 5);
assertTrue(historicalSegments.keySet().containsAll(Lists.newArrayList(0L, 1L, 2L, 3L, 4L)));
// epoch 0 (the creation epoch)
List<Long> segmentsInEpoch = store.getSegmentsInEpoch(SCOPE, streamName, 0, context, executor).get().stream().map(x -> x.segmentId()).collect(Collectors.toList());
assertEquals(segmentsInEpoch.size(), 5);
assertTrue(segmentsInEpoch.containsAll(Lists.newArrayList(0L, 1L, 2L, 3L, 4L)));
// epoch 1 (after scale1)
segmentsInEpoch = store.getSegmentsInEpoch(SCOPE, streamName, 1, context, executor).get().stream().map(x -> x.segmentId()).collect(Collectors.toList());
assertEquals(segmentsInEpoch.size(), 4);
assertTrue(segmentsInEpoch.containsAll(Lists.newArrayList(0L, 1L, 2L, five)));
// epoch 2 (after scale2)
segmentsInEpoch = store.getSegmentsInEpoch(SCOPE, streamName, 2, context, executor).get().stream().map(x -> x.segmentId()).collect(Collectors.toList());
assertEquals(segmentsInEpoch.size(), 4);
assertTrue(segmentsInEpoch.containsAll(Lists.newArrayList(0L, six, seven, eight)));
// epoch 3 (after scale3)
segmentsInEpoch = store.getSegmentsInEpoch(SCOPE, streamName, 3, context, executor).get().stream().map(x -> x.segmentId()).collect(Collectors.toList());
assertEquals(segmentsInEpoch.size(), 5);
assertTrue(segmentsInEpoch.containsAll(Lists.newArrayList(0L, six, nine, ten, eleven)));
assertFalse(store.isSealed(SCOPE, streamName, context, executor).get());
assertNotEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
store.setSealed(SCOPE, streamName, context, executor).get();
assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
// seal an already sealed stream.
store.setSealed(SCOPE, streamName, context, executor).get();
assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
// seal a non-existent stream.
AssertExtensions.assertFutureThrows("", store.setSealed(SCOPE, "nonExistentStream", null, executor), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
store.markCold(SCOPE, streamName, 0L, System.currentTimeMillis() + 1000, null, executor).get();
assertTrue(store.isCold(SCOPE, streamName, 0L, null, executor).get());
Thread.sleep(1000);
assertFalse(store.isCold(SCOPE, streamName, 0L, null, executor).get());
store.markCold(SCOPE, streamName, 0L, System.currentTimeMillis() + 1000, null, executor).get();
store.removeMarker(SCOPE, streamName, 0L, null, executor).get();
assertFalse(store.isCold(SCOPE, streamName, 0L, null, executor).get());
}
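The five-phase scale sequence above (submitScale, set SCALING, startScale, scaleCreateNewEpochs, scaleSegmentsSealed, completeScale, back to ACTIVE) is repeated verbatim for each of the three scales. A condensed sketch of that sequence as a hypothetical helper, using only the StreamMetadataStore calls already shown in the test:
// Hypothetical helper (not part of the test) condensing the scale phases above.
private void runScale(StreamMetadataStore store, String streamName, List<Long> segmentsToSeal,
                      List<Map.Entry<Double, Double>> newRanges, long scaleTimestamp,
                      OperationContext context) throws Exception {
    // phase 1: submit the scale request, producing an EpochTransitionRecord
    VersionedMetadata<EpochTransitionRecord> versioned = store.submitScale(SCOPE, streamName, segmentsToSeal, newRanges, scaleTimestamp, null, context, executor).get();
    // phase 2: move the stream into SCALING state and start the scale
    VersionedMetadata<State> state = store.getVersionedState(SCOPE, streamName, null, executor).join();
    state = store.updateVersionedState(SCOPE, streamName, State.SCALING, state, null, executor).join();
    versioned = store.startScale(SCOPE, streamName, false, versioned, state, null, executor).join();
    // phase 3: create the new epoch with the replacement segments
    store.scaleCreateNewEpochs(SCOPE, streamName, versioned, context, executor).get();
    // phase 4: record the sealed segments (sizes all 0 here, as in the test)
    store.scaleSegmentsSealed(SCOPE, streamName, segmentsToSeal.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, context, executor).get();
    // phase 5: complete the scale and return the stream to ACTIVE
    store.completeScale(SCOPE, streamName, versioned, null, executor).join();
    store.setState(SCOPE, streamName, State.ACTIVE, null, executor).join();
}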
Use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.
The class StreamTestBase, method testSegmentQueriesDuringScale:
@Test(timeout = 30000L)
public void testSegmentQueriesDuringScale() {
OperationContext context = getContext();
// start a scale and perform getSegment, getActiveSegments and getSuccessorsWithPredecessors queries during the different phases of the scale
int startingSegmentNumber = new Random().nextInt(20);
Stream stream = createStream("scope", "stream" + startingSegmentNumber, System.currentTimeMillis(), 5, startingSegmentNumber);
StreamSegmentRecord segment = stream.getSegment(startingSegmentNumber, context).join();
assertEquals(segment.segmentId(), startingSegmentNumber + 0L);
assertEquals(segment.getKeyStart(), 0, 0);
assertEquals(segment.getKeyEnd(), 1.0 / 5, 0);
long segment5 = computeSegmentId(startingSegmentNumber + 5, 1);
long segment6 = computeSegmentId(startingSegmentNumber + 6, 1);
long segment7 = computeSegmentId(startingSegmentNumber + 7, 1);
long segment8 = computeSegmentId(startingSegmentNumber + 8, 1);
long segment9 = computeSegmentId(startingSegmentNumber + 9, 1);
List<Long> newSegments = Lists.newArrayList(segment5, segment6, segment7, segment8, segment9);
List<StreamSegmentRecord> originalSegments = stream.getActiveSegments(context).join();
List<Long> segmentsToSeal = originalSegments.stream().map(StreamSegmentRecord::segmentId).collect(Collectors.toList());
List<Map.Entry<Double, Double>> newRanges = originalSegments.stream().map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd())).collect(Collectors.toList());
VersionedMetadata<EpochTransitionRecord> etr = stream.getEpochTransition(context).join();
// submit scale
etr = stream.submitScale(segmentsToSeal, newRanges, System.currentTimeMillis(), etr, context).join();
VersionedMetadata<State> state = stream.getVersionedState(context).thenCompose(s -> stream.updateVersionedState(s, State.SCALING, context)).join();
etr = stream.startScale(true, etr, state, context).join();
List<StreamSegmentRecord> newCurrentSegments = stream.getActiveSegments(context).join();
assertEquals(originalSegments, newCurrentSegments);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.getSegment(segment9, context), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
Map<StreamSegmentRecord, List<Long>> successorsWithPredecessors = stream.getSuccessorsWithPredecessors(0L, context).join();
assertTrue(successorsWithPredecessors.isEmpty());
// scale create new epochs
stream.scaleCreateNewEpoch(etr, context).join();
newCurrentSegments = stream.getActiveSegments(context).join();
assertEquals(originalSegments, newCurrentSegments);
segment = stream.getSegment(segment9, context).join();
assertEquals(computeSegmentId(startingSegmentNumber + 9, 1), segment.segmentId());
assertEquals(segment.getKeyStart(), 1.0 / 5 * 4, 0);
assertEquals(segment.getKeyEnd(), 1.0, 0);
successorsWithPredecessors = stream.getSuccessorsWithPredecessors(startingSegmentNumber + 0L, context).join();
Set<StreamSegmentRecord> successors = successorsWithPredecessors.keySet();
assertEquals(1, successors.size());
StreamSegmentRecord five = successors.stream().findAny().get();
assertEquals(computeSegmentId(startingSegmentNumber + 5, 1), five.segmentId());
List<Long> predecessors = successorsWithPredecessors.get(five);
assertEquals(1, predecessors.size());
assertTrue(predecessors.contains(startingSegmentNumber + 0L));
// scale old segments sealed
stream.scaleOldSegmentsSealed(Collections.emptyMap(), etr, context).join();
newCurrentSegments = stream.getActiveSegments(context).join();
assertEquals(new HashSet<>(newSegments), newCurrentSegments.stream().map(StreamSegmentRecord::segmentId).collect(Collectors.toSet()));
segment = stream.getSegment(segment9, context).join();
assertEquals(computeSegmentId(startingSegmentNumber + 9, 1), segment.segmentId());
assertEquals(segment.getKeyStart(), 1.0 / 5 * 4, 0);
assertEquals(segment.getKeyEnd(), 1.0, 0);
// complete scale
stream.completeScale(etr, context).join();
segment = stream.getSegment(segment9, context).join();
assertEquals(computeSegmentId(startingSegmentNumber + 9, 1), segment.segmentId());
assertEquals(segment.getKeyStart(), 1.0 / 5 * 4, 0);
assertEquals(segment.getKeyEnd(), 1.0, 0);
}
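Segment ids such as segment5 through segment9 above pack the creation epoch together with the segment number. A minimal sketch of that packing, assuming the convention of io.pravega.shared.NameUtils.computeSegmentId (epoch in the upper 32 bits):
// Assumed packing: creation epoch in the upper 32 bits, segment number in the lower 32.
static long computeSegmentId(int segmentNumber, int epoch) {
    return ((long) epoch << 32) | (segmentNumber & 0xFFFFFFFFL);
}
// e.g. computeSegmentId(5, 1) == (1L << 32) + 5 == 4294967301L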
Use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.
The class StreamTestBase, method testCreateStream:
@Test(timeout = 30000L)
public void testCreateStream() {
OperationContext context = getContext();
PersistentStreamBase stream = createStream("scope", "stream", System.currentTimeMillis(), 2, 0);
assertEquals(State.ACTIVE, stream.getState(true, context).join());
EpochRecord activeEpoch = stream.getActiveEpoch(true, context).join();
assertEquals(0, activeEpoch.getEpoch());
assertEquals(2, activeEpoch.getSegments().size());
VersionedMetadata<StreamTruncationRecord> truncationRecord = stream.getTruncationRecord(context).join();
assertEquals(StreamTruncationRecord.EMPTY, truncationRecord.getObject());
VersionedMetadata<EpochTransitionRecord> etr = stream.getEpochTransition(context).join();
assertEquals(EpochTransitionRecord.EMPTY, etr.getObject());
VersionedMetadata<CommittingTransactionsRecord> ctr = stream.getVersionedCommitTransactionsRecord(context).join();
assertEquals(CommittingTransactionsRecord.EMPTY, ctr.getObject());
assertEquals(activeEpoch, stream.getEpochRecord(0, context).join());
AssertExtensions.assertFutureThrows("", stream.getEpochRecord(1, context), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
}
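A fixed scaling policy divides the keyspace evenly among the initial segments. A hypothetical follow-up check (not part of the suite) that the active segments of the new stream tile [0.0, 1.0) with contiguous, non-overlapping key ranges:
// Hypothetical assertion sketch, reusing the stream and context from the test above.
List<StreamSegmentRecord> active = new ArrayList<>(stream.getActiveSegments(context).join());
active.sort(Comparator.comparingDouble(StreamSegmentRecord::getKeyStart));
double expectedStart = 0.0;
for (StreamSegmentRecord s : active) {
    // each segment must begin exactly where the previous one ended
    assertEquals(expectedStart, s.getKeyStart(), 0.0);
    expectedStart = s.getKeyEnd();
}
// the last segment must close the keyspace at 1.0
assertEquals(1.0, expectedStart, 0.0);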
Use of io.pravega.controller.store.stream.records.EpochTransitionRecord in project pravega by pravega.
The class StreamTestBase, method scaleInputValidityTest:
@Test(timeout = 30000L)
public void scaleInputValidityTest() {
OperationContext context = getContext();
int startingSegmentNumber = new Random().nextInt(2000);
String name = "stream" + startingSegmentNumber;
PersistentStreamBase stream = createStream("scope", name, System.currentTimeMillis(), 5, startingSegmentNumber);
long timestamp = System.currentTimeMillis();
final double keyRangeChunk = 1.0 / 5;
long s0 = startingSegmentNumber;
long s1 = 1L + startingSegmentNumber;
long s2 = 2L + startingSegmentNumber;
long s3 = 3L + startingSegmentNumber;
long s4 = 4L + startingSegmentNumber;
VersionedMetadata<EpochTransitionRecord> etr = stream.getEpochTransition(context).join();
List<Map.Entry<Double, Double>> newRanges = new ArrayList<>();
AtomicReference<List<Map.Entry<Double, Double>>> newRangesRef = new AtomicReference<>(newRanges);
AtomicReference<VersionedMetadata<EpochTransitionRecord>> etrRef = new AtomicReference<>(etr);
// 1. empty newRanges
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 2. simple mismatch: the new ranges do not cover the sealed segments' combined range
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, keyRangeChunk));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 3. simple valid match
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
etr = stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etr, context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 4. valid 2 disjoint merges
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 5. valid 1 merge and 1 disjoint
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(keyRangeChunk, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 1.0));
etr = stream.submitScale(Lists.newArrayList(s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 6. valid 1 merge, 2 splits
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
etr = stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 7. 1 merge, 1 split and 1 invalid split
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 0.99));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1, s3, s4), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 8. valid unsorted segments to seal
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 9. valid unsorted new ranges
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.9, 1.0));
newRanges.add(new AbstractMap.SimpleEntry<>(3 * keyRangeChunk, 0.7));
newRanges.add(new AbstractMap.SimpleEntry<>(0.7, 0.8));
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 2 * keyRangeChunk));
newRanges.add(new AbstractMap.SimpleEntry<>(0.8, 0.9));
etr = stream.submitScale(Lists.newArrayList(s4, s0, s1, s3), newRangesRef.get(), timestamp, etrRef.get(), context).join();
etr = resetScale(etr, stream);
etrRef.set(etr);
// 10. invalid input range low == high
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.2));
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 11. invalid input range low > high
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.2));
newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.2));
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s0, s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 12. invalid overlapping key ranges
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 3 * keyRangeChunk));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1, s2), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 13. invalid overlapping key ranges -- a contains b
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.33));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 14. invalid overlapping key ranges -- b contains a (with b.low == a.low)
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.33));
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 15. invalid overlapping key ranges: b.low < a.high
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.35));
newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// 16. invalid non-contiguous key ranges: a.high < b.low leaves a gap
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.25));
newRanges.add(new AbstractMap.SimpleEntry<>(0.3, 0.4));
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.InputInvalidException);
// scale the stream, sealing s0 and s1, to set up the precondition failure below
newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.4));
scaleStream(stream, System.currentTimeMillis(), Lists.newArrayList(s0, s1), newRanges, Collections.emptyMap());
// 17. precondition failure: s1 was sealed by the scale above and can no longer be sealed
newRanges = new ArrayList<>();
newRangesRef.set(newRanges);
newRanges.add(new AbstractMap.SimpleEntry<>(0.2, 0.4));
etrRef.set(stream.getEpochTransition(context).join());
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(Lists.newArrayList(s1), newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
etrRef.set(stream.getEpochTransition(context).join());
// get the current set of active segments.
List<Long> segments = stream.getActiveSegments(context).join().stream().map(StreamSegmentRecord::segmentId).collect(Collectors.toList());
// set the minimum number of segments to segments.size() so that any scale-down violates the configured policy.
stream.startUpdateConfiguration(StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(segments.size())).build(), context).join();
VersionedMetadata<StreamConfigurationRecord> configRecord = stream.getVersionedConfigurationRecord(context).join();
stream.completeUpdateConfiguration(configRecord, context).join();
// attempt a scale-down, which should now be rejected in submitScale.
newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 1.0));
newRangesRef.set(newRanges);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.submitScale(segments, newRangesRef.get(), timestamp, etrRef.get(), context), e -> Exceptions.unwrap(e) instanceof EpochTransitionOperationExceptions.PreConditionFailureException);
}
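The invalid cases above violate a few structural rules on newRanges. A simplified, assumed model of that validation (the store additionally checks the new ranges against the sealed segments' combined coverage, which this sketch omits):
// Assumed, simplified model -- not the store's actual implementation.
// Every range needs low < high (cases 10, 11), and the sorted ranges must be
// contiguous: no overlaps (cases 12-15) and no gaps (case 16).
static boolean rangesAreWellFormed(List<Map.Entry<Double, Double>> newRanges) {
    if (newRanges.isEmpty()) {
        return false; // case 1: empty newRanges
    }
    List<Map.Entry<Double, Double>> sorted = new ArrayList<>(newRanges);
    sorted.sort(Map.Entry.comparingByKey());
    double previousHigh = sorted.get(0).getKey();
    for (Map.Entry<Double, Double> range : sorted) {
        if (range.getKey() >= range.getValue()) {
            return false; // zero-width or inverted range
        }
        if (range.getKey() != previousHigh) {
            return false; // overlap or gap with the previous range
        }
        previousHigh = range.getValue();
    }
    return true;
}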