Example usage of io.pravega.controller.store.stream.records.StreamSegmentRecord in the Pravega project: class StreamTestBase, method truncationTest.
/**
 * Verifies stream truncation across multiple scale epochs. Builds a stream whose history
 * spans epochs 0..4, then applies three successive stream cuts, after each one checking:
 * the truncation record's set of segments to delete, its stream cut, the epoch span of the
 * cut, and the segments (with offsets) returned by {@code getSegmentsAtHead}. Finally it
 * asserts that a stream cut strictly behind the current truncation point, and one that
 * references an already-truncated segment, are both rejected with IllegalArgumentException.
 */
@Test(timeout = 30000L)
public void truncationTest() {
OperationContext context = getContext();
// randomize the starting segment number so repeated runs operate on distinct stream names
int startingSegmentNumber = new Random().nextInt(2000);
// epoch 0 --> 0, 1
long timestamp = System.currentTimeMillis();
PersistentStreamBase stream = createStream("scope", "stream" + startingSegmentNumber, timestamp, 2, startingSegmentNumber);
List<StreamSegmentRecord> activeSegments = stream.getActiveSegments(context).join();
// epoch 1 --> 0, 2, 3
List<Map.Entry<Double, Double>> newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<>(0.5, 0.75));
newRanges.add(new AbstractMap.SimpleEntry<>(0.75, 1.0));
Map<Long, Long> map = new HashMap<>();
map.put(startingSegmentNumber + 1L, 100L);
// scale 1: seal segment 1 (size 100) into segments 2 and 3
scaleStream(stream, ++timestamp, Lists.newArrayList(startingSegmentNumber + 1L), newRanges, map);
long twoSegmentId = computeSegmentId(startingSegmentNumber + 2, 1);
long threeSegmentId = computeSegmentId(startingSegmentNumber + 3, 1);
// epoch 2 --> 0, 2, 4, 5
newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>(0.75, (0.75 + 1.0) / 2));
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>((0.75 + 1.0) / 2, 1.0));
map = new HashMap<>();
map.put(threeSegmentId, 100L);
// scale 2: seal segment 3 (size 100) into segments 4 and 5
scaleStream(stream, ++timestamp, Lists.newArrayList(threeSegmentId), newRanges, map);
long fourSegmentId = computeSegmentId(startingSegmentNumber + 4, 2);
long fiveSegmentId = computeSegmentId(startingSegmentNumber + 5, 2);
// epoch 3 --> 0, 4, 5, 6, 7
newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>(0.5, (0.75 + 0.5) / 2));
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>((0.75 + 0.5) / 2, 0.75));
map = new HashMap<>();
map.put(twoSegmentId, 100L);
// scale 3: seal segment 2 (size 100) into segments 6 and 7
scaleStream(stream, ++timestamp, Lists.newArrayList(twoSegmentId), newRanges, map);
long sixSegmentId = computeSegmentId(startingSegmentNumber + 6, 3);
long sevenSegmentId = computeSegmentId(startingSegmentNumber + 7, 3);
// epoch 4 --> 4, 5, 6, 7, 8, 9
newRanges = new ArrayList<>();
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>(0.0, (0.0 + 0.5) / 2));
newRanges.add(new AbstractMap.SimpleEntry<Double, Double>((0.0 + 0.5) / 2, 0.5));
map = new HashMap<>();
map.put(startingSegmentNumber + 0L, 100L);
// scale 4: seal segment 0 (size 100) into segments 8 and 9
scaleStream(stream, ++timestamp, Lists.newArrayList(startingSegmentNumber + 0L), newRanges, map);
long eightSegmentId = computeSegmentId(startingSegmentNumber + 8, 4);
long nineSegmentId = computeSegmentId(startingSegmentNumber + 9, 4);
// first stream cut
Map<Long, Long> streamCut1 = new HashMap<>();
streamCut1.put(startingSegmentNumber + 0L, 1L);
streamCut1.put(startingSegmentNumber + 1L, 1L);
stream.startTruncation(streamCut1, context).join();
VersionedMetadata<StreamTruncationRecord> versionedTruncationRecord = stream.getTruncationRecord(context).join();
StreamTruncationRecord truncationRecord = versionedTruncationRecord.getObject();
// the cut sits entirely in epoch 0, so no segment falls fully behind it yet
assertTrue(truncationRecord.getToDelete().isEmpty());
assertEquals(truncationRecord.getStreamCut(), streamCut1);
Map<Long, Integer> transform = transform(truncationRecord.getSpan());
assertTrue(transform.get(startingSegmentNumber + 0L) == 0 && transform.get(startingSegmentNumber + 1L) == 0);
stream.completeTruncation(versionedTruncationRecord, context).join();
// getActiveSegments wrt first truncation record which is on epoch 0
Map<Long, Long> activeSegmentsWithOffset;
// 1. truncationRecord = 0/1, 1/1
// expected active segments with offset = 0/1, 1/1
activeSegmentsWithOffset = stream.getSegmentsAtHead(context).join().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(activeSegmentsWithOffset.size() == 2 && activeSegmentsWithOffset.containsKey(startingSegmentNumber + 0L) && activeSegmentsWithOffset.containsKey(startingSegmentNumber + 1L) && activeSegmentsWithOffset.get(startingSegmentNumber + 0L) == 1L && activeSegmentsWithOffset.get(startingSegmentNumber + 1L) == 1L);
// second stream cut
Map<Long, Long> streamCut2 = new HashMap<>();
streamCut2.put(startingSegmentNumber + 0L, 1L);
streamCut2.put(twoSegmentId, 1L);
streamCut2.put(fourSegmentId, 1L);
streamCut2.put(fiveSegmentId, 1L);
stream.startTruncation(streamCut2, context).join();
versionedTruncationRecord = stream.getTruncationRecord(context).join();
truncationRecord = versionedTruncationRecord.getObject();
assertEquals(truncationRecord.getStreamCut(), streamCut2);
// segments 1 and 3 are now entirely behind the cut and become deletable
assertTrue(truncationRecord.getToDelete().size() == 2 && truncationRecord.getToDelete().contains(startingSegmentNumber + 1L) && truncationRecord.getToDelete().contains(threeSegmentId));
// NOTE(review): redundant with the assertEquals on the stream cut above
assertTrue(truncationRecord.getStreamCut().equals(streamCut2));
transform = transform(truncationRecord.getSpan());
assertTrue(transform.get(startingSegmentNumber + 0L) == 2 && transform.get(twoSegmentId) == 2 && transform.get(fourSegmentId) == 2 && transform.get(fiveSegmentId) == 2);
stream.completeTruncation(versionedTruncationRecord, context).join();
// 2. truncationRecord = 0/1, 2/1, 4/1, 5/1.
// expected active segments = 0/1, 2/1, 4/1, 5/1
activeSegmentsWithOffset = stream.getSegmentsAtHead(context).join().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(activeSegmentsWithOffset.size() == 4 && activeSegmentsWithOffset.containsKey(startingSegmentNumber + 0L) && activeSegmentsWithOffset.containsKey(twoSegmentId) && activeSegmentsWithOffset.containsKey(fourSegmentId) && activeSegmentsWithOffset.containsKey(fiveSegmentId) && activeSegmentsWithOffset.get(startingSegmentNumber + 0L) == 1L && activeSegmentsWithOffset.get(twoSegmentId) == 1L && activeSegmentsWithOffset.get(fourSegmentId) == 1L && activeSegmentsWithOffset.get(fiveSegmentId) == 1L);
// third stream cut
Map<Long, Long> streamCut3 = new HashMap<>();
streamCut3.put(twoSegmentId, 10L);
streamCut3.put(fourSegmentId, 10L);
streamCut3.put(fiveSegmentId, 10L);
streamCut3.put(eightSegmentId, 10L);
streamCut3.put(nineSegmentId, 10L);
stream.startTruncation(streamCut3, context).join();
versionedTruncationRecord = stream.getTruncationRecord(context).join();
truncationRecord = versionedTruncationRecord.getObject();
assertEquals(truncationRecord.getStreamCut(), streamCut3);
// segment 0 is fully behind this cut (8 and 9 are its successors)
assertTrue(truncationRecord.getToDelete().size() == 1 && truncationRecord.getToDelete().contains(startingSegmentNumber + 0L));
// NOTE(review): redundant with the assertEquals on the stream cut above
assertTrue(truncationRecord.getStreamCut().equals(streamCut3));
transform = transform(truncationRecord.getSpan());
assertTrue(transform.get(twoSegmentId) == 2 && transform.get(fourSegmentId) == 4 && transform.get(fiveSegmentId) == 4 && transform.get(eightSegmentId) == 4 && transform.get(nineSegmentId) == 4);
stream.completeTruncation(versionedTruncationRecord, context).join();
// 3. truncation record 2/10, 4/10, 5/10, 8/10, 9/10
// getActiveSegments wrt first truncation record which spans epoch 2 to 4
// expected active segments = 2/10, 4/10, 5/10, 8/10, 9/10
activeSegmentsWithOffset = stream.getSegmentsAtHead(context).join().entrySet().stream().collect(Collectors.toMap(x -> x.getKey().segmentId(), x -> x.getValue()));
assertTrue(activeSegmentsWithOffset.size() == 5 && activeSegmentsWithOffset.containsKey(twoSegmentId) && activeSegmentsWithOffset.containsKey(fourSegmentId) && activeSegmentsWithOffset.containsKey(fiveSegmentId) && activeSegmentsWithOffset.containsKey(eightSegmentId) && activeSegmentsWithOffset.containsKey(nineSegmentId) && activeSegmentsWithOffset.get(twoSegmentId) == 10L && activeSegmentsWithOffset.get(fourSegmentId) == 10L && activeSegmentsWithOffset.get(fiveSegmentId) == 10L && activeSegmentsWithOffset.get(eightSegmentId) == 10L && activeSegmentsWithOffset.get(nineSegmentId) == 10L);
// behind previous
// a stream cut with offsets behind the current truncation point must be rejected
Map<Long, Long> streamCut4 = new HashMap<>();
streamCut4.put(twoSegmentId, 1L);
streamCut4.put(fourSegmentId, 1L);
streamCut4.put(fiveSegmentId, 1L);
streamCut4.put(eightSegmentId, 1L);
streamCut4.put(nineSegmentId, 1L);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.startTruncation(streamCut4, context), e -> e instanceof IllegalArgumentException);
// a stream cut that references segment 0, which is already truncated away, must also be rejected
Map<Long, Long> streamCut5 = new HashMap<>();
streamCut5.put(twoSegmentId, 10L);
streamCut5.put(fourSegmentId, 10L);
streamCut5.put(fiveSegmentId, 10L);
streamCut5.put(startingSegmentNumber + 0L, 10L);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.startTruncation(streamCut5, context), e -> e instanceof IllegalArgumentException);
}
Example usage of io.pravega.controller.store.stream.records.StreamSegmentRecord in the Pravega project: class StreamTestBase, method segmentQueriesDuringRollingTxn.
/**
 * Runs segment queries ({@code getSegment}, {@code getActiveSegments},
 * {@code getSuccessorsWithPredecessors}) at each phase of a rolling transaction and
 * verifies their results before and after the duplicate epochs are created.
 */
@Test(timeout = 30000L)
public void segmentQueriesDuringRollingTxn() {
OperationContext ctx = getContext();
// randomize starting segment number so each run operates on a fresh stream
int startingSegmentNumber = new Random().nextInt(2000);
long timestamp = System.currentTimeMillis();
Stream stream = createStream("scope", "stream" + startingSegmentNumber, timestamp, 5, startingSegmentNumber);
// the first of the five initial segments covers the first fifth of the keyspace
StreamSegmentRecord firstSegment = stream.getSegment(startingSegmentNumber, ctx).join();
assertEquals(firstSegment.segmentId(), startingSegmentNumber + 0L);
assertEquals(firstSegment.getKeyStart(), 0, 0);
assertEquals(firstSegment.getKeyEnd(), 1.0 / 5, 0);
createAndCommitTransaction(stream, 0, 0L);
// scale all five original segments into two halves of the keyspace (epoch 1: segments 5, 6)
List<Map.Entry<Double, Double>> ranges = new ArrayList<>();
ranges.add(new AbstractMap.SimpleEntry<>(0.0, 0.5));
ranges.add(new AbstractMap.SimpleEntry<>(0.5, 1.0));
timestamp = timestamp + 1;
scaleStream(stream, timestamp, Lists.newArrayList(startingSegmentNumber + 0L, startingSegmentNumber + 1L, startingSegmentNumber + 2L, startingSegmentNumber + 3L, startingSegmentNumber + 4L), ranges, Collections.emptyMap());
List<StreamSegmentRecord> preScaleActive = stream.getActiveSegments(ctx).join();
// begin committing transactions and move the stream into COMMITTING_TXN state
VersionedMetadata<CommittingTransactionsRecord> committingRecord = stream.startCommittingTransactions(100, ctx).join().getKey();
stream.getVersionedState(ctx).thenCompose(s -> stream.updateVersionedState(s, State.COMMITTING_TXN, ctx)).join();
// begin the rolling transaction
committingRecord = stream.startRollingTxn(1, committingRecord, ctx).join();
List<StreamSegmentRecord> currentActive = stream.getActiveSegments(ctx).join();
// active segments are unchanged until the roll completes
assertEquals(currentActive, preScaleActive);
// no successors are visible before the duplicate epochs exist
Map<StreamSegmentRecord, List<Long>> successorMap = stream.getSuccessorsWithPredecessors(computeSegmentId(startingSegmentNumber + 5, 1), ctx).join();
assertTrue(successorMap.isEmpty());
// rolling txn creates duplicate epochs; successors and predecessors become queryable afterwards
timestamp = timestamp + 1;
stream.rollingTxnCreateDuplicateEpochs(Collections.emptyMap(), timestamp, committingRecord, ctx).join();
currentActive = stream.getActiveSegments(ctx).join();
assertEquals(currentActive, preScaleActive);
// segment 5 (epoch 1) now has three successors, all created in duplicate epoch 2
successorMap = stream.getSuccessorsWithPredecessors(computeSegmentId(startingSegmentNumber + 5, 1), ctx).join();
Set<StreamSegmentRecord> successorSet = successorMap.keySet();
assertEquals(3, successorSet.size());
assertTrue(successorSet.stream().allMatch(x -> x.getCreationEpoch() == 2));
assertTrue(successorSet.stream().anyMatch(x -> x.getSegmentNumber() == startingSegmentNumber + 0));
assertTrue(successorSet.stream().anyMatch(x -> x.getSegmentNumber() == startingSegmentNumber + 1));
assertTrue(successorSet.stream().anyMatch(x -> x.getSegmentNumber() == startingSegmentNumber + 2));
// the duplicate of segment 0 (epoch 2) has exactly one successor: the duplicate of segment 5 in epoch 3
successorMap = stream.getSuccessorsWithPredecessors(computeSegmentId(startingSegmentNumber + 0, 2), ctx).join();
successorSet = successorMap.keySet();
assertEquals(1, successorSet.size());
assertTrue(successorSet.stream().allMatch(x -> x.segmentId() == computeSegmentId(startingSegmentNumber + 5, 3)));
// finish the rolling transaction and complete the commit
stream.completeRollingTxn(Collections.emptyMap(), committingRecord, ctx).join();
stream.completeCommittingTransactions(committingRecord, ctx, Collections.emptyMap()).join();
}
Example usage of io.pravega.controller.store.stream.records.StreamSegmentRecord in the Pravega project: class StreamTestBase, method testStreamCutsWithMultipleChunks.
/**
 * Stream history.
 * epoch0 = 0, 1, 2, 3, 4
 * epoch1 = 5, 1, 2, 3, 4
 * epoch2 = 5, 6, 2, 3, 4
 * epoch3 = 5, 6, 7, 3, 4
 * epoch4 = 5, 6, 7, 8, 4
 * epoch5 = 5, 6, 7, 8, 9
 * epoch6 = 0`, 1`, 2`, 3`, 4`
 * epoch7 = 5`, 6`, 7`, 8`, 9`
 * epoch8 = 10, 6`, 7`, 8`, 9`
 * epoch9 = 10, 11, 7`, 8`, 9`
 * epoch10 = 10, 11, 12, 8`, 9`
 * epoch11 = 10, 11, 12, 13, 9`
 * epoch12 = 10, 11, 12, 13, 14
 *
 * Verifies stream-cut span computation, segments-between-spans, and size-between-cuts
 * over a history whose records span multiple chunks.
 */
@Test(timeout = 30000L)
public void testStreamCutsWithMultipleChunks() {
String scope = "streamCutTest";
String name = "streamCutTest";
int startingSegmentNumber = new Random().nextInt(2000);
PersistentStreamBase stream = createScaleAndRollStreamForMultiChunkTests(name, scope, startingSegmentNumber, System::currentTimeMillis);
OperationContext context = getContext();
// fetch every epoch record of the pre-built history (see table above)
// 0, 1, 2, 3, 4
EpochRecord epoch0 = stream.getEpochRecord(0, context).join();
// 5, 1, 2, 3, 4
EpochRecord epoch1 = stream.getEpochRecord(1, context).join();
// 5, 6, 2, 3, 4
EpochRecord epoch2 = stream.getEpochRecord(2, context).join();
// 5, 6, 7, 3, 4
EpochRecord epoch3 = stream.getEpochRecord(3, context).join();
// 5, 6, 7, 8, 4
EpochRecord epoch4 = stream.getEpochRecord(4, context).join();
// 5, 6, 7, 8, 9
EpochRecord epoch5 = stream.getEpochRecord(5, context).join();
// 0`, 1`, 2`, 3`, 4`
EpochRecord epoch6 = stream.getEpochRecord(6, context).join();
// 5`, 6`, 7`, 8`, 9`
EpochRecord epoch7 = stream.getEpochRecord(7, context).join();
// 10, 6`, 7`, 8`, 9`
EpochRecord epoch8 = stream.getEpochRecord(8, context).join();
// 10, 11, 7`, 8`, 9`
EpochRecord epoch9 = stream.getEpochRecord(9, context).join();
// 10, 11, 12, 8`, 9`
EpochRecord epoch10 = stream.getEpochRecord(10, context).join();
// 10, 11, 12, 13, 9`
EpochRecord epoch11 = stream.getEpochRecord(11, context).join();
// 10, 11, 12, 13, 14
EpochRecord epoch12 = stream.getEpochRecord(12, context).join();
// the five original key ranges; index i identifies the i-th "column" of the history table
List<Map.Entry<Double, Double>> keyRanges = epoch0.getSegments().stream().map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd())).collect(Collectors.toList());
// create a streamCut1 using 0, 6, 7, 8, 9`
HashMap<Long, Long> streamCut1 = new HashMap<>();
// segment 0 from epoch 0 // sealed in epoch 1
streamCut1.put(segmentInRange(epoch0, keyRanges.get(0)), 10L);
// segment 6 from epoch 2 // sealed in epoch 6
streamCut1.put(segmentInRange(epoch2, keyRanges.get(1)), 10L);
// segment 7 from epoch 3 // sealed in epoch 6
streamCut1.put(segmentInRange(epoch3, keyRanges.get(2)), 10L);
// segment 8 from epoch 5 // sealed in epoch 6
streamCut1.put(segmentInRange(epoch5, keyRanges.get(3)), 10L);
// segment 9` from epoch 7 // created in epoch 7
streamCut1.put(segmentInRange(epoch7, keyRanges.get(4)), 10L);
Map<StreamSegmentRecord, Integer> span1 = stream.computeStreamCutSpan(streamCut1, context).join();
assertEquals(0, spanEpochOf(span1, startingSegmentNumber + 0));
assertEquals(5, spanEpochOf(span1, startingSegmentNumber + 6));
assertEquals(5, spanEpochOf(span1, startingSegmentNumber + 7));
assertEquals(5, spanEpochOf(span1, startingSegmentNumber + 8));
assertEquals(7, spanEpochOf(span1, startingSegmentNumber + 9));
// create a streamCut2 5, 6`, 12, 8`, 14
HashMap<Long, Long> streamCut2 = new HashMap<>();
// segment 5 from epoch 1 // sealed in epoch 6
streamCut2.put(segmentInRange(epoch1, keyRanges.get(0)), 10L);
// segment 6` from epoch 7 // sealed in epoch 9
streamCut2.put(segmentInRange(epoch7, keyRanges.get(1)), 10L);
// segment 12 from epoch 10 // never sealed
streamCut2.put(segmentInRange(epoch10, keyRanges.get(2)), 10L);
// segment 8` from epoch 7 // sealed in epoch 11
streamCut2.put(segmentInRange(epoch7, keyRanges.get(3)), 10L);
// segment 14 from epoch 12 // never sealed
streamCut2.put(segmentInRange(epoch12, keyRanges.get(4)), 10L);
Map<StreamSegmentRecord, Integer> span2 = stream.computeStreamCutSpan(streamCut2, context).join();
assertEquals(5, spanEpochOf(span2, startingSegmentNumber + 5));
assertEquals(8, spanEpochOf(span2, startingSegmentNumber + 6));
assertEquals(12, spanEpochOf(span2, startingSegmentNumber + 12));
assertEquals(10, spanEpochOf(span2, startingSegmentNumber + 8));
assertEquals(12, spanEpochOf(span2, startingSegmentNumber + 14));
Set<StreamSegmentRecord> segmentsBetween = stream.segmentsBetweenStreamCutSpans(span1, span2, context).join();
Set<Long> segmentIdsBetween = segmentsBetween.stream().map(x -> x.segmentId()).collect(Collectors.toSet());
// create a streamCut1 using 0, 6, 7, 8, 9`
// create a streamCut2 5, 6`, 12, 8`, 14
// 0, 5, 6, 1`, 6`, 7, 2`, 7`, 12, 8, 3`, 8`, 9`, 14
Set<Long> expected = new HashSet<>();
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 0, 0));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 5, 1));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 6, 2));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 1, 6));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 6, 7));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 7, 3));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 2, 6));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 7, 7));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 12, 10));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 8, 4));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 3, 6));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 8, 7));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 9, 7));
expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 14, 12));
assertEquals(expected, segmentIdsBetween);
// Note: all sealed segments have sizes 100L. So expected size = 1400 - 10x5 - 90 x 5 = 900
long sizeBetween = stream.sizeBetweenStreamCuts(streamCut1, streamCut2, segmentsBetween, context).join();
assertEquals(900L, sizeBetween);
}

/**
 * Returns the id of the segment in {@code epoch} whose key range overlaps {@code range}.
 *
 * @param epoch the epoch record to search.
 * @param range the key range (start, end) the segment must overlap.
 * @return the id of the overlapping segment.
 * @throws AssertionError if no segment in the epoch overlaps the range.
 */
private long segmentInRange(EpochRecord epoch, Map.Entry<Double, Double> range) {
    return epoch.getSegments().stream()
                .filter(x -> x.overlaps(range.getKey(), range.getValue()))
                .findAny()
                .orElseThrow(() -> new AssertionError("no segment overlapping range " + range))
                .segmentId();
}

/**
 * Returns the epoch recorded in {@code span} for the segment with the given segment number.
 *
 * @param span          a stream-cut span, mapping segment records to epochs.
 * @param segmentNumber the segment number to look up.
 * @return the epoch associated with that segment in the span.
 * @throws AssertionError if the span has no entry for the segment number.
 */
private int spanEpochOf(Map<StreamSegmentRecord, Integer> span, int segmentNumber) {
    return span.entrySet().stream()
               .filter(x -> x.getKey().getSegmentNumber() == segmentNumber)
               .findAny()
               .orElseThrow(() -> new AssertionError("segment number " + segmentNumber + " not in span"))
               .getValue();
}
Example usage of io.pravega.controller.store.stream.records.StreamSegmentRecord in the Pravega project: class StreamTestBase, method testStreamCutSegmentsBetween.
// endregion
/*
Segment mapping of stream8 used for the below tests.
+-------+------+-------+-------+
|       |  8   |       |       |
|   2   +------|       |       |
|       |  7   |  10   |       |
+-------+ -----|       |       |
|       |  6   |       |       |
|   1   +------+-------+-------+
|       |  5   |       |       |
+-------+------|       |       |
|       |  4   |   9   |       |
|   0   +------|       |       |
|       |  3   |       |       |
+-------+------+----------------
*/
/**
 * Verifies {@code getSegmentsBetweenStreamCuts} on the three-epoch history shown above:
 * queries with only a "from" cut (open-ended towards the tail), rejection of cuts where
 * "to" lies behind or overlaps "from", success cases with both cuts, and a query with an
 * empty "from" cut (open-ended towards the head).
 */
@Test(timeout = 30000L)
public void testStreamCutSegmentsBetween() {
int startingSegmentNumber = new Random().nextInt(2000);
List<AbstractMap.SimpleEntry<Double, Double>> newRanges;
long timestamp = System.currentTimeMillis();
PersistentStreamBase stream = createStream("scope", "stream" + startingSegmentNumber, timestamp, 3, startingSegmentNumber);
OperationContext context = getContext();
List<StreamSegmentRecord> initialSegments = stream.getActiveSegments(context).join();
StreamSegmentRecord zero = initialSegments.stream().filter(x -> x.segmentId() == computeSegmentId(startingSegmentNumber + 0, 0)).findAny().get();
StreamSegmentRecord one = initialSegments.stream().filter(x -> x.segmentId() == computeSegmentId(startingSegmentNumber + 1, 0)).findAny().get();
StreamSegmentRecord two = initialSegments.stream().filter(x -> x.segmentId() == computeSegmentId(startingSegmentNumber + 2, 0)).findAny().get();
// epoch-1 segments: each of 0, 1, 2 splits into two (see diagram above)
StreamSegmentRecord three = new StreamSegmentRecord(startingSegmentNumber + 3, 1, timestamp, 0.0, 0.16);
StreamSegmentRecord four = new StreamSegmentRecord(startingSegmentNumber + 4, 1, timestamp, 0.16, zero.getKeyEnd());
StreamSegmentRecord five = new StreamSegmentRecord(startingSegmentNumber + 5, 1, timestamp, one.getKeyStart(), 0.5);
StreamSegmentRecord six = new StreamSegmentRecord(startingSegmentNumber + 6, 1, timestamp, 0.5, one.getKeyEnd());
StreamSegmentRecord seven = new StreamSegmentRecord(startingSegmentNumber + 7, 1, timestamp, two.getKeyStart(), 0.83);
StreamSegmentRecord eight = new StreamSegmentRecord(startingSegmentNumber + 8, 1, timestamp, 0.83, two.getKeyEnd());
// epoch-2 segments: the six epoch-1 segments merge into 9 (lower half) and 10 (upper half)
StreamSegmentRecord nine = new StreamSegmentRecord(startingSegmentNumber + 9, 2, timestamp, 0.0, 0.5);
StreamSegmentRecord ten = new StreamSegmentRecord(startingSegmentNumber + 10, 2, timestamp, 0.5, 1);
// 2 -> 7, 8
// 1 -> 5, 6
// 0 -> 3, 4
LinkedList<StreamSegmentRecord> newsegments = new LinkedList<>();
newsegments.add(three);
newsegments.add(four);
newsegments.add(five);
newsegments.add(six);
newsegments.add(seven);
newsegments.add(eight);
List<Long> segmentsToSeal = stream.getActiveSegments(context).join().stream().map(x -> x.segmentId()).collect(Collectors.toList());
newRanges = newsegments.stream().map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd())).collect(Collectors.toList());
scaleStream(stream, ++timestamp, segmentsToSeal, new LinkedList<>(newRanges), Collections.emptyMap());
// 6, 7, 8 -> 10
// 3, 4, 5 -> 9
newsegments = new LinkedList<>();
newsegments.add(nine);
newsegments.add(ten);
segmentsToSeal = stream.getActiveSegments(context).join().stream().map(x -> x.segmentId()).collect(Collectors.toList());
newRanges = newsegments.stream().map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd())).collect(Collectors.toList());
scaleStream(stream, ++timestamp, segmentsToSeal, new LinkedList<>(newRanges), Collections.emptyMap());
// only from
// "from" at the head with empty "to": everything from the cut to the tail (all 11 segments)
Map<Long, Long> fromStreamCut = new HashMap<>();
fromStreamCut.put(zero.segmentId(), 0L);
fromStreamCut.put(one.segmentId(), 0L);
fromStreamCut.put(two.segmentId(), 0L);
List<StreamSegmentRecord> segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCut, Collections.emptyMap(), context).join();
assertEquals(11, segmentsBetween.size());
// "from" past segment 1: its successors 5 and 6 replace it, so segment 1 is excluded
fromStreamCut = new HashMap<>();
fromStreamCut.put(zero.segmentId(), 0L);
fromStreamCut.put(two.segmentId(), 0L);
fromStreamCut.put(five.segmentId(), 0L);
fromStreamCut.put(six.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCut, Collections.emptyMap(), context).join();
assertEquals(10, segmentsBetween.size());
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == one.segmentId()));
fromStreamCut = new HashMap<>();
fromStreamCut.put(zero.segmentId(), 0L);
fromStreamCut.put(five.segmentId(), 0L);
fromStreamCut.put(ten.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCut, Collections.emptyMap(), context).join();
assertEquals(6, segmentsBetween.size());
// 0, 3, 4, 5, 9, 10
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == one.segmentId() || x.segmentId() == two.segmentId() || x.segmentId() == six.segmentId() || x.segmentId() == seven.segmentId() || x.segmentId() == eight.segmentId()));
// "from" past epoch-1 segments on the upper half and epoch-2 segment 9 on the lower half
fromStreamCut = new HashMap<>();
fromStreamCut.put(six.segmentId(), 0L);
fromStreamCut.put(seven.segmentId(), 0L);
fromStreamCut.put(eight.segmentId(), 0L);
fromStreamCut.put(nine.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCut, Collections.emptyMap(), context).join();
assertEquals(5, segmentsBetween.size());
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == one.segmentId() || x.segmentId() == two.segmentId() || x.segmentId() == three.segmentId() || x.segmentId() == four.segmentId() || x.segmentId() == five.segmentId()));
// "from" at the tail epoch: only segments 9 and 10 remain
fromStreamCut = new HashMap<>();
fromStreamCut.put(ten.segmentId(), 0L);
fromStreamCut.put(nine.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCut, Collections.emptyMap(), context).join();
assertEquals(2, segmentsBetween.size());
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == one.segmentId() || x.segmentId() == two.segmentId() || x.segmentId() == three.segmentId() || x.segmentId() == four.segmentId() || x.segmentId() == five.segmentId() || x.segmentId() == six.segmentId() || x.segmentId() == seven.segmentId() || x.segmentId() == eight.segmentId()));
// to before from
fromStreamCut = new HashMap<>();
fromStreamCut.put(three.segmentId(), 0L);
fromStreamCut.put(four.segmentId(), 0L);
fromStreamCut.put(one.segmentId(), 0L);
fromStreamCut.put(two.segmentId(), 0L);
Map<Long, Long> toStreamCut = new HashMap<>();
toStreamCut.put(zero.segmentId(), 0L);
toStreamCut.put(one.segmentId(), 0L);
toStreamCut.put(two.segmentId(), 0L);
// effectively-final copy so the lambda below can capture it
Map<Long, Long> fromStreamCutCopy = fromStreamCut;
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.getSegmentsBetweenStreamCuts(fromStreamCutCopy, toStreamCut, context), e -> Exceptions.unwrap(e) instanceof IllegalArgumentException);
// to and from overlap
Map<Long, Long> fromStreamCutOverlap = new HashMap<>();
fromStreamCutOverlap.put(three.segmentId(), 0L);
fromStreamCutOverlap.put(four.segmentId(), 0L);
fromStreamCutOverlap.put(one.segmentId(), 0L);
fromStreamCutOverlap.put(two.segmentId(), 0L);
Map<Long, Long> toStreamCutOverlap = new HashMap<>();
toStreamCutOverlap.put(zero.segmentId(), 0L);
toStreamCutOverlap.put(five.segmentId(), 0L);
toStreamCutOverlap.put(six.segmentId(), 0L);
toStreamCutOverlap.put(two.segmentId(), 0L);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.getSegmentsBetweenStreamCuts(fromStreamCutOverlap, toStreamCutOverlap, context), e -> Exceptions.unwrap(e) instanceof IllegalArgumentException);
// partial overlap: "to" is ahead on one part of the keyspace but behind on another
Map<Long, Long> fromPartialOverlap = new HashMap<>();
fromPartialOverlap.put(zero.segmentId(), 0L);
fromPartialOverlap.put(five.segmentId(), 0L);
fromPartialOverlap.put(six.segmentId(), 0L);
fromPartialOverlap.put(two.segmentId(), 0L);
Map<Long, Long> toPartialOverlap = new HashMap<>();
toPartialOverlap.put(eight.segmentId(), 0L);
toPartialOverlap.put(seven.segmentId(), 0L);
toPartialOverlap.put(one.segmentId(), 0L);
toPartialOverlap.put(three.segmentId(), 0L);
toPartialOverlap.put(four.segmentId(), 0L);
AssertExtensions.assertSuppliedFutureThrows("", () -> stream.getSegmentsBetweenStreamCuts(fromPartialOverlap, toPartialOverlap, context), e -> Exceptions.unwrap(e) instanceof IllegalArgumentException);
// Success cases
Map<Long, Long> fromStreamCutSuccess = new HashMap<>();
fromStreamCutSuccess.put(zero.segmentId(), 0L);
fromStreamCutSuccess.put(one.segmentId(), 0L);
fromStreamCutSuccess.put(two.segmentId(), 0L);
Map<Long, Long> toStreamCutSuccess = new HashMap<>();
toStreamCutSuccess.put(zero.segmentId(), 0L);
toStreamCutSuccess.put(five.segmentId(), 0L);
toStreamCutSuccess.put(six.segmentId(), 0L);
toStreamCutSuccess.put(two.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCutSuccess, toStreamCutSuccess, context).join();
assertEquals(5, segmentsBetween.size());
assertTrue(segmentsBetween.stream().allMatch(x -> x.segmentId() == zero.segmentId() || x.segmentId() == one.segmentId() || x.segmentId() == two.segmentId() || x.segmentId() == five.segmentId() || x.segmentId() == six.segmentId()));
fromStreamCutSuccess = new HashMap<>();
fromStreamCutSuccess.put(zero.segmentId(), 0L);
fromStreamCutSuccess.put(five.segmentId(), 0L);
fromStreamCutSuccess.put(six.segmentId(), 0L);
fromStreamCutSuccess.put(two.segmentId(), 0L);
toStreamCutSuccess = new HashMap<>();
toStreamCutSuccess.put(nine.segmentId(), 0L);
toStreamCutSuccess.put(ten.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(fromStreamCutSuccess, toStreamCutSuccess, context).join();
assertEquals(10, segmentsBetween.size());
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == one.segmentId()));
// empty from
// empty "from" means start at the head of the stream
toStreamCutSuccess = new HashMap<>();
toStreamCutSuccess.put(zero.segmentId(), 0L);
toStreamCutSuccess.put(five.segmentId(), 0L);
toStreamCutSuccess.put(six.segmentId(), 0L);
toStreamCutSuccess.put(two.segmentId(), 0L);
segmentsBetween = stream.getSegmentsBetweenStreamCuts(Collections.emptyMap(), toStreamCutSuccess, context).join();
assertEquals(5, segmentsBetween.size());
assertTrue(segmentsBetween.stream().noneMatch(x -> x.segmentId() == three.segmentId() || x.segmentId() == four.segmentId() || x.segmentId() == seven.segmentId() || x.segmentId() == eight.segmentId() || x.segmentId() == nine.segmentId() || x.segmentId() == ten.segmentId()));
}
Example usage of io.pravega.controller.store.stream.records.StreamSegmentRecord in the Pravega project: class StreamTransactionMetadataTasksTest, method writerInitializationTest.
/**
 * Verifies event-writer initialization behavior of {@code StreamTransactionMetadataTasks}:
 * a pending commit-event write is cancelled when the tasks object is closed before
 * initialization; writes complete once writers are initialized via a client factory; and
 * create/ping proceed without writers while commit/abort wait for direct writer set up.
 */
@Test(timeout = 10000)
public void writerInitializationTest() throws Exception {
EventStreamWriterMock<CommitEvent> commitEventWriter = new EventStreamWriterMock<>();
EventStreamWriterMock<AbortEvent> abortEventWriter = new EventStreamWriterMock<>();
StreamMetadataStore mockStreamStore = spy(StreamStoreFactory.createZKStore(zkClient, executor));
final long lease = 5000;
// region close before initialize
txnTasks = new StreamTransactionMetadataTasks(mockStreamStore, SegmentHelperMock.getSegmentHelperMock(), executor, "host", new GrpcAuthHelper(this.authEnabled, "secret", 300));
CompletableFuture<Void> writeFuture = txnTasks.writeCommitEvent(new CommitEvent("scope", "stream", 0));
// no writers yet, so the write stays pending
assertFalse(writeFuture.isDone());
// closing before initialization cancels the pending write
txnTasks.close();
AssertExtensions.assertFutureThrows("", writeFuture, e -> Exceptions.unwrap(e) instanceof CancellationException);
// endregion
// region test initialize writers with client factory
txnTasks = new StreamTransactionMetadataTasks(mockStreamStore, SegmentHelperMock.getSegmentHelperMock(), executor, "host", new GrpcAuthHelper(this.authEnabled, "secret", 300));
writeFuture = txnTasks.writeCommitEvent(new CommitEvent("scope", "stream", 0));
// wire a mock client factory that hands back our mock writers for the configured stream names
EventStreamClientFactory clientFactoryMock = mock(EventStreamClientFactory.class);
ControllerEventProcessorConfig processorConfigMock = mock(ControllerEventProcessorConfig.class);
String commitStreamName = "commitStream";
doAnswer(x -> commitStreamName).when(processorConfigMock).getCommitStreamName();
doAnswer(x -> commitEventWriter).when(clientFactoryMock).createEventWriter(eq(commitStreamName), any(), any());
String abortStreamName = "abortStream";
doAnswer(x -> abortStreamName).when(processorConfigMock).getAbortStreamName();
doAnswer(x -> abortEventWriter).when(clientFactoryMock).createEventWriter(eq(abortStreamName), any(), any());
// future should not have completed as we have not initialized the writers.
assertFalse(writeFuture.isDone());
// initialize the writers. write future should have completed now.
txnTasks.initializeStreamWriters(clientFactoryMock, processorConfigMock);
assertTrue(Futures.await(writeFuture));
txnTasks.close();
// endregion
// region test method calls and initialize writers with direct writer set up method call
txnTasks = new StreamTransactionMetadataTasks(mockStreamStore, SegmentHelperMock.getSegmentHelperMock(), executor, "host", new GrpcAuthHelper(this.authEnabled, "secret", 300));
streamStore.createScope(SCOPE, null, executor).join();
streamStore.createStream(SCOPE, STREAM, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build(), 1L, null, executor).join();
streamStore.setState(SCOPE, STREAM, State.ACTIVE, null, executor).join();
CompletableFuture<Pair<VersionedTransactionData, List<StreamSegmentRecord>>> txnCreateFuture = txnTasks.createTxn(SCOPE, STREAM, lease, 0L, 0L);
// create and ping transactions should not wait for writer initialization and complete immediately.
txnCreateFuture.join();
assertTrue(Futures.await(txnCreateFuture));
UUID firstTxnId = txnCreateFuture.join().getKey().getId();
CompletableFuture<PingTxnStatus> pingFuture = txnTasks.pingTxn(SCOPE, STREAM, firstTxnId, lease, 0L);
assertTrue(Futures.await(pingFuture));
// commit requires writers; it stays pending until they are supplied directly
CompletableFuture<TxnStatus> commitFuture = txnTasks.commitTxn(SCOPE, STREAM, firstTxnId, 0L);
assertFalse(commitFuture.isDone());
txnTasks.initializeStreamWriters(commitEventWriter, abortEventWriter);
assertTrue(Futures.await(commitFuture));
// abort also works once writers are initialized
UUID secondTxnId = txnTasks.createTxn(SCOPE, STREAM, lease, 0L, 1024 * 1024L).join().getKey().getId();
assertTrue(Futures.await(txnTasks.abortTxn(SCOPE, STREAM, secondTxnId, null, 0L)));
}
End of aggregated usage examples.