Example usage of io.pravega.shared.NameUtils.computeSegmentId from the pravega project,
taken from class StreamMetadataTasksTest, method truncateStreamTest:
/**
 * Exercises the stream-truncation workflow end to end against the (partially mocked) stream store:
 * a successful truncation, truncation attempted while the stream is in a non-ACTIVE state,
 * back-to-back truncation requests, re-delivery of a truncate event (idempotence), and
 * re-creation of an already-active stream.
 */
@Test(timeout = 30000)
public void truncateStreamTest() throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(policy).build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    assertNotEquals(0, consumer.getCurrentSegments(SCOPE, "test", 0L).get().size());
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);

    // Scale segment 1 into two new segments so later truncations run against a multi-epoch stream.
    List<Map.Entry<Double, Double>> newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.5, 0.75));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.75, 1.0));
    ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(1L), newRanges, 30, 0L).get();
    assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus());
    ScaleOperationTask scaleTask = new ScaleOperationTask(streamMetadataTasks, streamStorePartialMock, executor);
    assertTrue(Futures.await(scaleTask.execute((ScaleOpEvent) requestEventWriter.eventQueue.take())));

    // start truncation
    StreamTruncationRecord truncProp = streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor).join().getObject();
    assertFalse(truncProp.isUpdating());

    // 1. happy day test: a truncate request on an ACTIVE stream should succeed.
    Map<Long, Long> streamCut = new HashMap<>();
    streamCut.put(0L, 1L);
    streamCut.put(1L, 11L);
    CompletableFuture<UpdateStreamStatus.Status> truncateFuture = streamMetadataTasks.truncateStream(SCOPE, "test", streamCut, 0L);
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    assertEquals(UpdateStreamStatus.Status.SUCCESS, truncateFuture.join());
    truncProp = streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor).join().getObject();
    assertEquals(streamCut, truncProp.getStreamCut());

    // 2. change state to SCALING: the truncate request posts its event and records the pending
    // truncation, but processing must fail until the stream returns to ACTIVE.
    streamStorePartialMock.setState(SCOPE, "test", State.SCALING, null, executor).get();
    long two = NameUtils.computeSegmentId(2, 1);
    long three = NameUtils.computeSegmentId(3, 1);
    Map<Long, Long> streamCut2 = new HashMap<>();
    streamCut2.put(0L, 1L);
    streamCut2.put(two, 1L);
    streamCut2.put(three, 1L);
    // Fire and forget; progress is observed by polling the truncation record below.
    streamMetadataTasks.truncateStream(SCOPE, "test", streamCut2, 0L);
    AtomicBoolean loop = new AtomicBoolean(false);
    // Poll until the truncation record transitions to "updating", i.e. the first step has run.
    Futures.loop(() -> !loop.get(),
            () -> Futures.delayedFuture(() -> streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor), 1000, executor)
                    .thenApply(x -> x.getObject().isUpdating())
                    .thenAccept(loop::set), executor).join();
    // event posted, first step performed. now pick the event for processing
    TruncateStreamTask truncateStreamTask = new TruncateStreamTask(streamMetadataTasks, streamStorePartialMock, executor);
    TruncateStreamEvent taken = (TruncateStreamEvent) requestEventWriter.eventQueue.take();
    AssertExtensions.assertFutureThrows("", truncateStreamTask.execute(taken),
            e -> Exceptions.unwrap(e) instanceof StoreException.OperationNotAllowedException);
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    // now with state = active, process the same event. it should succeed now.
    assertTrue(Futures.await(truncateStreamTask.execute(taken)));

    // 3. multiple back to back updates.
    Map<Long, Long> streamCut3 = new HashMap<>();
    streamCut3.put(0L, 12L);
    streamCut3.put(two, 12L);
    streamCut3.put(three, 12L);
    CompletableFuture<UpdateStreamStatus.Status> truncateOp1 = streamMetadataTasks.truncateStream(SCOPE, "test", streamCut3, 0L);
    // ensure that previous updatestream has posted the event and set status to updating,
    // only then call second updateStream
    AtomicBoolean loop2 = new AtomicBoolean(false);
    Futures.loop(() -> !loop2.get(),
            () -> streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor)
                    .thenApply(x -> x.getObject().isUpdating())
                    .thenAccept(loop2::set), executor).join();
    truncProp = streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor).join().getObject();
    assertEquals(streamCut3, truncProp.getStreamCut());
    assertTrue(truncProp.isUpdating());
    // post the second update request. This should fail here itself as previous one has started.
    Map<Long, Long> streamCut4 = new HashMap<>();
    streamCut4.put(0L, 14L);
    streamCut4.put(two, 14L);
    streamCut4.put(three, 14L);
    CompletableFuture<UpdateStreamStatus.Status> truncateOpFuture2 = streamMetadataTasks.truncateStream(SCOPE, "test", streamCut4, 0L);
    assertEquals(UpdateStreamStatus.Status.FAILURE, truncateOpFuture2.join());
    // process event
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    // verify that first request for update also completes with success.
    assertEquals(UpdateStreamStatus.Status.SUCCESS, truncateOp1.join());
    truncProp = streamStorePartialMock.getTruncationRecord(SCOPE, "test", null, executor).join().getObject();
    assertEquals(streamCut3, truncProp.getStreamCut());
    assertFalse(truncProp.isUpdating());

    // 4. a truncate event arriving with no pending truncation should complete and reset the
    // stream to ACTIVE; re-delivering the same event must be a harmless no-op (idempotence).
    streamStorePartialMock.setState(SCOPE, "test", State.TRUNCATING, null, executor).join();
    TruncateStreamEvent event = new TruncateStreamEvent(SCOPE, "test", System.nanoTime());
    assertTrue(Futures.await(truncateStreamTask.execute(event)));
    // execute the event again. It should complete without doing anything.
    truncateStreamTask.execute(event).join();
    assertEquals(State.ACTIVE, streamStorePartialMock.getState(SCOPE, "test", true, null, executor).join());

    // 5. re-creating an existing ACTIVE stream must report EXISTS_ACTIVE rather than succeed.
    doReturn(CompletableFuture.completedFuture(true)).when(streamStorePartialMock).isScopeSealed(anyString(), any(), any());
    CompletableFuture<CreateStreamResponse> streamResponse = streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor);
    assertEquals(CreateStreamResponse.CreateStatus.EXISTS_ACTIVE, streamResponse.get().getStatus());
}
Example usage of io.pravega.shared.NameUtils.computeSegmentId from the pravega project,
taken from class StreamTestBase, method testStreamCutsWithMultipleChunks:
/**
 * Verifies stream-cut span computation, the segments lying between two stream cuts, and the
 * size between stream cuts, over a history long enough to span multiple history chunks.
 *
 * Stream history (primed names, e.g. 0`, denote duplicate segments created by a rolling txn):
 * epoch0 = 0, 1, 2, 3, 4
 * epoch1 = 5, 1, 2, 3, 4
 * epoch2 = 5, 6, 2, 3, 4
 * epoch3 = 5, 6, 7, 3, 4
 * epoch4 = 5, 6, 7, 8, 4
 * epoch5 = 5, 6, 7, 8, 9
 * epoch6 = 0`, 1`, 2`, 3`, 4`
 * epoch7 = 5`, 6`, 7`, 8`, 9`
 * epoch8 = 10, 6`, 7`, 8`, 9`
 * epoch9 = 10, 11, 7`, 8`, 9`
 * epoch10 = 10, 11, 12, 8`, 9`
 * epoch11 = 10, 11, 12, 13, 9`
 * epoch12 = 10, 11, 12, 13, 14
 */
@Test(timeout = 30000L)
public void testStreamCutsWithMultipleChunks() {
    String scope = "streamCutTest";
    String name = "streamCutTest";
    // Randomize the starting segment number so segment-id arithmetic is exercised at arbitrary offsets.
    int startingSegmentNumber = new Random().nextInt(2000);
    PersistentStreamBase stream = createScaleAndRollStreamForMultiChunkTests(name, scope, startingSegmentNumber, System::currentTimeMillis);
    OperationContext context = getContext();
    // Fetch every epoch record; the joins double as a check that all 13 epochs exist.
    EpochRecord epoch0 = stream.getEpochRecord(0, context).join();    // 0, 1, 2, 3, 4
    EpochRecord epoch1 = stream.getEpochRecord(1, context).join();    // 5, 1, 2, 3, 4
    EpochRecord epoch2 = stream.getEpochRecord(2, context).join();    // 5, 6, 2, 3, 4
    EpochRecord epoch3 = stream.getEpochRecord(3, context).join();    // 5, 6, 7, 3, 4
    EpochRecord epoch4 = stream.getEpochRecord(4, context).join();    // 5, 6, 7, 8, 4
    EpochRecord epoch5 = stream.getEpochRecord(5, context).join();    // 5, 6, 7, 8, 9
    EpochRecord epoch6 = stream.getEpochRecord(6, context).join();    // 0`, 1`, 2`, 3`, 4`
    EpochRecord epoch7 = stream.getEpochRecord(7, context).join();    // 5`, 6`, 7`, 8`, 9`
    EpochRecord epoch8 = stream.getEpochRecord(8, context).join();    // 10, 6`, 7`, 8`, 9`
    EpochRecord epoch9 = stream.getEpochRecord(9, context).join();    // 10, 11, 7`, 8`, 9`
    EpochRecord epoch10 = stream.getEpochRecord(10, context).join();  // 10, 11, 12, 8`, 9`
    EpochRecord epoch11 = stream.getEpochRecord(11, context).join();  // 10, 11, 12, 13, 9`
    EpochRecord epoch12 = stream.getEpochRecord(12, context).join();  // 10, 11, 12, 13, 14
    // The five key ranges of epoch 0; every epoch has exactly one segment overlapping each range.
    List<Map.Entry<Double, Double>> keyRanges = epoch0.getSegments().stream()
            .map(x -> new AbstractMap.SimpleEntry<>(x.getKeyStart(), x.getKeyEnd()))
            .collect(Collectors.toList());

    // streamCut1 cuts across epochs at segments 0, 6, 7, 8, 9`.
    HashMap<Long, Long> streamCut1 = new HashMap<>();
    streamCut1.put(overlappingSegmentId(epoch0, keyRanges.get(0)), 10L);  // segment 0, sealed in epoch 1
    streamCut1.put(overlappingSegmentId(epoch2, keyRanges.get(1)), 10L);  // segment 6, sealed in epoch 6
    streamCut1.put(overlappingSegmentId(epoch3, keyRanges.get(2)), 10L);  // segment 7, sealed in epoch 6
    streamCut1.put(overlappingSegmentId(epoch5, keyRanges.get(3)), 10L);  // segment 8, sealed in epoch 6
    streamCut1.put(overlappingSegmentId(epoch7, keyRanges.get(4)), 10L);  // segment 9`, created in epoch 7
    Map<StreamSegmentRecord, Integer> span1 = stream.computeStreamCutSpan(streamCut1, context).join();
    assertEquals(0, spanEpoch(span1, startingSegmentNumber + 0));
    assertEquals(5, spanEpoch(span1, startingSegmentNumber + 6));
    assertEquals(5, spanEpoch(span1, startingSegmentNumber + 7));
    assertEquals(5, spanEpoch(span1, startingSegmentNumber + 8));
    assertEquals(7, spanEpoch(span1, startingSegmentNumber + 9));

    // streamCut2 cuts across epochs at segments 5, 6`, 12, 8`, 14.
    HashMap<Long, Long> streamCut2 = new HashMap<>();
    streamCut2.put(overlappingSegmentId(epoch1, keyRanges.get(0)), 10L);   // segment 5, sealed in epoch 6
    streamCut2.put(overlappingSegmentId(epoch7, keyRanges.get(1)), 10L);   // segment 6`, sealed in epoch 9
    streamCut2.put(overlappingSegmentId(epoch10, keyRanges.get(2)), 10L);  // segment 12, never sealed
    streamCut2.put(overlappingSegmentId(epoch7, keyRanges.get(3)), 10L);   // segment 8`, sealed in epoch 11
    streamCut2.put(overlappingSegmentId(epoch12, keyRanges.get(4)), 10L);  // segment 14, never sealed
    Map<StreamSegmentRecord, Integer> span2 = stream.computeStreamCutSpan(streamCut2, context).join();
    assertEquals(5, spanEpoch(span2, startingSegmentNumber + 5));
    assertEquals(8, spanEpoch(span2, startingSegmentNumber + 6));
    assertEquals(12, spanEpoch(span2, startingSegmentNumber + 12));
    assertEquals(10, spanEpoch(span2, startingSegmentNumber + 8));
    assertEquals(12, spanEpoch(span2, startingSegmentNumber + 14));

    Set<StreamSegmentRecord> segmentsBetween = stream.segmentsBetweenStreamCutSpans(span1, span2, context).join();
    Set<Long> segmentIdsBetween = segmentsBetween.stream().map(x -> x.segmentId()).collect(Collectors.toSet());
    // Segments lying between streamCut1 (0, 6, 7, 8, 9`) and streamCut2 (5, 6`, 12, 8`, 14):
    // 0, 5, 6, 1`, 6`, 7, 2`, 7`, 12, 8, 3`, 8`, 9`, 14
    Set<Long> expected = new HashSet<>();
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 0, 0));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 5, 1));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 6, 2));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 1, 6));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 6, 7));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 7, 3));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 2, 6));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 7, 7));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 12, 10));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 8, 4));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 3, 6));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 8, 7));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 9, 7));
    expected.add(NameUtils.computeSegmentId(startingSegmentNumber + 14, 12));
    assertEquals(expected, segmentIdsBetween);

    // Note: all sealed segments have sizes 100L. So expected size = 1400 - 10x5 - 90 x 5 = 900
    long sizeBetween = stream.sizeBetweenStreamCuts(streamCut1, streamCut2, segmentsBetween, context).join();
    assertEquals(900L, sizeBetween);
}

/**
 * Returns the id of the segment in {@code epoch} whose key space overlaps the given key range.
 * Fails the test with a diagnostic message if no such segment exists.
 */
private long overlappingSegmentId(EpochRecord epoch, Map.Entry<Double, Double> range) {
    return epoch.getSegments().stream()
            .filter(x -> x.overlaps(range.getKey(), range.getValue()))
            .findAny()
            .orElseThrow(() -> new AssertionError("no segment overlapping key range " + range))
            .segmentId();
}

/**
 * Returns the epoch recorded in {@code span} for the segment with the given segment number.
 * Fails the test with a diagnostic message if the segment is not part of the span.
 */
private int spanEpoch(Map<StreamSegmentRecord, Integer> span, int segmentNumber) {
    return span.entrySet().stream()
            .filter(x -> x.getKey().getSegmentNumber() == segmentNumber)
            .findAny()
            .orElseThrow(() -> new AssertionError("segment number " + segmentNumber + " not found in span"))
            .getValue();
}
Aggregations