Use of io.pravega.client.stream.ScalingPolicy in project pravega by pravega.
From the class ZkStreamTest, method testZkStream:
@Test
public void testZkStream() throws Exception {
    double keyChunk = 1.0 / 5;
    final ScalingPolicy policy = ScalingPolicy.fixed(5);
    final StreamMetadataStore store = new ZKStreamMetadataStore(cli, executor);
    final String streamName = "test";
    store.createScope(SCOPE).get();
    StreamConfiguration streamConfig = StreamConfiguration.builder()
            .scope(streamName)
            .streamName(streamName)
            .scalingPolicy(policy)
            .build();
    store.createStream(SCOPE, streamName, streamConfig, System.currentTimeMillis(), null, executor).get();
    store.setState(SCOPE, streamName, State.ACTIVE, null, executor).get();
    OperationContext context = store.createContext(SCOPE, streamName);
    List<Segment> segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 5);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 1, 2, 3, 4).contains(x.getNumber())));
    long start = segments.get(0).getStart();
    assertEquals(store.getConfiguration(SCOPE, streamName, context, executor).get(), streamConfig);
    List<AbstractMap.SimpleEntry<Double, Double>> newRanges;
    // existing ranges: 0 = 0 - .2, 1 = .2 - .4, 2 = .4 - .6, 3 = .6 - .8, 4 = .8 - 1.0
    // 3, 4 -> 5 = .6 - 1.0
    newRanges = Collections.singletonList(new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
    long scale1 = start + 10000;
    ArrayList<Integer> sealedSegments = Lists.newArrayList(3, 4);
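    // The scale that follows is a three-step protocol against the store:
    // startScale proposes the new ranges, scaleNewSegmentsCreated records the
    // freshly created segments, and scaleSegmentsSealed seals the old ones;
    // the stream must be moved into SCALING state in between.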
    StartScaleResponse response = store.startScale(SCOPE, streamName, sealedSegments, newRanges, scale1, false, context, executor).get();
    List<Segment> newSegments = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments, newSegments, response.getActiveEpoch(), scale1, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), newSegments, response.getActiveEpoch(), scale1, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 4);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 1, 2, 5).contains(x.getNumber())));
    // 1 -> 6 = .2 - .3, 7 = .3 - .4
    // 2, 5 -> 8 = .4 - 1.0
    newRanges = Arrays.asList(
            new AbstractMap.SimpleEntry<>(keyChunk, 0.3),
            new AbstractMap.SimpleEntry<>(0.3, 2 * keyChunk),
            new AbstractMap.SimpleEntry<>(2 * keyChunk, 1.0));
    long scale2 = scale1 + 10000;
    ArrayList<Integer> sealedSegments1 = Lists.newArrayList(1, 2, 5);
    response = store.startScale(SCOPE, streamName, sealedSegments1, newRanges, scale2, false, context, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments1, segmentsCreated, response.getActiveEpoch(), scale2, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), scale2, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 4);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 6, 7, 8).contains(x.getNumber())));
    // 7 -> 9 = .3 - .35, 10 = .35 - .6
    // 8 -> 10 = .35 - .6, 11 = .6 - 1.0
    newRanges = Arrays.asList(
            new AbstractMap.SimpleEntry<>(0.3, 0.35),
            new AbstractMap.SimpleEntry<>(0.35, 3 * keyChunk),
            new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
    long scale3 = scale2 + 10000;
    ArrayList<Integer> sealedSegments2 = Lists.newArrayList(7, 8);
    response = store.startScale(SCOPE, streamName, sealedSegments2, newRanges, scale3, false, context, executor).get();
    segmentsCreated = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments2, segmentsCreated, response.getActiveEpoch(), scale3, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments2.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), scale3, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 5);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 6, 9, 10, 11).contains(x.getNumber())));
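    // getSuccessors(segment) maps each successor segment number to the list of
    // that successor's predecessor segments; segments that were never sealed
    // (0, 6, 9, 10, 11 above) have no successors and yield an empty map.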
    Map<Integer, List<Integer>> successors = store.getSuccessors(SCOPE, streamName, 0, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 1, context, executor).get();
    assertTrue(successors.size() == 2
            && successors.containsKey(6) && successors.get(6).containsAll(Collections.singleton(1))
            && successors.containsKey(7) && successors.get(7).containsAll(Collections.singleton(1)));
    successors = store.getSuccessors(SCOPE, streamName, 2, context, executor).get();
    assertTrue(successors.size() == 1
            && successors.containsKey(8) && successors.get(8).containsAll(Lists.newArrayList(2, 5)));
    successors = store.getSuccessors(SCOPE, streamName, 3, context, executor).get();
    assertTrue(successors.size() == 1
            && successors.containsKey(5) && successors.get(5).containsAll(Lists.newArrayList(3, 4)));
    successors = store.getSuccessors(SCOPE, streamName, 4, context, executor).get();
    assertTrue(successors.size() == 1
            && successors.containsKey(5) && successors.get(5).containsAll(Lists.newArrayList(3, 4)));
    successors = store.getSuccessors(SCOPE, streamName, 5, context, executor).get();
    assertTrue(successors.size() == 1
            && successors.containsKey(8) && successors.get(8).containsAll(Lists.newArrayList(2, 5)));
    successors = store.getSuccessors(SCOPE, streamName, 6, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 7, context, executor).get();
    assertTrue(successors.size() == 2
            && successors.containsKey(9) && successors.get(9).containsAll(Collections.singleton(7))
            && successors.containsKey(10) && successors.get(10).containsAll(Lists.newArrayList(7, 8)));
    successors = store.getSuccessors(SCOPE, streamName, 8, context, executor).get();
    assertTrue(successors.size() == 2
            && successors.containsKey(11) && successors.get(11).containsAll(Collections.singleton(8))
            && successors.containsKey(10) && successors.get(10).containsAll(Lists.newArrayList(7, 8)));
    successors = store.getSuccessors(SCOPE, streamName, 9, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 10, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 11, context, executor).get();
    assertTrue(successors.isEmpty());
    // start - 1
    List<Integer> historicalSegments = store.getActiveSegments(SCOPE, streamName, start - 1, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 3, 4)));
    // start + 1
    historicalSegments = store.getActiveSegments(SCOPE, streamName, start + 1, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 3, 4)));
    // scale1 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale1 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 4);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 5)));
    // scale2 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale2 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 4);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 7, 8)));
    // scale3 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale3 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 9, 10, 11)));
    // scale3 + 10000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale3 + 10000, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 9, 10, 11)));
    assertFalse(store.isSealed(SCOPE, streamName, context, executor).get());
    assertNotEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    Boolean sealOperationStatus = store.setSealed(SCOPE, streamName, context, executor).get();
    assertTrue(sealOperationStatus);
    assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
    assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    // seal an already sealed stream.
    Boolean sealOperationStatus1 = store.setSealed(SCOPE, streamName, context, executor).get();
    assertTrue(sealOperationStatus1);
    assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
    assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    // seal a non-existent stream.
    try {
        store.setSealed(SCOPE, "nonExistentStream", null, executor).get();
    } catch (Exception e) {
        assertEquals(StoreException.DataNotFoundException.class, e.getCause().getClass());
    }
    // a cold marker expires on its own after its timestamp...
    store.markCold(SCOPE, streamName, 0, System.currentTimeMillis() + 1000, null, executor).get();
    assertTrue(store.isCold(SCOPE, streamName, 0, null, executor).get());
    Thread.sleep(1000);
    assertFalse(store.isCold(SCOPE, streamName, 0, null, executor).get());
    // ...or can be removed explicitly.
    store.markCold(SCOPE, streamName, 0, System.currentTimeMillis() + 1000, null, executor).get();
    store.removeMarker(SCOPE, streamName, 0, null, executor).get();
    assertFalse(store.isCold(SCOPE, streamName, 0, null, executor).get());
}
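To make the key-range arithmetic above concrete, here is a small standalone sketch of how ScalingPolicy.fixed(5) divides the [0.0, 1.0) routing-key space. The segment-to-range mapping follows the test's own comments; the hash-to-segment lookup at the end is an illustrative assumption, not Pravega's actual hashing code.

public class KeySpaceSketch {
    public static void main(String[] args) {
        int numSegments = 5;
        double keyChunk = 1.0 / numSegments;
        // Prints exactly the ranges listed in the test's comments:
        // segment i owns [i/5, (i+1)/5).
        for (int i = 0; i < numSegments; i++) {
            System.out.printf("segment %d owns [%.1f, %.1f)%n",
                    i, i * keyChunk, (i + 1) * keyChunk);
        }
        // A routing key hashed to d in [0, 1) would land in segment
        // floor(d / keyChunk); 0.55 falls in segment 2's [0.4, 0.6) range.
        double hashed = 0.55;
        System.out.println("key 0.55 -> segment " + (int) (hashed / keyChunk));
    }
}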
Use of io.pravega.client.stream.ScalingPolicy in project pravega by pravega.
From the class IntermittentCnxnFailureTest, method createStreamTest:
@Test
public void createStreamTest() throws Exception {
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder()
            .scope(SCOPE)
            .streamName(stream1)
            .scalingPolicy(policy1)
            .build();
    // Start stream creation asynchronously in the background; the connection to
    // the server will fail and should be retried.
    controllerService.createStream(configuration1, System.currentTimeMillis());
    // While creation is incomplete we should get an IllegalStateException.
    try {
        Retry.withExpBackoff(10, 10, 4)
                .retryingOn(StoreException.DataNotFoundException.class)
                .throwingOn(IllegalStateException.class)
                .run(() -> {
                    Futures.getAndHandleExceptions(streamStore.getConfiguration(SCOPE, stream1, null, executor), CompletionException::new);
                    return null;
                });
    } catch (CompletionException ex) {
        Assert.assertEquals(Exceptions.unwrap(ex).getMessage(), "stream state unknown");
        assertEquals(Exceptions.unwrap(ex).getClass(), IllegalStateException.class);
    }
    // Mock createSegment to return success.
    doReturn(CompletableFuture.completedFuture(true)).when(segmentHelperMock).createSegment(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    AtomicBoolean result = new AtomicBoolean(false);
    Retry.withExpBackoff(10, 10, 4)
            .retryingOn(IllegalStateException.class)
            .throwingOn(RuntimeException.class)
            .run(() -> {
                Futures.getAndHandleExceptions(streamStore.getConfiguration(SCOPE, stream1, null, executor)
                        .thenAccept(configuration -> result.set(configuration.equals(configuration1))), CompletionException::new);
                return null;
            });
    assertTrue(result.get());
}
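The Retry builder used above reads as: retry with exponential backoff on the first exception type, fail immediately on the second. Below is a minimal standalone sketch of the same pattern, assuming the utility is pravega's io.pravega.common.util.Retry as exercised in this test; fetchConfiguration is a hypothetical stand-in for the store call.

import io.pravega.common.util.Retry;

public class RetrySketch {

    private static int calls = 0;

    // Hypothetical stand-in for streamStore.getConfiguration(...): fails twice
    // with a retryable exception, then succeeds.
    private static String fetchConfiguration() {
        if (++calls < 3) {
            throw new IllegalStateException("stream state unknown");
        }
        return "configuration";
    }

    public static void main(String[] args) {
        String value = Retry
                // initial delay 10 ms, backoff multiplier 10, at most 4 attempts
                .withExpBackoff(10, 10, 4)
                // transient condition: retry
                .retryingOn(IllegalStateException.class)
                // anything else: propagate immediately
                .throwingOn(RuntimeException.class)
                .run(() -> fetchConfiguration());
        System.out.println(value); // succeeds on the third attempt
    }
}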
Use of io.pravega.client.stream.ScalingPolicy in project pravega by pravega.
From the class StreamMetadataTasksTest, method timeBasedRetentionStreamTest:
@Test(timeout = 30000)
public void timeBasedRetentionStreamTest() throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final RetentionPolicy retentionPolicy = RetentionPolicy.builder()
            .retentionType(RetentionPolicy.RetentionType.TIME)
            .retentionParam(Duration.ofMinutes(60).toMillis())
            .build();
    final StreamConfiguration configuration = StreamConfiguration.builder()
            .scope(SCOPE)
            .streamName("test")
            .scalingPolicy(policy)
            .retentionPolicy(retentionPolicy)
            .build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    assertNotEquals(0, consumer.getCurrentSegments(SCOPE, "test").get().size());
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    long recordingTime1 = System.currentTimeMillis();
    Map<Integer, Long> map1 = new HashMap<>();
    map1.put(0, 1L);
    map1.put(1, 1L);
    StreamCutRecord streamCut1 = new StreamCutRecord(recordingTime1, Long.MIN_VALUE, map1);
    // mock the generateStreamCut call.
    doReturn(CompletableFuture.completedFuture(streamCut1)).when(streamMetadataTasks).generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime1, null, "").get();
    // verify that one streamCut is generated and added.
    List<StreamCutRecord> list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    assertTrue(list.contains(streamCut1));
    Map<Integer, Long> map2 = new HashMap<>();
    map2.put(0, 10L);
    map2.put(1, 10L);
    long recordingTime2 = recordingTime1 + Duration.ofMinutes(5).toMillis();
    StreamCutRecord streamCut2 = new StreamCutRecord(recordingTime2, Long.MIN_VALUE, map2);
    doReturn(CompletableFuture.completedFuture(streamCut2)).when(streamMetadataTasks).generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime2, null, "").get();
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    StreamProperty<StreamTruncationRecord> truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test", true, null, executor).get();
    // verify that only one stream cut is in the retention set: streamCut2 arrived
    // too soon after streamCut1 and is not added, and truncation did not happen.
    assertTrue(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertFalse(truncProp.isUpdating());
    Map<Integer, Long> map3 = new HashMap<>();
    map3.put(0, 20L);
    map3.put(1, 20L);
    long recordingTime3 = recordingTime1 + Duration.ofMinutes(Config.MINIMUM_RETENTION_FREQUENCY_IN_MINUTES).toMillis() + 1;
    StreamCutRecord streamCut3 = new StreamCutRecord(recordingTime3, Long.MIN_VALUE, map3);
    doReturn(CompletableFuture.completedFuture(streamCut3)).when(streamMetadataTasks).generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime3, null, "").get();
    // verify that two stream cuts are in the retention set (cuts 1 and 3) and
    // that truncation has still not happened.
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test", true, null, executor).get();
    assertTrue(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertTrue(list.contains(streamCut3));
    assertFalse(truncProp.isUpdating());
    Map<Integer, Long> map4 = new HashMap<>();
    map4.put(0, 20L);
    map4.put(1, 20L);
    long recordingTime4 = recordingTime1 + retentionPolicy.getRetentionParam() + 2;
    StreamCutRecord streamCut4 = new StreamCutRecord(recordingTime4, Long.MIN_VALUE, map4);
    doReturn(CompletableFuture.completedFuture(streamCut4)).when(streamMetadataTasks).generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime4, null, "").get();
    // verify that only two stream cuts are in the retention set (cuts 3 and 4):
    // truncation has started, and streamCut1 was removed from the retention set
    // because it was used for truncation.
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test", true, null, executor).get();
    assertFalse(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertTrue(list.contains(streamCut3));
    assertTrue(list.contains(streamCut4));
    assertTrue(truncProp.isUpdating());
    assertTrue(truncProp.getProperty().getStreamCut().get(0) == 1L && truncProp.getProperty().getStreamCut().get(1) == 1L);
}
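The behavior exercised above suggests two rules: a new stream cut is recorded only if it is at least the minimum retention frequency after the latest recorded cut, and truncation targets the newest cut older than the retention period, discarding that cut and everything before it. Here is a standalone sketch of that decision logic; it is an illustration inferred from the test, not pravega's implementation, and all names in it are assumptions.

import java.util.Map;
import java.util.NavigableMap;
import java.util.Optional;

public class RetentionSketch {

    // Analogous to Config.MINIMUM_RETENTION_FREQUENCY_IN_MINUTES in the test;
    // the concrete value here is an assumption for illustration.
    static final long MIN_FREQUENCY_MILLIS = 30 * 60 * 1000L;

    // Record a new cut only if it is far enough after the latest recorded one
    // (this is why streamCut2, taken 5 minutes after streamCut1, is dropped).
    static void maybeAddCut(NavigableMap<Long, Map<Integer, Long>> retentionSet,
                            long recordingTime, Map<Integer, Long> cut) {
        if (retentionSet.isEmpty()
                || recordingTime - retentionSet.lastKey() >= MIN_FREQUENCY_MILLIS) {
            retentionSet.put(recordingTime, cut);
        }
    }

    // Truncate at the newest cut older than the retention period, then drop it
    // and everything before it (why streamCut1 disappears after truncation).
    static Optional<Map<Integer, Long>> truncationCut(
            NavigableMap<Long, Map<Integer, Long>> retentionSet,
            long now, long retentionPeriodMillis) {
        Map.Entry<Long, Map<Integer, Long>> candidate =
                retentionSet.floorEntry(now - retentionPeriodMillis);
        if (candidate == null) {
            return Optional.empty();
        }
        retentionSet.headMap(candidate.getKey(), true).clear();
        return Optional.of(candidate.getValue());
    }
}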
Use of io.pravega.client.stream.ScalingPolicy in project pravega by pravega.
From the class StreamMetadataTasksTest, method manualScaleTest:
@Test(timeout = 30000)
public void manualScaleTest() throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(1);
    final StreamConfiguration configuration = StreamConfiguration.builder()
            .scope(SCOPE)
            .streamName("test")
            .scalingPolicy(policy)
            .build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    List<AbstractMap.SimpleEntry<Double, Double>> newRanges = new ArrayList<>();
    newRanges.add(new AbstractMap.SimpleEntry<>(0.0, 0.5));
    newRanges.add(new AbstractMap.SimpleEntry<>(0.5, 1.0));
    ScaleResponse scaleOpResult = streamMetadataTasks.manualScale(SCOPE, "test", Collections.singletonList(0), newRanges, 30, null).get();
    assertEquals(ScaleStreamStatus.STARTED, scaleOpResult.getStatus());
    OperationContext context = streamStorePartialMock.createContext(SCOPE, "test");
    assertEquals(streamStorePartialMock.getState(SCOPE, "test", false, context, executor).get(), State.ACTIVE);
    // Even after startScale runs, the state should still be ACTIVE.
    StartScaleResponse response = streamStorePartialMock.startScale(SCOPE, "test", Collections.singletonList(0), newRanges, 30, true, null, executor).get();
    assertEquals(response.getActiveEpoch(), 0);
    assertEquals(streamStorePartialMock.getState(SCOPE, "test", true, context, executor).get(), State.ACTIVE);
    // scaleNewSegmentsCreated must fail while the stream is not in SCALING state.
    AssertExtensions.assertThrows("",
            () -> streamStorePartialMock.scaleNewSegmentsCreated(SCOPE, "test", Collections.singletonList(0), response.getSegmentsCreated(), response.getActiveEpoch(), 30, context, executor).get(),
            ex -> Exceptions.unwrap(ex) instanceof StoreException.IllegalStateException);
    List<Segment> segments = streamMetadataTasks.startScale((ScaleOpEvent) requestEventWriter.getEventQueue().take(), true, context, "").get();
    assertTrue(segments.stream().anyMatch(x -> x.getNumber() == 1 && x.getKeyStart() == 0.0 && x.getKeyEnd() == 0.5));
    assertTrue(segments.stream().anyMatch(x -> x.getNumber() == 2 && x.getKeyStart() == 0.5 && x.getKeyEnd() == 1.0));
}
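A manual scale only makes sense if the proposed ranges exactly tile the key space owned by the segments being sealed; here, segment 0 of a fixed(1) stream owns all of [0.0, 1.0). The following is an illustrative sketch of that invariant, not pravega's actual validation code; exact double comparison is adequate for the hand-picked boundaries these tests use.

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ScaleRangeCheck {

    // True iff the proposed ranges cover [sealedLow, sealedHigh) with no gaps,
    // overlaps, or empty ranges.
    static boolean coversExactly(double sealedLow, double sealedHigh,
                                 List<AbstractMap.SimpleEntry<Double, Double>> newRanges) {
        List<AbstractMap.SimpleEntry<Double, Double>> sorted = new ArrayList<>(newRanges);
        sorted.sort((a, b) -> Double.compare(a.getKey(), b.getKey()));
        double cursor = sealedLow;
        for (AbstractMap.SimpleEntry<Double, Double> range : sorted) {
            if (range.getKey() != cursor || range.getValue() <= range.getKey()) {
                return false;
            }
            cursor = range.getValue();
        }
        return cursor == sealedHigh;
    }

    public static void main(String[] args) {
        // The same split requested in manualScaleTest above.
        List<AbstractMap.SimpleEntry<Double, Double>> newRanges = Arrays.asList(
                new AbstractMap.SimpleEntry<>(0.0, 0.5),
                new AbstractMap.SimpleEntry<>(0.5, 1.0));
        System.out.println(coversExactly(0.0, 1.0, newRanges)); // true
    }
}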
Use of io.pravega.client.stream.ScalingPolicy in project pravega by pravega.
From the class StreamMetadataTasksTest, method setup:
@Before
public void setup() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    StreamMetadataStore streamStore = StreamStoreFactory.createInMemoryStore(1, executor);
    // Create a partial mock and stub only the isTransactionOngoing call.
    streamStorePartialMock = spy(streamStore);
    doReturn(CompletableFuture.completedFuture(false)).when(streamStorePartialMock).isTransactionOngoing(anyString(), anyString(), any(), any());
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    HostControllerStore hostStore = HostStoreFactory.createInMemoryStore(HostMonitorConfigImpl.dummyConfig());
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    connectionFactory = new ConnectionFactoryImpl(ClientConfig.builder().build());
    streamMetadataTasks = spy(new StreamMetadataTasks(streamStorePartialMock, hostStore, taskMetadataStore, segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key"));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStorePartialMock, hostStore, segmentHelperMock, executor, "host", connectionFactory, authEnabled, "key");
    this.streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStorePartialMock, executor),
            new ScaleOperationTask(streamMetadataTasks, streamStorePartialMock, executor),
            new UpdateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new SealStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new DeleteStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            new TruncateStreamTask(streamMetadataTasks, streamStorePartialMock, executor),
            executor);
    consumer = new ControllerService(streamStorePartialMock, hostStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null);
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder()
            .scope(SCOPE)
            .streamName(stream1)
            .scalingPolicy(policy1)
            .build();
    streamStorePartialMock.createScope(SCOPE).join();
    long start = System.currentTimeMillis();
    streamStorePartialMock.createStream(SCOPE, stream1, configuration1, start, null, executor).get();
    streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Integer> sealedSegments = Collections.singletonList(1);
    StartScaleResponse response = streamStorePartialMock.startScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), start + 20, false, null, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    streamStorePartialMock.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    streamStorePartialMock.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
    streamStorePartialMock.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentsCreated, response.getActiveEpoch(), start + 20, null, executor).get();
}
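setup() leans on Mockito partial mocks: spy() wraps a real object so unstubbed methods keep their real behavior, and doReturn(...).when(...) overrides a single method without ever invoking it. A minimal self-contained sketch of that pattern follows; the Service class is hypothetical.

import static org.mockito.Mockito.*;
import java.util.concurrent.CompletableFuture;

public class PartialMockSketch {

    static class Service {
        CompletableFuture<Boolean> isBusy(String name) {
            return CompletableFuture.completedFuture(true); // real behavior
        }
        String describe(String name) {
            return "service:" + name; // untouched by the spy
        }
    }

    public static void main(String[] args) {
        Service partialMock = spy(new Service());
        // Override only isBusy; describe() still runs the real code. doReturn
        // (rather than when(...).thenReturn(...)) avoids calling the real method
        // while stubbing, which matters for spies with side effects.
        doReturn(CompletableFuture.completedFuture(false))
                .when(partialMock).isBusy(anyString());
        System.out.println(partialMock.isBusy("test").join()); // false (stubbed)
        System.out.println(partialMock.describe("test"));      // service:test (real)
    }
}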