Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
The class EndToEndWithScaleTest, method testScale.
@Test(timeout = 30000)
public void testScale() throws Exception {
    StreamConfiguration config = StreamConfiguration.builder().scope("test").streamName("test")
            .scalingPolicy(ScalingPolicy.byEventRate(10, 2, 1)).build();
    Controller controller = controllerWrapper.getController();
    controllerWrapper.getControllerService().createScope("test").get();
    controller.createStream(config).get();
    @Cleanup
    ConnectionFactory connectionFactory = new ConnectionFactoryImpl(
            ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    @Cleanup
    ClientFactory clientFactory = new ClientFactoryImpl("test", controller, connectionFactory);
    @Cleanup
    EventStreamWriter<String> writer = clientFactory.createEventWriter("test", new JavaSerializer<>(),
            EventWriterConfig.builder().build());
    writer.writeEvent("0", "txntest1").get();
    // Scale: seal segment 0 and replace it with three segments covering the key space.
    Stream stream = new StreamImpl("test", "test");
    Map<Double, Double> map = new HashMap<>();
    map.put(0.0, 0.33);
    map.put(0.33, 0.66);
    map.put(0.66, 1.0);
    Boolean result = controller.scaleStream(stream, Collections.singletonList(0), map, executor).getFuture().get();
    assertTrue(result);
    writer.writeEvent("0", "txntest2").get();
    @Cleanup
    ReaderGroupManager groupManager = new ReaderGroupManagerImpl("test", controller, clientFactory, connectionFactory);
    groupManager.createReaderGroup("reader",
            ReaderGroupConfig.builder().disableAutomaticCheckpoints().stream("test/test").build());
    @Cleanup
    EventStreamReader<String> reader = clientFactory.createReader("readerId", "reader", new JavaSerializer<>(),
            ReaderConfig.builder().build());
    EventRead<String> event = reader.readNextEvent(10000);
    assertNotNull(event);
    assertEquals("txntest1", event.getEvent());
    event = reader.readNextEvent(10000);
    assertNotNull(event);
    assertEquals("txntest2", event.getEvent());
}
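A note on the range map above: the Map<Double, Double> handed to Controller#scaleStream maps each replacement range's start key to its end key, and the ranges must tile, with no gaps or overlaps, exactly the key space owned by the segments being sealed (here segment 0, which covers the whole key space because the stream starts with a single segment). Below is a minimal validation sketch under that assumption; the helper itself is hypothetical, not a Pravega API:

    import java.util.Map;
    import java.util.TreeMap;

    // Throws if the proposed ranges do not exactly cover [lo, hi).
    static void validateRanges(Map<Double, Double> newRanges, double lo, double hi) {
        double cursor = lo;
        // Walk the ranges in key order; each must begin where the previous one ended.
        // Exact double comparison is acceptable here because the ranges use exact literals.
        for (Map.Entry<Double, Double> range : new TreeMap<>(newRanges).entrySet()) {
            if (range.getKey() != cursor) {
                throw new IllegalArgumentException("gap or overlap at key " + cursor);
            }
            cursor = range.getValue();
        }
        if (cursor != hi) {
            throw new IllegalArgumentException("ranges stop at " + cursor + ", expected " + hi);
        }
    }

For the map built in the test, validateRanges(map, 0.0, 1.0) passes; removing any one entry would make it throw.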
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
The class ZkStreamTest, method testZkStream.
@Test
public void testZkStream() throws Exception {
    double keyChunk = 1.0 / 5;
    final ScalingPolicy policy = ScalingPolicy.fixed(5);
    final StreamMetadataStore store = new ZKStreamMetadataStore(cli, executor);
    final String streamName = "test";
    store.createScope(SCOPE).get();
    StreamConfiguration streamConfig = StreamConfiguration.builder().scope(streamName).streamName(streamName)
            .scalingPolicy(policy).build();
    store.createStream(SCOPE, streamName, streamConfig, System.currentTimeMillis(), null, executor).get();
    store.setState(SCOPE, streamName, State.ACTIVE, null, executor).get();
    OperationContext context = store.createContext(SCOPE, streamName);
    List<Segment> segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 5);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 1, 2, 3, 4).contains(x.getNumber())));
    long start = segments.get(0).getStart();
    assertEquals(store.getConfiguration(SCOPE, streamName, context, executor).get(), streamConfig);
    List<AbstractMap.SimpleEntry<Double, Double>> newRanges;
    // Existing ranges: 0 = 0 - .2, 1 = .2 - .4, 2 = .4 - .6, 3 = .6 - .8, 4 = .8 - 1.0
    // Scale 1: 3, 4 -> 5 = .6 - 1.0
    newRanges = Collections.singletonList(new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
    long scale1 = start + 10000;
    ArrayList<Integer> sealedSegments = Lists.newArrayList(3, 4);
    StartScaleResponse response = store.startScale(SCOPE, streamName, sealedSegments, newRanges, scale1,
            false, context, executor).get();
    List<Segment> newSegments = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments, newSegments, response.getActiveEpoch(),
            scale1, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
            newSegments, response.getActiveEpoch(), scale1, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 4);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 1, 2, 5).contains(x.getNumber())));
    // Scale 2: 1 -> 6 = .2 - .3, 7 = .3 - .4
    //          2, 5 -> 8 = .4 - 1.0
    newRanges = Arrays.asList(new AbstractMap.SimpleEntry<>(keyChunk, 0.3),
            new AbstractMap.SimpleEntry<>(0.3, 2 * keyChunk),
            new AbstractMap.SimpleEntry<>(2 * keyChunk, 1.0));
    long scale2 = scale1 + 10000;
    ArrayList<Integer> sealedSegments1 = Lists.newArrayList(1, 2, 5);
    response = store.startScale(SCOPE, streamName, sealedSegments1, newRanges, scale2, false, context, executor).get();
    List<Segment> segmentsCreated = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments1, segmentsCreated, response.getActiveEpoch(),
            scale2, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
            segmentsCreated, response.getActiveEpoch(), scale2, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 4);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 6, 7, 8).contains(x.getNumber())));
    // Scale 3: 7 -> 9 = .3 - .35, 10 = .35 - .6
    //          8 -> 10 = .35 - .6, 11 = .6 - 1.0
    newRanges = Arrays.asList(new AbstractMap.SimpleEntry<>(0.3, 0.35),
            new AbstractMap.SimpleEntry<>(0.35, 3 * keyChunk),
            new AbstractMap.SimpleEntry<>(3 * keyChunk, 1.0));
    long scale3 = scale2 + 10000;
    ArrayList<Integer> sealedSegments2 = Lists.newArrayList(7, 8);
    response = store.startScale(SCOPE, streamName, sealedSegments2, newRanges, scale3, false, context, executor).get();
    segmentsCreated = response.getSegmentsCreated();
    store.setState(SCOPE, streamName, State.SCALING, null, executor).join();
    store.scaleNewSegmentsCreated(SCOPE, streamName, sealedSegments2, segmentsCreated, response.getActiveEpoch(),
            scale3, context, executor).get();
    store.scaleSegmentsSealed(SCOPE, streamName, sealedSegments2.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
            segmentsCreated, response.getActiveEpoch(), scale3, context, executor).get();
    segments = store.getActiveSegments(SCOPE, streamName, context, executor).get();
    assertEquals(segments.size(), 5);
    assertTrue(segments.stream().allMatch(x -> Lists.newArrayList(0, 6, 9, 10, 11).contains(x.getNumber())));
    // Verify the successor relationships recorded for every segment.
    Map<Integer, List<Integer>> successors = store.getSuccessors(SCOPE, streamName, 0, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 1, context, executor).get();
    assertTrue(successors.size() == 2 && successors.containsKey(6) && successors.get(6).containsAll(Collections.singleton(1))
            && successors.containsKey(7) && successors.get(7).containsAll(Collections.singleton(1)));
    successors = store.getSuccessors(SCOPE, streamName, 2, context, executor).get();
    assertTrue(successors.size() == 1 && successors.containsKey(8) && successors.get(8).containsAll(Lists.newArrayList(2, 5)));
    successors = store.getSuccessors(SCOPE, streamName, 3, context, executor).get();
    assertTrue(successors.size() == 1 && successors.containsKey(5) && successors.get(5).containsAll(Lists.newArrayList(3, 4)));
    successors = store.getSuccessors(SCOPE, streamName, 4, context, executor).get();
    assertTrue(successors.size() == 1 && successors.containsKey(5) && successors.get(5).containsAll(Lists.newArrayList(3, 4)));
    successors = store.getSuccessors(SCOPE, streamName, 5, context, executor).get();
    assertTrue(successors.size() == 1 && successors.containsKey(8) && successors.get(8).containsAll(Lists.newArrayList(2, 5)));
    successors = store.getSuccessors(SCOPE, streamName, 6, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 7, context, executor).get();
    assertTrue(successors.size() == 2 && successors.containsKey(9) && successors.get(9).containsAll(Collections.singleton(7))
            && successors.containsKey(10) && successors.get(10).containsAll(Lists.newArrayList(7, 8)));
    successors = store.getSuccessors(SCOPE, streamName, 8, context, executor).get();
    assertTrue(successors.size() == 2 && successors.containsKey(11) && successors.get(11).containsAll(Collections.singleton(8))
            && successors.containsKey(10) && successors.get(10).containsAll(Lists.newArrayList(7, 8)));
    successors = store.getSuccessors(SCOPE, streamName, 9, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 10, context, executor).get();
    assertTrue(successors.isEmpty());
    successors = store.getSuccessors(SCOPE, streamName, 11, context, executor).get();
    assertTrue(successors.isEmpty());
    // Query the active segments at start - 1.
    List<Integer> historicalSegments = store.getActiveSegments(SCOPE, streamName, start - 1, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 3, 4)));
    // start + 1
    historicalSegments = store.getActiveSegments(SCOPE, streamName, start + 1, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 3, 4)));
    // scale1 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale1 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 4);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 1, 2, 5)));
    // scale2 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale2 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 4);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 7, 8)));
    // scale3 + 1000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale3 + 1000, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 9, 10, 11)));
    // scale3 + 10000
    historicalSegments = store.getActiveSegments(SCOPE, streamName, scale3 + 10000, context, executor).get();
    assertEquals(historicalSegments.size(), 5);
    assertTrue(historicalSegments.containsAll(Lists.newArrayList(0, 6, 9, 10, 11)));
    // Seal the stream.
    assertFalse(store.isSealed(SCOPE, streamName, context, executor).get());
    assertNotEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    Boolean sealOperationStatus = store.setSealed(SCOPE, streamName, context, executor).get();
    assertTrue(sealOperationStatus);
    assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
    assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    // Seal an already sealed stream.
    Boolean sealOperationStatus1 = store.setSealed(SCOPE, streamName, context, executor).get();
    assertTrue(sealOperationStatus1);
    assertTrue(store.isSealed(SCOPE, streamName, context, executor).get());
    assertEquals(0, store.getActiveSegments(SCOPE, streamName, context, executor).get().size());
    // Seal a non-existent stream.
    try {
        store.setSealed(SCOPE, "nonExistentStream", null, executor).get();
    } catch (Exception e) {
        assertEquals(StoreException.DataNotFoundException.class, e.getCause().getClass());
    }
    // Cold markers expire at the given timestamp and can also be removed explicitly.
    store.markCold(SCOPE, streamName, 0, System.currentTimeMillis() + 1000, null, executor).get();
    assertTrue(store.isCold(SCOPE, streamName, 0, null, executor).get());
    Thread.sleep(1000);
    assertFalse(store.isCold(SCOPE, streamName, 0, null, executor).get());
    store.markCold(SCOPE, streamName, 0, System.currentTimeMillis() + 1000, null, executor).get();
    store.removeMarker(SCOPE, streamName, 0, null, executor).get();
    assertFalse(store.isCold(SCOPE, streamName, 0, null, executor).get());
}
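Each of the three scale operations above runs the same three-phase protocol against the metadata store: startScale, scaleNewSegmentsCreated, scaleSegmentsSealed. A minimal sketch that factors the repetition into a helper, reusing exactly the calls, signatures, and the test's executor field; the helper method itself is hypothetical:

    // Runs one scale operation: record intent, acknowledge new segments, seal old ones.
    private List<Segment> runScale(StreamMetadataStore store, String scope, String stream,
                                   List<Integer> sealedSegments,
                                   List<AbstractMap.SimpleEntry<Double, Double>> newRanges,
                                   long scaleTimestamp, OperationContext context) throws Exception {
        // Phase 1: persist the scale intent and allocate metadata for the new segments.
        StartScaleResponse response = store.startScale(scope, stream, sealedSegments, newRanges,
                scaleTimestamp, false, context, executor).get();
        store.setState(scope, stream, State.SCALING, null, executor).join();
        // Phase 2: acknowledge that the new segments were created in the segment store.
        store.scaleNewSegmentsCreated(scope, stream, sealedSegments, response.getSegmentsCreated(),
                response.getActiveEpoch(), scaleTimestamp, context, executor).get();
        // Phase 3: seal the predecessors (final size 0L here, as in the test) and commit the epoch change.
        store.scaleSegmentsSealed(scope, stream,
                sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
                response.getSegmentsCreated(), response.getActiveEpoch(), scaleTimestamp, context, executor).get();
        return response.getSegmentsCreated();
    }

With this helper, each scale in the test reduces to a single call, e.g. runScale(store, SCOPE, streamName, Lists.newArrayList(3, 4), newRanges, scale1, context).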
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
The class IntermittentCnxnFailureTest, method createStreamTest.
@Test
public void createStreamTest() throws Exception {
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1)
            .scalingPolicy(policy1).build();
    // Start stream creation asynchronously; the connection to the server will fail
    // and the call should be retried.
    controllerService.createStream(configuration1, System.currentTimeMillis());
    // While creation is incomplete, we should get an IllegalStateException.
    try {
        Retry.withExpBackoff(10, 10, 4)
             .retryingOn(StoreException.DataNotFoundException.class)
             .throwingOn(IllegalStateException.class)
             .run(() -> {
                 Futures.getAndHandleExceptions(streamStore.getConfiguration(SCOPE, stream1, null, executor),
                         CompletionException::new);
                 return null;
             });
    } catch (CompletionException ex) {
        Assert.assertEquals(Exceptions.unwrap(ex).getMessage(), "stream state unknown");
        assertEquals(Exceptions.unwrap(ex).getClass(), IllegalStateException.class);
    }
    // Mock createSegment to return success.
    doReturn(CompletableFuture.completedFuture(true)).when(segmentHelperMock)
            .createSegment(anyString(), anyString(), anyInt(), any(), any(), any(), any());
    AtomicBoolean result = new AtomicBoolean(false);
    Retry.withExpBackoff(10, 10, 4)
         .retryingOn(IllegalStateException.class)
         .throwingOn(RuntimeException.class)
         .run(() -> {
             Futures.getAndHandleExceptions(streamStore.getConfiguration(SCOPE, stream1, null, executor)
                     .thenAccept(configuration -> result.set(configuration.equals(configuration1))),
                     CompletionException::new);
             return null;
         });
    assertTrue(result.get());
}
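The stub above flips createSegment to permanent success. To exercise the retry path more literally, a common Mockito pattern is to fail the first few calls and then succeed. A minimal, hypothetical sketch; the attempt count and exception type are assumptions, and it presumes a failed-future factory such as io.pravega.common.concurrent.Futures#failedFuture is available:

    // Fail the first two createSegment calls, then succeed, so the controller's
    // retry loop eventually completes stream creation.
    AtomicInteger attempts = new AtomicInteger();
    doAnswer(invocation -> attempts.incrementAndGet() <= 2
            ? Futures.failedFuture(new RuntimeException("simulated connection failure"))
            : CompletableFuture.completedFuture(true))
        .when(segmentHelperMock)
        .createSegment(anyString(), anyString(), anyInt(), any(), any(), any(), any());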
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
The class StreamMetadataTasksTest, method updateStreamTest.
@Test(timeout = 30000)
public void updateStreamTest() throws Exception {
    assertNotEquals(0, consumer.getCurrentSegments(SCOPE, stream1).get().size());
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    StreamConfiguration streamConfiguration = StreamConfiguration.builder().scope(SCOPE).streamName(stream1)
            .scalingPolicy(ScalingPolicy.fixed(5)).build();
    StreamProperty<StreamConfiguration> configProp = streamStorePartialMock.getConfigurationProperty(SCOPE, stream1,
            true, null, executor).join();
    assertFalse(configProp.isUpdating());
    // 1. Happy-day test: the update should succeed.
    CompletableFuture<UpdateStreamStatus.Status> updateOperationFuture = streamMetadataTasks.updateStream(SCOPE,
            stream1, streamConfiguration, null);
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    assertEquals(UpdateStreamStatus.Status.SUCCESS, updateOperationFuture.join());
    configProp = streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor).join();
    assertTrue(configProp.getProperty().equals(streamConfiguration));
    streamConfiguration = StreamConfiguration.builder().scope(SCOPE).streamName(stream1)
            .scalingPolicy(ScalingPolicy.fixed(6)).build();
    // 2. Change the state to SCALING.
    streamStorePartialMock.setState(SCOPE, stream1, State.SCALING, null, executor).get();
    // Calling update should fail without posting the event.
    streamMetadataTasks.updateStream(SCOPE, stream1, streamConfiguration, null);
    AtomicBoolean loop = new AtomicBoolean(false);
    Futures.loop(() -> !loop.get(),
            () -> streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor)
                    .thenApply(StreamProperty::isUpdating).thenAccept(loop::set),
            executor).join();
    // Event posted and first step performed; now pick up the event for processing.
    UpdateStreamTask updateStreamTask = new UpdateStreamTask(streamMetadataTasks, streamStorePartialMock, executor);
    UpdateStreamEvent taken = (UpdateStreamEvent) requestEventWriter.eventQueue.take();
    AssertExtensions.assertThrows("", updateStreamTask.execute(taken),
            e -> Exceptions.unwrap(e) instanceof StoreException.OperationNotAllowedException);
    streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    // Now, with state = ACTIVE, process the same event; it should succeed.
    assertTrue(Futures.await(updateStreamTask.execute(taken)));
    // 3. Multiple back-to-back updates.
    StreamConfiguration streamConfiguration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1)
            .scalingPolicy(ScalingPolicy.byEventRate(1, 1, 2)).build();
    CompletableFuture<UpdateStreamStatus.Status> updateOperationFuture1 = streamMetadataTasks.updateStream(SCOPE,
            stream1, streamConfiguration1, null);
    // Ensure that the previous updateStream has posted the event and set the status
    // to updating; only then call the second updateStream.
    AtomicBoolean loop2 = new AtomicBoolean(false);
    Futures.loop(() -> !loop2.get(),
            () -> streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor)
                    .thenApply(StreamProperty::isUpdating).thenAccept(loop2::set),
            executor).join();
    configProp = streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor).join();
    assertTrue(configProp.getProperty().equals(streamConfiguration1) && configProp.isUpdating());
    StreamConfiguration streamConfiguration2 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1)
            .scalingPolicy(ScalingPolicy.fixed(7)).build();
    // Post the second update request; it should fail immediately because the previous update is still in progress.
    CompletableFuture<UpdateStreamStatus.Status> updateOperationFuture2 = streamMetadataTasks.updateStream(SCOPE,
            stream1, streamConfiguration2, null);
    assertEquals(UpdateStreamStatus.Status.FAILURE, updateOperationFuture2.join());
    // Process the event.
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    // Verify that the first update request also completes successfully.
    assertEquals(UpdateStreamStatus.Status.SUCCESS, updateOperationFuture1.join());
    configProp = streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor).join();
    assertTrue(configProp.getProperty().equals(streamConfiguration1) && !configProp.isUpdating());
}
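The Futures.loop idiom appears twice above to wait, without blocking a thread, until the configuration record is flagged as updating. A minimal sketch that factors it into a helper, reusing the test's fields; the method name is hypothetical:

    // Polls the configuration property until isUpdating() becomes true.
    private void awaitUpdatingFlag() {
        AtomicBoolean updating = new AtomicBoolean(false);
        Futures.loop(() -> !updating.get(),
                () -> streamStorePartialMock.getConfigurationProperty(SCOPE, stream1, true, null, executor)
                        .thenApply(StreamProperty::isUpdating)
                        .thenAccept(updating::set),
                executor).join();
    }

Each iteration chains a fresh asynchronous read of the property; because the loop body returns a CompletableFuture, no thread sleeps while waiting.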
Use of io.pravega.client.stream.StreamConfiguration in project pravega by pravega.
The class StreamMetadataTasksTest, method timeBasedRetentionStreamTest.
@Test(timeout = 30000)
public void timeBasedRetentionStreamTest() throws Exception {
    final ScalingPolicy policy = ScalingPolicy.fixed(2);
    final RetentionPolicy retentionPolicy = RetentionPolicy.builder()
            .retentionType(RetentionPolicy.RetentionType.TIME)
            .retentionParam(Duration.ofMinutes(60).toMillis()).build();
    final StreamConfiguration configuration = StreamConfiguration.builder().scope(SCOPE).streamName("test")
            .scalingPolicy(policy).retentionPolicy(retentionPolicy).build();
    streamStorePartialMock.createStream(SCOPE, "test", configuration, System.currentTimeMillis(), null, executor).get();
    streamStorePartialMock.setState(SCOPE, "test", State.ACTIVE, null, executor).get();
    assertNotEquals(0, consumer.getCurrentSegments(SCOPE, "test").get().size());
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    long recordingTime1 = System.currentTimeMillis();
    Map<Integer, Long> map1 = new HashMap<>();
    map1.put(0, 1L);
    map1.put(1, 1L);
    StreamCutRecord streamCut1 = new StreamCutRecord(recordingTime1, Long.MIN_VALUE, map1);
    // Mock generateStreamCut to return streamCut1.
    doReturn(CompletableFuture.completedFuture(streamCut1)).when(streamMetadataTasks)
            .generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime1, null, "").get();
    // Verify that one stream cut is generated and added.
    List<StreamCutRecord> list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    assertTrue(list.contains(streamCut1));
    Map<Integer, Long> map2 = new HashMap<>();
    map2.put(0, 10L);
    map2.put(1, 10L);
    long recordingTime2 = recordingTime1 + Duration.ofMinutes(5).toMillis();
    StreamCutRecord streamCut2 = new StreamCutRecord(recordingTime2, Long.MIN_VALUE, map2);
    doReturn(CompletableFuture.completedFuture(streamCut2)).when(streamMetadataTasks)
            .generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime2, null, "").get();
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    StreamProperty<StreamTruncationRecord> truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test",
            true, null, executor).get();
    // Verify that only one stream cut is in the retention set (streamCut2 arrived too soon
    // after streamCut1 and is not added) and that truncation did not happen.
    assertTrue(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertFalse(truncProp.isUpdating());
    Map<Integer, Long> map3 = new HashMap<>();
    map3.put(0, 20L);
    map3.put(1, 20L);
    long recordingTime3 = recordingTime1 + Duration.ofMinutes(Config.MINIMUM_RETENTION_FREQUENCY_IN_MINUTES).toMillis() + 1;
    StreamCutRecord streamCut3 = new StreamCutRecord(recordingTime3, Long.MIN_VALUE, map3);
    doReturn(CompletableFuture.completedFuture(streamCut3)).when(streamMetadataTasks)
            .generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime3, null, "").get();
    // Verify that two stream cuts (1 and 3) are in the retention set and that truncation
    // has still not happened.
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test", true, null, executor).get();
    assertTrue(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertTrue(list.contains(streamCut3));
    assertFalse(truncProp.isUpdating());
    Map<Integer, Long> map4 = new HashMap<>();
    map4.put(0, 20L);
    map4.put(1, 20L);
    long recordingTime4 = recordingTime1 + retentionPolicy.getRetentionParam() + 2;
    StreamCutRecord streamCut4 = new StreamCutRecord(recordingTime4, Long.MIN_VALUE, map4);
    doReturn(CompletableFuture.completedFuture(streamCut4)).when(streamMetadataTasks)
            .generateStreamCut(anyString(), anyString(), any(), any());
    streamMetadataTasks.retention(SCOPE, "test", retentionPolicy, recordingTime4, null, "").get();
    // Verify that only two stream cuts (3 and 4) remain in the retention set and that
    // truncation has started; streamCut1 is removed because it was used for truncation.
    list = streamStorePartialMock.getStreamCutsFromRetentionSet(SCOPE, "test", null, executor).get();
    truncProp = streamStorePartialMock.getTruncationProperty(SCOPE, "test", true, null, executor).get();
    assertFalse(list.contains(streamCut1));
    assertFalse(list.contains(streamCut2));
    assertTrue(list.contains(streamCut3));
    assertTrue(list.contains(streamCut4));
    assertTrue(truncProp.isUpdating());
    assertTrue(truncProp.getProperty().getStreamCut().get(0) == 1L && truncProp.getProperty().getStreamCut().get(1) == 1L);
}
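The four retention() calls above exercise two thresholds: a new stream cut is recorded only if enough time has passed since the latest recorded cut, governed by Config.MINIMUM_RETENTION_FREQUENCY_IN_MINUTES (which is why streamCut2 is skipped), and truncation fires once a recorded cut ages out of the retention window (which is why the call at recordingTime4 truncates at streamCut1). A minimal sketch of that decision logic; the helper names are illustrative, not the controller's actual code, and it assumes StreamCutRecord exposes its recording time via getRecordingTime():

    import java.util.Comparator;
    import java.util.List;
    import java.util.Optional;

    // Record a new cut only if enough time has passed since the newest recorded cut.
    static boolean shouldRecordCut(long newestRecordedTime, long now, long minFrequencyMillis) {
        return now - newestRecordedTime > minFrequencyMillis;
    }

    // Truncate at the most recent cut that has aged out of the retention window.
    static Optional<StreamCutRecord> pickTruncationCut(List<StreamCutRecord> retentionSet,
                                                       long now, long retentionMillis) {
        return retentionSet.stream()
                .filter(cut -> now - cut.getRecordingTime() > retentionMillis)
                .max(Comparator.comparingLong(StreamCutRecord::getRecordingTime));
    }

At recordingTime4, streamCut1 is the only cut older than the 60-minute retention parameter, so it is chosen as the truncation point and then dropped from the retention set.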