Use of io.pravega.client.stream.impl.StreamCutImpl in project pravega by pravega.
In class StreamMetadataTasksTest, method consumptionBasedRetentionTimeLimitTest:
@Test(timeout = 30000)
public void consumptionBasedRetentionTimeLimitTest() throws Exception {
final ScalingPolicy policy = ScalingPolicy.fixed(2);
final RetentionPolicy retentionPolicy = RetentionPolicy.byTime(Duration.ofMillis(1L), Duration.ofMillis(10L));
String stream1 = "consumptionTime";
final StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(policy).retentionPolicy(retentionPolicy).build();
streamStorePartialMock.createStream(SCOPE, stream1, configuration, System.currentTimeMillis(), null, executor).get();
streamStorePartialMock.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
assertNotEquals(0, consumer.getCurrentSegments(SCOPE, stream1, 0L).get().size());
WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
streamMetadataTasks.setRequestEventWriter(requestEventWriter);
streamMetadataTasks.setRetentionFrequencyMillis(1L);
AtomicLong time = new AtomicLong(0L);
streamMetadataTasks.setRetentionClock(time::get);
// region case 1: basic retention
final Segment seg0 = new Segment(SCOPE, stream1, 0L);
final Segment seg1 = new Segment(SCOPE, stream1, 1L);
ImmutableMap<Segment, Long> startStreamCut = ImmutableMap.of(seg0, 0L, seg1, 0L);
Map<Stream, StreamCut> startSC = ImmutableMap.of(Stream.of(SCOPE, stream1), new StreamCutImpl(Stream.of(SCOPE, stream1), startStreamCut));
ImmutableMap<Segment, Long> endStreamCut = ImmutableMap.of(seg0, 2000L, seg1, 3000L);
Map<Stream, StreamCut> endSC = ImmutableMap.of(Stream.of(SCOPE, stream1), new StreamCutImpl(Stream.of(SCOPE, stream1), endStreamCut));
ReaderGroupConfig consumpRGConfig = ReaderGroupConfig.builder().automaticCheckpointIntervalMillis(30000L).groupRefreshTimeMillis(20000L).maxOutstandingCheckpointRequest(2).retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT).startingStreamCuts(startSC).endingStreamCuts(endSC).build();
consumpRGConfig = ReaderGroupConfig.cloneConfig(consumpRGConfig, UUID.randomUUID(), 0L);
doReturn(CompletableFuture.completedFuture(Controller.CreateStreamStatus.Status.SUCCESS)).when(streamMetadataTasks).createRGStream(anyString(), anyString(), any(), anyLong(), anyInt(), anyLong());
String subscriber1 = "subscriber1";
CompletableFuture<Controller.CreateReaderGroupResponse> createStatus = streamMetadataTasks.createReaderGroup(SCOPE, subscriber1, consumpRGConfig, System.currentTimeMillis(), 0L);
assertTrue(Futures.await(processEvent(requestEventWriter)));
Controller.CreateReaderGroupResponse createResponse1 = createStatus.join();
assertEquals(Controller.CreateReaderGroupResponse.Status.SUCCESS, createResponse1.getStatus());
assertFalse(ReaderGroupConfig.DEFAULT_UUID.toString().equals(createResponse1.getConfig().getReaderGroupId()));
assertEquals(0L, createResponse1.getConfig().getGeneration());
String subscriber2 = "subscriber2";
createStatus = streamMetadataTasks.createReaderGroup(SCOPE, subscriber2, consumpRGConfig, System.currentTimeMillis(), 0L);
assertTrue(Futures.await(processEvent(requestEventWriter)));
Controller.CreateReaderGroupResponse createResponse2 = createStatus.join();
assertEquals(Controller.CreateReaderGroupResponse.Status.SUCCESS, createResponse2.getStatus());
assertFalse(ReaderGroupConfig.DEFAULT_UUID.toString().equals(createResponse2.getConfig().getReaderGroupId()));
assertEquals(0L, createResponse2.getConfig().getGeneration());
final String subscriber1Name = NameUtils.getScopedReaderGroupName(SCOPE, subscriber1);
final String subscriber2Name = NameUtils.getScopedReaderGroupName(SCOPE, subscriber2);
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber1Name, createResponse1.getConfig().getReaderGroupId(), createResponse1.getConfig().getGeneration(), ImmutableMap.of(0L, 2L, 1L, 1L), 0L).join();
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber2Name, createResponse2.getConfig().getReaderGroupId(), createResponse2.getConfig().getGeneration(), ImmutableMap.of(0L, 1L, 1L, 2L), 0L).join();
Map<Long, Long> map1 = new HashMap<>();
map1.put(0L, 2L);
map1.put(1L, 2L);
long size = streamStorePartialMock.getSizeTillStreamCut(SCOPE, stream1, map1, Optional.empty(), null, executor).join();
doReturn(CompletableFuture.completedFuture(new StreamCutRecord(time.get(), size, ImmutableMap.copyOf(map1)))).when(streamMetadataTasks).generateStreamCut(anyString(), anyString(), any(), any(), any());
// call retention and verify that retention policy applies
streamMetadataTasks.retention(SCOPE, stream1, retentionPolicy, time.get(), null, "").join();
// now retention set has one stream cut 0/2, 1/2, recording time 0L
// subscriber lowerbound is 0/1, 1/1; truncation should not happen as this lowerbound is ahead of the min retention streamcut.
VersionedMetadata<StreamTruncationRecord> truncationRecord = streamStorePartialMock.getTruncationRecord(SCOPE, stream1, null, executor).join();
assertFalse(truncationRecord.getObject().isUpdating());
// endregion
// region case 2: min policy check
// subscriber streamcut > min time streamcut
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(2L, 4L, ImmutableMap.of(0L, 2L, 1L, 2L)), null, executor).join();
time.set(10L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 20L, ImmutableMap.of(0L, 10L, 1L, 10L)), null, executor).join();
time.set(11L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 20L, ImmutableMap.of(0L, 10L, 1L, 10L)), null, executor).join();
// retentionset: 0L: 0L/2L, 1L/2L... 2L: 0L/2L, 1L/2L... 10L: 0/10, 1/10....11L: 0/10, 1/10.
// update both readers to 0/3, 1/3.
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber1Name, createResponse1.getConfig().getReaderGroupId(), createResponse1.getConfig().getGeneration(), ImmutableMap.of(0L, 3L, 1L, 3L), 0L).join();
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber2Name, createResponse2.getConfig().getReaderGroupId(), createResponse2.getConfig().getGeneration(), ImmutableMap.of(0L, 3L, 1L, 3L), 0L).join();
// new truncation should happen at subscriber lowerbound.
streamMetadataTasks.retention(SCOPE, stream1, retentionPolicy, time.get(), null, "").join();
truncationRecord = streamStorePartialMock.getTruncationRecord(SCOPE, stream1, null, executor).join();
assertEquals(truncationRecord.getObject().getStreamCut().get(0L).longValue(), 3L);
assertEquals(truncationRecord.getObject().getStreamCut().get(1L).longValue(), 3L);
assertTrue(truncationRecord.getObject().isUpdating());
streamStorePartialMock.completeTruncation(SCOPE, stream1, truncationRecord, null, executor).join();
// endregion
// region case 3: min criteria not met on lower bound. truncate at max.
time.set(20L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 22L, ImmutableMap.of(0L, 11L, 1L, 11L)), null, executor).join();
// update both readers to the latest position (0/11, 1/11) so the subscriber lowerbound is ahead of the min limit.
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber1Name, createResponse1.getConfig().getReaderGroupId(), createResponse1.getConfig().getGeneration(), ImmutableMap.of(0L, 11L, 1L, 11L), 0L).join();
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber2Name, createResponse2.getConfig().getReaderGroupId(), createResponse2.getConfig().getGeneration(), ImmutableMap.of(0L, 11L, 1L, 11L), 0L).join();
streamMetadataTasks.retention(SCOPE, stream1, retentionPolicy, time.get(), null, "").join();
// retentionset: 0L: 0L/2L, 1L/2L... 2L: 0L/2L, 1L/2L... 10L: 0/10, 1/10....11L: 0/10, 1/10... 20L: 0/11, 1/11
// subscriber lowerbound is 0/11, 1/11
truncationRecord = streamStorePartialMock.getTruncationRecord(SCOPE, stream1, null, executor).join();
// truncation happens at the min limit
assertEquals(truncationRecord.getObject().getStreamCut().get(0L).longValue(), 10L);
assertEquals(truncationRecord.getObject().getStreamCut().get(1L).longValue(), 10L);
assertTrue(truncationRecord.getObject().isUpdating());
streamStorePartialMock.completeTruncation(SCOPE, stream1, truncationRecord, null, executor).join();
// endregion
// region case 4: lowerbound behind max
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(30L, 40L, ImmutableMap.of(0L, 20L, 1L, 20L)), null, executor).join();
time.set(40L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 42L, ImmutableMap.of(0L, 21L, 1L, 21L)), null, executor).join();
// keep both readers at 0/11, 1/11 so the subscriber lowerbound stays behind the max bound.
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber1Name, consumpRGConfig.getReaderGroupId().toString(), 0L, ImmutableMap.of(0L, 11L, 1L, 11L), 0L).join();
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber2Name, consumpRGConfig.getReaderGroupId().toString(), 0L, ImmutableMap.of(0L, 11L, 1L, 11L), 0L).join();
streamMetadataTasks.retention(SCOPE, stream1, retentionPolicy, time.get(), null, "").join();
// now retention set has five stream cuts 1: 0/2, 1/2...10: 0/10, 1/10... 20: 0/11, 1/11.. 30: 0/20, 1/20.. 40L: 0/21, 1/21
// subscriber lowerbound is 0/11, 1/11 ..
// maxbound = 30, maxlimit = 30; lowerbound is behind maxbound, so we truncate at max.
truncationRecord = streamStorePartialMock.getTruncationRecord(SCOPE, stream1, null, executor).join();
assertEquals(truncationRecord.getObject().getStreamCut().get(0L).longValue(), 20L);
assertEquals(truncationRecord.getObject().getStreamCut().get(1L).longValue(), 20L);
assertTrue(truncationRecord.getObject().isUpdating());
streamStorePartialMock.completeTruncation(SCOPE, stream1, truncationRecord, null, executor).join();
// endregion
// region case 5: lowerbound overlaps with maxbound
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(50L, 43L, ImmutableMap.of(0L, 21L, 1L, 22L)), null, executor).join();
time.set(59L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 60L, ImmutableMap.of(0L, 30L, 1L, 30L)), null, executor).join();
time.set(60L);
streamStorePartialMock.addStreamCutToRetentionSet(SCOPE, stream1, new StreamCutRecord(time.get(), 60L, ImmutableMap.of(0L, 30L, 1L, 30L)), null, executor).join();
// update both readers to 0/22, 1/21 so the subscriber lowerbound overlaps the max bound cut (0/21, 1/22).
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber1Name, consumpRGConfig.getReaderGroupId().toString(), 0L, ImmutableMap.of(0L, 22L, 1L, 21L), 0L).join();
streamMetadataTasks.updateSubscriberStreamCut(SCOPE, stream1, subscriber2Name, consumpRGConfig.getReaderGroupId().toString(), 0L, ImmutableMap.of(0L, 22L, 1L, 21L), 0L).join();
streamMetadataTasks.retention(SCOPE, stream1, retentionPolicy, time.get(), null, "").join();
// now retention set has eight stream cuts 1: 0/2, 1/2...10: 0/10, 1/10... 20: 0/11, 1/11.. 30: 0/20, 1/20.. 40L: 0/21, 1/21
// 50: 0/21, 1/22 ... 59: 0/30, 1/30.. 60: 0/30, 1/30
// subscriber lowerbound is 0/22, 1/21
// max: 50, limit: 50
// this overlaps with max. so truncate at max (50: 0/21, 1/22)
truncationRecord = streamStorePartialMock.getTruncationRecord(SCOPE, stream1, null, executor).join();
assertEquals(truncationRecord.getObject().getStreamCut().get(0L).longValue(), 21L);
assertEquals(truncationRecord.getObject().getStreamCut().get(1L).longValue(), 22L);
assertTrue(truncationRecord.getObject().isUpdating());
streamStorePartialMock.completeTruncation(SCOPE, stream1, truncationRecord, null, executor).join();
// endregion
}
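The five cases above pin down a cut-selection rule for consumption-based retention with a time limit: truncate at the subscriber lowerbound when it lies between the two time limits, fall back to the bound that a straying lowerbound would violate, and treat an overlapping lowerbound as a fallback to the max bound. Below is a minimal standalone sketch of that rule, reconstructed only from the assertions in this test; it is not StreamMetadataTasks' implementation, and names such as ConsumptionRetentionSketch and chooseTruncationCut are invented for illustration. Stream cuts are modelled as plain segment-id to offset maps.
import java.util.Map;
import java.util.NavigableMap;
import java.util.Optional;
// Standalone illustration of the truncation-cut selection asserted by cases 1-5 above.
final class ConsumptionRetentionSketch {
    // Returns 1 if cut a is strictly ahead of cut b, -1 if strictly behind, 0 if equal or overlapping.
    static int compare(Map<Long, Long> a, Map<Long, Long> b) {
        boolean ahead = false;
        boolean behind = false;
        for (Map.Entry<Long, Long> entry : a.entrySet()) {
            long other = b.getOrDefault(entry.getKey(), 0L);
            if (entry.getValue() > other) {
                ahead = true;
            } else if (entry.getValue() < other) {
                behind = true;
            }
        }
        return ahead == behind ? 0 : (ahead ? 1 : -1);
    }
    // retentionSet: recording time -> stream cut, as built by addStreamCutToRetentionSet above.
    // subscriberLowerBound: per-segment minimum over all subscribers' acknowledged cuts.
    static Optional<Map<Long, Long>> chooseTruncationCut(NavigableMap<Long, Map<Long, Long>> retentionSet,
                                                         Map<Long, Long> subscriberLowerBound,
                                                         long now, long minPeriod, long maxPeriod) {
        // Newest cut recorded at or before (now - maxPeriod): data behind it has exceeded the max
        // retention period (the "max" bound in the comments above).
        Map.Entry<Long, Map<Long, Long>> maxBound = retentionSet.floorEntry(now - maxPeriod);
        // Newest cut recorded at or before (now - minPeriod): truncating past it would retain less
        // than the min period (the "min limit" in the comments above).
        Map.Entry<Long, Map<Long, Long>> minBound = retentionSet.floorEntry(now - minPeriod);
        if (maxBound != null && compare(subscriberLowerBound, maxBound.getValue()) <= 0) {
            // Cases 4 and 5: the lowerbound is behind or overlaps the max bound, so truncate at the max bound.
            return Optional.of(maxBound.getValue());
        }
        if (minBound == null) {
            // Case 1: no retention-set cut is old enough yet, so nothing is truncated.
            return Optional.empty();
        }
        if (compare(subscriberLowerBound, minBound.getValue()) > 0) {
            // Case 3: truncating at the lowerbound would retain less than the min period, so truncate at the min limit.
            return Optional.of(minBound.getValue());
        }
        // Case 2: the lowerbound sits between the two limits, so truncate exactly at what the subscribers have read.
        return Optional.of(subscriberLowerBound);
    }
}
For example, feeding case 2's inputs into chooseTruncationCut (retention set 0: 0/2 1/2, 2: 0/2 1/2, 10: 0/10 1/10, 11: 0/10 1/10; lowerbound 0/3, 1/3; now = 11, min = 1, max = 10) returns the lowerbound 0/3, 1/3, matching the truncation record asserted in that region.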
Use of io.pravega.client.stream.impl.StreamCutImpl in project pravega by pravega.
In class LocalControllerTest, method testUpdateSubscriberStreamCut:
@Test(timeout = 10000)
public void testUpdateSubscriberStreamCut() throws ExecutionException, InterruptedException {
UUID someId = UUID.randomUUID();
StreamCut streamCut = new StreamCutImpl(new StreamImpl("scope", "stream"), Collections.emptyMap());
when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.SUCCESS).build()));
Assert.assertTrue(this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join());
when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.FAILURE).build()));
assertThrows("Expected ControllerFailureException", () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(), ex -> ex instanceof ControllerFailureException);
when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.STREAM_NOT_FOUND).build()));
assertThrows("Expected IllegalArgumentException", () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(), ex -> ex instanceof IllegalArgumentException);
when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.SUBSCRIBER_NOT_FOUND).build()));
assertThrows("Expected IllegalArgumentException", () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(), ex -> ex instanceof IllegalArgumentException);
when(this.mockControllerService.updateSubscriberStreamCut(anyString(), anyString(), anyString(), any(), anyLong(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateSubscriberStatus.newBuilder().setStatus(Controller.UpdateSubscriberStatus.Status.GENERATION_MISMATCH).build()));
assertThrows("Expected IllegalArgumentException", () -> this.testController.updateSubscriberStreamCut("scope", "stream", "subscriber", someId, 0L, streamCut).join(), ex -> ex instanceof IllegalArgumentException);
}
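The mocked statuses above fix how the local controller is expected to surface each UpdateSubscriberStatus value: SUCCESS completes with true, the three argument-level rejections become IllegalArgumentException, and FAILURE becomes ControllerFailureException. A rough sketch of that mapping follows, written only to make the assertions easier to read; it is not LocalController's actual code, the helper name is made up, and Controller refers to the same generated protobuf class the test already uses.
// Illustration only: a made-up helper mirroring the status handling the assertions above expect.
final class UpdateSubscriberStatusMapping {
    static boolean toResult(Controller.UpdateSubscriberStatus.Status status) {
        switch (status) {
            case SUCCESS:
                return true;
            case STREAM_NOT_FOUND:
            case SUBSCRIBER_NOT_FOUND:
            case GENERATION_MISMATCH:
                // The test expects IllegalArgumentException for these statuses.
                throw new IllegalArgumentException("updateSubscriberStreamCut rejected: " + status);
            case FAILURE:
            default:
                // The test expects ControllerFailureException here; RuntimeException stands in so the
                // sketch does not have to name controller-client exception imports.
                throw new RuntimeException("updateSubscriberStreamCut failed: " + status);
        }
    }
}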
Use of io.pravega.client.stream.impl.StreamCutImpl in project pravega by pravega.
In class LocalControllerTest, method testUpdateReaderGroup:
@Test(timeout = 10000)
public void testUpdateReaderGroup() throws ExecutionException, InterruptedException {
final Segment seg0 = new Segment("scope", "stream1", 0L);
final Segment seg1 = new Segment("scope", "stream1", 1L);
ImmutableMap<Segment, Long> startStreamCut = ImmutableMap.of(seg0, 10L, seg1, 10L);
Map<Stream, StreamCut> startSC = ImmutableMap.of(Stream.of("scope", "stream1"), new StreamCutImpl(Stream.of("scope", "stream1"), startStreamCut));
ImmutableMap<Segment, Long> endStreamCut = ImmutableMap.of(seg0, 200L, seg1, 300L);
Map<Stream, StreamCut> endSC = ImmutableMap.of(Stream.of("scope", "stream1"), new StreamCutImpl(Stream.of("scope", "stream1"), endStreamCut));
ReaderGroupConfig rgConfig = ReaderGroupConfig.builder().automaticCheckpointIntervalMillis(30000L).groupRefreshTimeMillis(20000L).maxOutstandingCheckpointRequest(2).retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT).startingStreamCuts(startSC).endingStreamCuts(endSC).build();
ReaderGroupConfig config = ReaderGroupConfig.cloneConfig(rgConfig, UUID.randomUUID(), 0L);
when(this.mockControllerService.updateReaderGroup(anyString(), anyString(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateReaderGroupResponse.newBuilder().setStatus(Controller.UpdateReaderGroupResponse.Status.SUCCESS).setGeneration(1L).build()));
Assert.assertNotNull(this.testController.updateReaderGroup("scope", "subscriber", config).join());
when(this.mockControllerService.updateReaderGroup(anyString(), anyString(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateReaderGroupResponse.newBuilder().setStatus(Controller.UpdateReaderGroupResponse.Status.FAILURE).build()));
assertThrows("Expected ControllerFailureException", () -> this.testController.updateReaderGroup("scope", "subscriber", config).join(), ex -> ex instanceof ControllerFailureException);
when(this.mockControllerService.updateReaderGroup(anyString(), anyString(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateReaderGroupResponse.newBuilder().setStatus(Controller.UpdateReaderGroupResponse.Status.INVALID_CONFIG).build()));
assertThrows("Expected ReaderGroupConfigRejectedException", () -> this.testController.updateReaderGroup("scope", "subscriber", config).join(), ex -> ex instanceof ReaderGroupConfigRejectedException);
when(this.mockControllerService.updateReaderGroup(anyString(), anyString(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateReaderGroupResponse.newBuilder().setStatus(Controller.UpdateReaderGroupResponse.Status.RG_NOT_FOUND).build()));
assertThrows("Expected IllegalArgumentException", () -> this.testController.updateReaderGroup("scope", "subscriber", config).join(), ex -> ex instanceof IllegalArgumentException);
when(this.mockControllerService.updateReaderGroup(anyString(), anyString(), any(), anyLong())).thenReturn(CompletableFuture.completedFuture(Controller.UpdateReaderGroupResponse.newBuilder().setStatusValue(-1).build()));
assertThrows("Expected ControllerFailureException", () -> this.testController.updateReaderGroup("scope", "subscriber", config).join(), ex -> ex instanceof ControllerFailureException);
}
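Similarly, these mocks pin down the expected handling of each UpdateReaderGroupResponse status: SUCCESS yields the new generation, INVALID_CONFIG becomes ReaderGroupConfigRejectedException, RG_NOT_FOUND becomes IllegalArgumentException, and FAILURE or an unrecognized status value becomes ControllerFailureException. A made-up helper sketching that mapping, again not the real client code; Controller is the same generated protobuf class the test already uses.
// Illustration only: mirrors the response handling the assertions above expect.
final class UpdateReaderGroupResponseMapping {
    static long toGeneration(Controller.UpdateReaderGroupResponse response) {
        switch (response.getStatus()) {
            case SUCCESS:
                return response.getGeneration();
            case INVALID_CONFIG:
                // The test expects ReaderGroupConfigRejectedException here; RuntimeException stands in.
                throw new RuntimeException("reader group config rejected");
            case RG_NOT_FOUND:
                throw new IllegalArgumentException("reader group not found");
            case FAILURE:
            default:
                // Covers FAILURE and unrecognized status values (setStatusValue(-1) above);
                // the test expects ControllerFailureException for both. RuntimeException stands in here.
                throw new RuntimeException("reader group update failed");
        }
    }
}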
Use of io.pravega.client.stream.impl.StreamCutImpl in project pravega by pravega.
In class ControllerServiceImplTest, method createReaderGroupTests:
@Test
public void createReaderGroupTests() {
createScopeAndStream(SCOPE1, STREAM1, ScalingPolicy.fixed(2));
final Segment seg0 = new Segment(SCOPE1, STREAM1, 0L);
final Segment seg1 = new Segment(SCOPE1, STREAM1, 1L);
ImmutableMap<Segment, Long> startStreamCut = ImmutableMap.of(seg0, 10L, seg1, 10L);
Map<Stream, StreamCut> startSC = ImmutableMap.of(Stream.of(SCOPE1, STREAM1), new StreamCutImpl(Stream.of(SCOPE1, STREAM1), startStreamCut));
ImmutableMap<Segment, Long> endStreamCut = ImmutableMap.of(seg0, 200L, seg1, 300L);
Map<Stream, StreamCut> endSC = ImmutableMap.of(Stream.of(SCOPE1, STREAM1), new StreamCutImpl(Stream.of(SCOPE1, STREAM1), endStreamCut));
ReaderGroupConfig config = ReaderGroupConfig.builder().automaticCheckpointIntervalMillis(30000L).groupRefreshTimeMillis(20000L).maxOutstandingCheckpointRequest(2).retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT).startingStreamCuts(startSC).endingStreamCuts(endSC).build();
ResultObserver<CreateReaderGroupResponse> result = new ResultObserver<>();
String rgName = "rg_1";
this.controllerService.createReaderGroup(ModelHelper.decode(SCOPE1, rgName, config), result);
CreateReaderGroupResponse createRGStatus = result.get();
assertEquals("Create Reader Group Invalid RG Name", CreateReaderGroupResponse.Status.INVALID_RG_NAME, createRGStatus.getStatus());
ResultObserver<CreateReaderGroupResponse> result1 = new ResultObserver<>();
rgName = "rg1";
this.controllerService.createReaderGroup(ModelHelper.decode("somescope", rgName, config), result1);
createRGStatus = result1.get();
assertEquals("Create Reader Group Scope not found", CreateReaderGroupResponse.Status.SCOPE_NOT_FOUND, createRGStatus.getStatus());
}
Use of io.pravega.client.stream.impl.StreamCutImpl in project pravega by pravega.
In class ControllerServiceImplTest, method updateReaderGroupTests:
@Test
public void updateReaderGroupTests() {
createScopeAndStream(SCOPE1, STREAM1, ScalingPolicy.fixed(2));
String rgName = "rg1";
UUID rgId = UUID.randomUUID();
createReaderGroup(SCOPE1, STREAM1, rgName, rgId);
final Segment seg0 = new Segment(SCOPE1, STREAM1, 0L);
final Segment seg1 = new Segment(SCOPE1, STREAM1, 1L);
ImmutableMap<Segment, Long> startStreamCut = ImmutableMap.of(seg0, 100L, seg1, 1000L);
Map<Stream, StreamCut> startSC = ImmutableMap.of(Stream.of(SCOPE1, STREAM1), new StreamCutImpl(Stream.of(SCOPE1, STREAM1), startStreamCut));
ImmutableMap<Segment, Long> endStreamCut = ImmutableMap.of(seg0, 2000L, seg1, 3000L);
Map<Stream, StreamCut> endSC = ImmutableMap.of(Stream.of(SCOPE1, STREAM1), new StreamCutImpl(Stream.of(SCOPE1, STREAM1), endStreamCut));
ReaderGroupConfig newConfig = ReaderGroupConfig.builder().automaticCheckpointIntervalMillis(80000L).groupRefreshTimeMillis(40000L).maxOutstandingCheckpointRequest(5).retentionType(ReaderGroupConfig.StreamDataRetention.AUTOMATIC_RELEASE_AT_LAST_CHECKPOINT).startingStreamCuts(startSC).endingStreamCuts(endSC).build();
newConfig = ReaderGroupConfig.cloneConfig(newConfig, rgId, 0L);
ResultObserver<UpdateReaderGroupResponse> result = new ResultObserver<>();
this.controllerService.updateReaderGroup(ModelHelper.decode(SCOPE1, rgName, newConfig), result);
UpdateReaderGroupResponse rgStatus = result.get();
assertEquals("Update Reader Group Status", UpdateReaderGroupResponse.Status.SUCCESS, rgStatus.getStatus());
assertEquals("Updated Generation", 1L, rgStatus.getGeneration());
ResultObserver<UpdateReaderGroupResponse> result1 = new ResultObserver<>();
this.controllerService.updateReaderGroup(ModelHelper.decode(SCOPE1, rgName, newConfig), result1);
rgStatus = result1.get();
assertEquals("Update Reader Group", UpdateReaderGroupResponse.Status.INVALID_CONFIG, rgStatus.getStatus());
ResultObserver<UpdateReaderGroupResponse> result2 = new ResultObserver<>();
this.controllerService.updateReaderGroup(ModelHelper.decode(SCOPE1, "somerg", newConfig), result2);
rgStatus = result2.get();
assertEquals("Update Reader Group", UpdateReaderGroupResponse.Status.RG_NOT_FOUND, rgStatus.getStatus());
}
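The second update above is rejected with INVALID_CONFIG, presumably because newConfig still carries generation 0 while the store moved to generation 1 after the first update. Under that assumption, a retry would first fold the returned generation back into the config, for example (sketch only, reusing the test's variables):
// Hypothetical retry: clone the config with the generation asserted above (1L) before updating again.
ReaderGroupConfig refreshedConfig = ReaderGroupConfig.cloneConfig(newConfig, rgId, 1L);
ResultObserver<UpdateReaderGroupResponse> retryResult = new ResultObserver<>();
this.controllerService.updateReaderGroup(ModelHelper.decode(SCOPE1, rgName, refreshedConfig), retryResult);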