Use of io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask in project pravega by pravega.
The class ControllerServiceWithKVTableTest, method setup.
@Before
public void setup() {
segmentHelperMock = SegmentHelperMock.getSegmentHelperMockForTables(executor);
streamStore = spy(getStore());
kvtStore = spy(getKVTStore());
BucketStore bucketStore = StreamStoreFactory.createZKBucketStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(PRAVEGA_ZK_CURATOR_RESOURCE.client, executor);
connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
GrpcAuthHelper disabledAuthHelper = GrpcAuthHelper.getDisabledAuthHelper();
StreamMetrics.initialize();
TransactionMetrics.initialize();
EventHelper helperMock = EventHelperMock.getEventHelperMock(executor, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelperMock, executor, "host", disabledAuthHelper, helperMock);
streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelperMock, executor, "host", disabledAuthHelper);
kvtMetadataTasks = spy(new TableMetadataTasks(kvtStore, segmentHelperMock, executor, executor, "host", GrpcAuthHelper.getDisabledAuthHelper(), helperMock));
StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
        new AutoScaleTask(streamMetadataTasks, streamStore, executor),
        new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
        new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
        new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
        new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        streamStore,
        new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor),
        executor);
streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));
consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
}
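The matching clean-up is not part of this excerpt. A minimal tear-down sketch, assuming each component created above exposes the usual close()/reset() methods (the real @After of ControllerServiceWithKVTableTest is not shown on this page):

@After
public void tearDown() throws Exception {
    // Close the tasks and the client connection factory created in setup(), then reset the
    // metric singletons so later test classes start from a clean state. (Assumed pattern.)
    streamTransactionMetadataTasks.close();
    streamMetadataTasks.close();
    kvtMetadataTasks.close();
    connectionFactory.close();
    StreamMetrics.reset();
    TransactionMetrics.reset();
}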
Use of io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask in project pravega by pravega.
The class ControllerServiceWithStreamTest, method setup.
@Before
public void setup() {
try {
zkServer = new TestingServerStarter().start();
} catch (Exception e) {
log.error("Error starting ZK server", e);
}
zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
zkClient.start();
streamStore = spy(getStore());
kvtStore = spy(getKVTStore());
BucketStore bucketStore = StreamStoreFactory.createZKBucketStore(zkClient, executor);
TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
GrpcAuthHelper disabledAuthHelper = GrpcAuthHelper.getDisabledAuthHelper();
SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
StreamMetrics.initialize();
TransactionMetrics.initialize();
EventHelper helperMock = EventHelperMock.getEventHelperMock(executor, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelperMock, executor, "host", disabledAuthHelper, helperMock);
kvtMetadataTasks = spy(new TableMetadataTasks(kvtStore, segmentHelperMock, executor, executor, "host", GrpcAuthHelper.getDisabledAuthHelper(), helperMock));
streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelperMock, executor, "host", disabledAuthHelper);
StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
        new AutoScaleTask(streamMetadataTasks, streamStore, executor),
        new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
        new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
        new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
        new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        streamStore,
        new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor),
        executor);
streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));
consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
}
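To illustrate what this wiring enables, here is a minimal smoke-test sketch against the pipeline built above (illustrative only; "testScope" and "testStream" are placeholder names, and the assertion assumes the mocked segment helper reports success, as in the tests further down this page):

@Test
public void createStreamSmokeTest() {
    // Create a scope and a one-segment stream through the metadata tasks; the mock event
    // writer installed in setup() hands any resulting controller events straight to the
    // StreamRequestHandler, so the whole chain completes in-process.
    streamStore.createScope("testScope", null, executor).join();
    streamMetadataTasks.createStream("testScope", "testStream",
            StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build(),
            System.currentTimeMillis(), 0L).join();
    // A successfully created stream is expected to end up in the ACTIVE state.
    assertEquals(State.ACTIVE,
            streamStore.getState("testScope", "testStream", true, null, executor).join());
}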
Use of io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask in project pravega by pravega.
The class RequestHandlersTest, method testDeleteIgnoreFairness.
@Test
public void testDeleteIgnoreFairness() {
StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
        new AutoScaleTask(streamMetadataTasks, streamStore, executor),
        new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
        new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
        new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
        new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        streamStore,
        new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtTasks, executor),
        executor);
String fairness = "fairness";
streamStore.createScope(fairness, null, executor).join();
long createTimestamp = System.currentTimeMillis();
streamMetadataTasks.createStream(fairness, fairness, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build(), createTimestamp, 0L).join();
// 1. set segment helper mock to throw exception
doAnswer(x -> Futures.failedFuture(new RuntimeException())).when(segmentHelper).deleteSegment(anyString(), anyString(), anyLong(), anyString(), anyLong());
// 2. start process --> this should fail with a retryable exception while talking to segment store!
streamStore.setState(fairness, fairness, State.SEALED, null, executor).join();
assertEquals(State.SEALED, streamStore.getState(fairness, fairness, true, null, executor).join());
DeleteStreamEvent event = new DeleteStreamEvent(fairness, fairness, 0L, createTimestamp);
AssertExtensions.assertFutureThrows("", streamRequestHandler.process(event, () -> false), e -> Exceptions.unwrap(e) instanceof RuntimeException);
verify(segmentHelper, atLeastOnce()).deleteSegment(anyString(), anyString(), anyLong(), anyString(), anyLong());
// 3. set waiting processor to "myProcessor"
streamStore.createWaitingRequestIfAbsent(fairness, fairness, "myProcessor", null, executor).join();
// 4. reset segment helper to return success
doAnswer(x -> CompletableFuture.completedFuture(null)).when(segmentHelper).deleteSegment(anyString(), anyString(), anyLong(), anyString(), anyLong());
// 5. process again. it should succeed while ignoring waiting processor
streamRequestHandler.process(event, () -> false).join();
AssertExtensions.assertFutureThrows("", streamStore.getState(fairness, fairness, true, null, executor), e -> Exceptions.unwrap(e) instanceof StoreException.DataNotFoundException);
}
Use of io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask in project pravega by pravega.
The class ControllerGrpcAuthFocusedTest, method setup.
@Before
public void setup() throws IOException {
TaskMetadataStore taskMetadataStore = TaskStoreFactory.createInMemoryStore(EXECUTOR);
StreamMetadataStore streamStore = StreamStoreFactory.createInMemoryStore();
this.kvtStore = spy(KVTableStoreFactory.createInMemoryStore(streamStore, EXECUTOR));
BucketStore bucketStore = StreamStoreFactory.createInMemoryBucketStore();
SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
RequestTracker requestTracker = new RequestTracker(true);
StreamMetrics.initialize();
TransactionMetrics.initialize();
GrpcAuthHelper authHelper = new GrpcAuthHelper(true, "secret", 300);
EventHelper helper = EventHelperMock.getEventHelperMock(EXECUTOR, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelper, EXECUTOR, "host", authHelper, helper);
streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelper, EXECUTOR, "host", authHelper);
kvtMetadataTasks = new TableMetadataTasks(kvtStore, segmentHelper, EXECUTOR, EXECUTOR, "host", authHelper, helper);
StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
        new AutoScaleTask(streamMetadataTasks, streamStore, EXECUTOR),
        new ScaleOperationTask(streamMetadataTasks, streamStore, EXECUTOR),
        new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, EXECUTOR),
        new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, EXECUTOR),
        new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, EXECUTOR),
        new TruncateStreamTask(streamMetadataTasks, streamStore, EXECUTOR),
        new CreateReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
        new DeleteReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
        new UpdateReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
        streamStore,
        new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, EXECUTOR),
        EXECUTOR);
streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, EXECUTOR));
streamTransactionMetadataTasks.initializeStreamWriters(new EventStreamWriterMock<>(), new EventStreamWriterMock<>());
Cluster mockCluster = mock(Cluster.class);
when(mockCluster.getClusterMembers()).thenReturn(Collections.singleton(new Host("localhost", 9090, null)));
ControllerServiceGrpc.ControllerServiceImplBase controllerServiceImplBase = new ControllerServiceImpl(
        new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks,
                streamTransactionMetadataTasks, segmentHelper, EXECUTOR, mockCluster, requestTracker),
        authHelper, requestTracker, true, true, 2);
ControllerServiceGrpc.ControllerServiceImplBase controllerServiceImplBaseStrict = new ControllerServiceImpl(
        new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks,
                streamTransactionMetadataTasks, segmentHelper, EXECUTOR, mockCluster, requestTracker),
        authHelper, requestTracker, true, false, 2);
PasswordAuthHandler authHandler = new PasswordAuthHandler();
authHandler.initialize(AUTH_FILE.getAbsolutePath());
String uniqueServerName = String.format("Test server name: %s", getClass());
String uniqueServerNameStrict = String.format("Test server name: %sStrict", getClass());
// Using a builder that creates a server for servicing in-process requests.
// Also, using a direct executor which executes app code directly in transport thread. See
// https://grpc.io/grpc-java/javadoc/io/grpc/inprocess/InProcessServerBuilder.html for more information.
grpcServer = InProcessServerBuilder.forName(uniqueServerName)
        .addService(ServerInterceptors.intercept(controllerServiceImplBase, new AuthInterceptor(authHandler)))
        .directExecutor()
        .build()
        .start();
grpcServerStrict = InProcessServerBuilder.forName(uniqueServerNameStrict)
        .addService(ServerInterceptors.intercept(controllerServiceImplBaseStrict, new AuthInterceptor(authHandler)))
        .directExecutor()
        .build()
        .start();
inProcessChannel = InProcessChannelBuilder.forName(uniqueServerName).directExecutor().build();
inProcessChannelStrict = InProcessChannelBuilder.forName(uniqueServerNameStrict).directExecutor().build();
}
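A test method can then obtain a client stub over one of the in-process channels. A minimal sketch (credential plumbing simplified; the exact helper the auth tests use is not shown here):

// Blocking stub over the non-strict in-process server; auth-focused tests would additionally
// attach CallCredentials carrying a username/password that the PasswordAuthHandler accepts.
ControllerServiceGrpc.ControllerServiceBlockingStub stub =
        ControllerServiceGrpc.newBlockingStub(inProcessChannel);
// stub = stub.withCallCredentials(credentials);   // 'credentials' is a hypothetical CallCredentials instance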
Use of io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask in project pravega by pravega.
The class RequestHandlersTest, method testScaleIgnoreFairness.
@Test
public void testScaleIgnoreFairness() {
StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
        new AutoScaleTask(streamMetadataTasks, streamStore, executor),
        new ScaleOperationTask(streamMetadataTasks, streamStore, executor),
        new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor),
        new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor),
        new TruncateStreamTask(streamMetadataTasks, streamStore, executor),
        new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor),
        new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor),
        streamStore,
        new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtTasks, executor),
        executor);
String fairness = "fairness";
streamStore.createScope(fairness, null, executor).join();
streamMetadataTasks.createStream(fairness, fairness, StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build(), System.currentTimeMillis(), 0L).join();
// 1. set segment helper mock to throw exception
doAnswer(x -> Futures.failedFuture(new RuntimeException())).when(segmentHelper).sealSegment(anyString(), anyString(), anyLong(), anyString(), anyLong());
// 2. start scale --> this should fail with a retryable exception while talking to segment store!
ScaleOpEvent scaleEvent = new ScaleOpEvent(fairness, fairness, Collections.singletonList(0L), Collections.singletonList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), false, System.currentTimeMillis(), 0L);
AssertExtensions.assertFutureThrows("", streamRequestHandler.process(scaleEvent, () -> false), e -> Exceptions.unwrap(e) instanceof RuntimeException);
// verify that scale was started
assertEquals(State.SCALING, streamStore.getState(fairness, fairness, true, null, executor).join());
// 3. set waiting processor to "myProcessor"
streamStore.createWaitingRequestIfAbsent(fairness, fairness, "myProcessor", null, executor).join();
// 4. reset segment helper to return success
doAnswer(x -> CompletableFuture.completedFuture(true)).when(segmentHelper).sealSegment(anyString(), anyString(), anyLong(), anyString(), anyLong());
// 5. process again. it should succeed while ignoring waiting processor
streamRequestHandler.process(scaleEvent, () -> false).join();
EpochRecord activeEpoch = streamStore.getActiveEpoch(fairness, fairness, null, true, executor).join();
assertEquals(1, activeEpoch.getEpoch());
assertEquals(State.ACTIVE, streamStore.getState(fairness, fairness, true, null, executor).join());
// 6. run a new scale. it should fail because of waiting processor.
ScaleOpEvent scaleEvent2 = new ScaleOpEvent(fairness, fairness, Collections.singletonList(NameUtils.computeSegmentId(1, 1)), Collections.singletonList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), false, System.currentTimeMillis(), 0L);
AssertExtensions.assertFutureThrows("", streamRequestHandler.process(scaleEvent2, () -> false), e -> Exceptions.unwrap(e) instanceof StoreException.OperationNotAllowedException);
streamStore.deleteWaitingRequestConditionally(fairness, fairness, "myProcessor", null, executor).join();
}