
Example 11 with StreamMetadataStore

Use of io.pravega.controller.store.stream.StreamMetadataStore in project pravega by pravega.

The class ControllerEventProcessorsTest, method testHandleOrphaned.

@Test(timeout = 10000)
public void testHandleOrphaned() throws CheckpointStoreException {
    LocalController localController = mock(LocalController.class);
    CheckpointStore checkpointStore = mock(CheckpointStore.class);
    StreamMetadataStore streamStore = mock(StreamMetadataStore.class);
    BucketStore bucketStore = mock(BucketStore.class);
    ConnectionPool connectionPool = mock(ConnectionPool.class);
    StreamMetadataTasks streamMetadataTasks = mock(StreamMetadataTasks.class);
    StreamTransactionMetadataTasks streamTransactionMetadataTasks = mock(StreamTransactionMetadataTasks.class);
    KVTableMetadataStore kvtStore = mock(KVTableMetadataStore.class);
    TableMetadataTasks kvtTasks = mock(TableMetadataTasks.class);
    ControllerEventProcessorConfig config = ControllerEventProcessorConfigImpl.withDefault();
    EventProcessorSystem system = mock(EventProcessorSystem.class);
    EventProcessorGroup<ControllerEvent> processor = getProcessor();
    EventProcessorGroup<ControllerEvent> mockProcessor = spy(processor);
    doThrow(new CheckpointStoreException("host not found")).when(mockProcessor).notifyProcessFailure("host3");
    when(system.createEventProcessorGroup(any(), any(), any())).thenReturn(mockProcessor);
    @Cleanup
    ControllerEventProcessors processors = new ControllerEventProcessors("host1", config, localController,
            checkpointStore, streamStore, bucketStore, connectionPool, streamMetadataTasks,
            streamTransactionMetadataTasks, kvtStore, kvtTasks, system, executorService());
    // case where the processors have not been started yet, so kvtRequestProcessors is not initialized and remains null
    assertTrue(Futures.await(processors.sweepFailedProcesses(() -> Sets.newHashSet("host1"))));
    Assert.assertFalse(processors.isReady());
    Assert.assertFalse(processors.isBootstrapCompleted());
    Assert.assertFalse(processors.isMetadataServiceConnected());
    processors.startAsync();
    processors.awaitRunning();
    assertTrue(Futures.await(processors.sweepFailedProcesses(() -> Sets.newHashSet("host1"))));
    assertTrue(Futures.await(processors.handleFailedProcess("host1")));
    AssertExtensions.assertFutureThrows("host not found", processors.handleFailedProcess("host3"), e -> e instanceof CheckpointStoreException);
    processors.shutDown();
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) KVTableMetadataStore(io.pravega.controller.store.kvtable.KVTableMetadataStore) EventProcessorSystem(io.pravega.controller.eventProcessor.EventProcessorSystem) CheckpointStore(io.pravega.controller.store.checkpoint.CheckpointStore) ZKCheckpointStore(io.pravega.controller.store.checkpoint.ZKCheckpointStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) Cleanup(lombok.Cleanup) ControllerEvent(io.pravega.shared.controller.event.ControllerEvent) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) CheckpointStoreException(io.pravega.controller.store.checkpoint.CheckpointStoreException) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) Test(org.junit.Test)
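The getProcessor() helper that the test spies is not part of this excerpt. A minimal stand-in, sketched here on the assumption that a bare Mockito mock is enough for the spy-and-stub usage above (the real helper in ControllerEventProcessorsTest may build a richer fake):

@SuppressWarnings("unchecked")
private EventProcessorGroup<ControllerEvent> getProcessor() {
    // A bare mock suffices for this test: it is only wrapped in spy() and has
    // notifyProcessFailure("host3") stubbed to throw a CheckpointStoreException.
    return (EventProcessorGroup<ControllerEvent>) mock(EventProcessorGroup.class);
}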

Example 12 with StreamMetadataStore

Use of io.pravega.controller.store.stream.StreamMetadataStore in project pravega by pravega.

The class ScaleRequestHandlerTest, method concurrentIdenticalScaleRun.

private void concurrentIdenticalScaleRun(String stream, String func, boolean isManual, Predicate<Throwable> firstExceptionPredicate, boolean expectFailureOnSecondJob, Predicate<Throwable> secondExceptionPredicate, Map<String, Integer> invocationCount) throws Exception {
    StreamMetadataStore streamStore1 = getStore();
    StreamMetadataStore streamStore1Spied = spy(getStore());
    StreamConfiguration config = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.byEventRate(1, 2, 1)).build();
    streamStore1.createStream(scope, stream, config, System.currentTimeMillis(), null, executor).join();
    streamStore1.setState(scope, stream, State.ACTIVE, null, executor).join();
    CompletableFuture<Void> wait = new CompletableFuture<>();
    CompletableFuture<Void> signal = new CompletableFuture<>();
    ScaleOpEvent event = new ScaleOpEvent(scope, stream, Lists.newArrayList(0L), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), isManual, System.currentTimeMillis(), System.currentTimeMillis());
    if (isManual) {
        streamStore1.submitScale(scope, stream, Lists.newArrayList(0L), Lists.newArrayList(new AbstractMap.SimpleEntry<>(0.0, 1.0)), System.currentTimeMillis(), null, null, executor).join();
    }
    StreamMetadataStore streamStore2 = getStore();
    ScaleOperationTask scaleRequestHandler1 = new ScaleOperationTask(streamMetadataTasks, streamStore1Spied, executor);
    ScaleOperationTask scaleRequestHandler2 = new ScaleOperationTask(streamMetadataTasks, streamStore2, executor);
    setMockLatch(streamStore1, streamStore1Spied, func, signal, wait);
    // the first job's processing will stall inside the spied store, at the method selected by func
    CompletableFuture<Void> future1 = CompletableFuture.completedFuture(null).thenComposeAsync(v -> scaleRequestHandler1.execute(event), executor);
    signal.join();
    // let the second handler run to completion; when no failure is expected it should succeed
    if (!expectFailureOnSecondJob) {
        scaleRequestHandler2.execute(event).join();
    } else {
        AssertExtensions.assertSuppliedFutureThrows("second job should fail", () -> scaleRequestHandler2.execute(event), secondExceptionPredicate);
    }
    // now complete the wait latch so the stalled first job can resume; it is expected to fail with firstExceptionPredicate
    wait.complete(null);
    AssertExtensions.assertSuppliedFutureThrows("first scale should fail", () -> future1, firstExceptionPredicate);
    verify(streamStore1Spied, times(invocationCount.get("startScale"))).startScale(anyString(), anyString(), anyBoolean(), any(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("scaleCreateNewEpochs"))).scaleCreateNewEpochs(anyString(), anyString(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("scaleSegmentsSealed"))).scaleSegmentsSealed(anyString(), anyString(), any(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("completeScale"))).completeScale(anyString(), anyString(), any(), any(), any());
    verify(streamStore1Spied, times(invocationCount.get("updateVersionedState"))).updateVersionedState(anyString(), anyString(), any(), any(), any(), any());
    // validate scale done
    VersionedMetadata<EpochTransitionRecord> versioned = streamStore1.getEpochTransition(scope, stream, null, executor).join();
    assertEquals(EpochTransitionRecord.EMPTY, versioned.getObject());
    assertEquals(2, getVersionNumber(versioned));
    assertEquals(1, streamStore1.getActiveEpoch(scope, stream, null, true, executor).join().getEpoch());
    assertEquals(State.ACTIVE, streamStore1.getState(scope, stream, true, null, executor).join());
    streamStore1.close();
    streamStore2.close();
}
Also used : AbstractMap(java.util.AbstractMap) CompletableFuture(java.util.concurrent.CompletableFuture) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) ScaleOperationTask(io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask) ScaleOpEvent(io.pravega.shared.controller.event.ScaleOpEvent)
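The setMockLatch helper is also not shown. A hedged sketch of how it could wire the signal/wait latches for one of the supported method names, assuming the usual Mockito static imports already present in this test class (the method arity follows the verify(...) calls above; the real helper covers every name that can be passed as func):

private void setMockLatch(StreamMetadataStore store, StreamMetadataStore spied, String func,
                          CompletableFuture<Void> signal, CompletableFuture<Void> wait) {
    if ("completeScale".equals(func)) {
        doAnswer(invocation -> {
            // tell the test that the spied call has been entered ...
            signal.complete(null);
            // ... then hold the call until the test releases the latch
            wait.join();
            // finally delegate to the real (non-spied) store with the original arguments
            return store.completeScale(invocation.getArgument(0), invocation.getArgument(1),
                    invocation.getArgument(2), invocation.getArgument(3), invocation.getArgument(4));
        }).when(spied).completeScale(anyString(), anyString(), any(), any(), any());
    }
    // analogous branches would cover "startScale", "scaleCreateNewEpochs",
    // "scaleSegmentsSealed" and "updateVersionedState"
}

This is what lets the test freeze the first scale job mid-flight, run a second identical job, and only then release the first job to observe its expected failure.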

Example 13 with StreamMetadataStore

Use of io.pravega.controller.store.stream.StreamMetadataStore in project pravega by pravega.

The class ControllerGrpcAuthFocusedTest, method setup.

@Before
public void setup() throws IOException {
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createInMemoryStore(EXECUTOR);
    StreamMetadataStore streamStore = StreamStoreFactory.createInMemoryStore();
    this.kvtStore = spy(KVTableStoreFactory.createInMemoryStore(streamStore, EXECUTOR));
    BucketStore bucketStore = StreamStoreFactory.createInMemoryBucketStore();
    SegmentHelper segmentHelper = SegmentHelperMock.getSegmentHelperMock();
    RequestTracker requestTracker = new RequestTracker(true);
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
    GrpcAuthHelper authHelper = new GrpcAuthHelper(true, "secret", 300);
    EventHelper helper = EventHelperMock.getEventHelperMock(EXECUTOR, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelper, EXECUTOR, "host", authHelper, helper);
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelper, EXECUTOR, "host", authHelper);
    kvtMetadataTasks = new TableMetadataTasks(kvtStore, segmentHelper, EXECUTOR, EXECUTOR, "host", authHelper, helper);
    StreamRequestHandler streamRequestHandler = new StreamRequestHandler(
            new AutoScaleTask(streamMetadataTasks, streamStore, EXECUTOR),
            new ScaleOperationTask(streamMetadataTasks, streamStore, EXECUTOR),
            new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, EXECUTOR),
            new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, EXECUTOR),
            new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, EXECUTOR),
            new TruncateStreamTask(streamMetadataTasks, streamStore, EXECUTOR),
            new CreateReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
            new DeleteReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
            new UpdateReaderGroupTask(streamMetadataTasks, streamStore, EXECUTOR),
            streamStore,
            new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, EXECUTOR),
            EXECUTOR);
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, EXECUTOR));
    streamTransactionMetadataTasks.initializeStreamWriters(new EventStreamWriterMock<>(), new EventStreamWriterMock<>());
    Cluster mockCluster = mock(Cluster.class);
    when(mockCluster.getClusterMembers()).thenReturn(Collections.singleton(new Host("localhost", 9090, null)));
    ControllerServiceGrpc.ControllerServiceImplBase controllerServiceImplBase = new ControllerServiceImpl(new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelper, EXECUTOR, mockCluster, requestTracker), authHelper, requestTracker, true, true, 2);
    ControllerServiceGrpc.ControllerServiceImplBase controllerServiceImplBaseStrict = new ControllerServiceImpl(new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelper, EXECUTOR, mockCluster, requestTracker), authHelper, requestTracker, true, false, 2);
    PasswordAuthHandler authHandler = new PasswordAuthHandler();
    authHandler.initialize(AUTH_FILE.getAbsolutePath());
    String uniqueServerName = String.format("Test server name: %s", getClass());
    String uniqueServerNameStrict = String.format("Test server name: %sStrict", getClass());
    // Using a builder that creates a server for servicing in-process requests.
    // Also, using a direct executor which executes app code directly in transport thread. See
    // https://grpc.io/grpc-java/javadoc/io/grpc/inprocess/InProcessServerBuilder.html for more information.
    grpcServer = InProcessServerBuilder.forName(uniqueServerName).addService(ServerInterceptors.intercept(controllerServiceImplBase, new AuthInterceptor(authHandler))).directExecutor().build().start();
    grpcServerStrict = InProcessServerBuilder.forName(uniqueServerNameStrict).addService(ServerInterceptors.intercept(controllerServiceImplBaseStrict, new AuthInterceptor(authHandler))).directExecutor().build().start();
    inProcessChannel = InProcessChannelBuilder.forName(uniqueServerName).directExecutor().build();
    inProcessChannelStrict = InProcessChannelBuilder.forName(uniqueServerNameStrict).directExecutor().build();
}
Also used : DeleteScopeTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteScopeTask) CreateReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.CreateReaderGroupTask) AuthInterceptor(io.pravega.controller.server.security.auth.handler.AuthInterceptor) AuthFileUtils.credentialsAndAclAsString(io.pravega.auth.AuthFileUtils.credentialsAndAclAsString) AbstractStreamMetadataStore(io.pravega.controller.store.stream.AbstractStreamMetadataStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) RequestTracker(io.pravega.common.tracing.RequestTracker) ControllerService(io.pravega.controller.server.ControllerService) AutoScaleTask(io.pravega.controller.server.eventProcessor.requesthandlers.AutoScaleTask) ControllerServiceImpl(io.pravega.controller.server.rpc.grpc.v1.ControllerServiceImpl) ControllerEventStreamWriterMock(io.pravega.controller.mocks.ControllerEventStreamWriterMock) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) ControllerServiceGrpc(io.pravega.controller.stream.api.grpc.v1.ControllerServiceGrpc) SealStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.SealStreamTask) PasswordAuthHandler(io.pravega.authplugin.basic.PasswordAuthHandler) TaskMetadataStore(io.pravega.controller.store.task.TaskMetadataStore) UpdateStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.UpdateStreamTask) DeleteReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteReaderGroupTask) Cluster(io.pravega.common.cluster.Cluster) Host(io.pravega.common.cluster.Host) SegmentHelper(io.pravega.controller.server.SegmentHelper) ScaleOperationTask(io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask) StreamRequestHandler(io.pravega.controller.server.eventProcessor.requesthandlers.StreamRequestHandler) GrpcAuthHelper(io.pravega.controller.server.security.auth.GrpcAuthHelper) DeleteStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteStreamTask) UpdateReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask) EventHelper(io.pravega.controller.task.EventHelper) TruncateStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.TruncateStreamTask) Before(org.junit.Before)
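Individual tests in this class (not shown here) talk to the controller through generated gRPC stubs bound to these in-process channels. A hedged usage sketch; the createScope call in the comment is only a hypothetical illustration:

// Blocking stub bound to the non-strict in-process server created in setup().
ControllerServiceGrpc.ControllerServiceBlockingStub stub =
        ControllerServiceGrpc.newBlockingStub(inProcessChannel);
// Requests would attach call credentials accepted by the PasswordAuthHandler,
// e.g. stub.withCallCredentials(...).createScope(...); the credential helper the
// test class actually uses is not part of this excerpt.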

Example 14 with StreamMetadataStore

Use of io.pravega.controller.store.stream.StreamMetadataStore in project pravega by pravega.

The class StreamMetadataTasksTest, method deleteScopeRecursiveTest.

@Test
public void deleteScopeRecursiveTest() {
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    StreamMetadataStore storeSpy = spy(getStore());
    final String testDeleteScope = "testDelete";
    // Call deleteScopeRecursive() without creating a scope
    Controller.DeleteScopeStatus.Status status = streamMetadataTasks.deleteScopeRecursive(testDeleteScope, 123L).join();
    assertEquals(Controller.DeleteScopeStatus.Status.SUCCESS, status);
    streamStorePartialMock.createScope(testDeleteScope, null, executor).join();
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    DeleteScopeEvent deleteScopeEvent = new DeleteScopeEvent(SCOPE, 2L, UUID.randomUUID());
    requestEventWriter.writeEvent(deleteScopeEvent);
    // stub the spied store directly; stubbing a fresh spy(storeSpy) would create a
    // throwaway object and leave storeSpy itself unstubbed
    doAnswer(x -> {
        CompletableFuture<Boolean> future = new CompletableFuture<>();
        future.complete(true);
        return future;
    }).when(storeSpy).isScopeSealed(testDeleteScope, null, executor);
    doAnswer(x -> {
        CompletableFuture<UUID> future = new CompletableFuture<>();
        future.complete(UUID.randomUUID());
        return future;
    }).when(storeSpy).getScopeId(testDeleteScope, null, executor);
    consumer.deleteScopeRecursive(SCOPE, 123L).join();
}
Also used : CompletableFuture(java.util.concurrent.CompletableFuture) DeleteScopeEvent(io.pravega.shared.controller.event.DeleteScopeEvent) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) ControllerEventStreamWriterMock(io.pravega.controller.mocks.ControllerEventStreamWriterMock) EventStreamWriterMock(io.pravega.controller.mocks.EventStreamWriterMock) AbstractStreamMetadataStore(io.pravega.controller.store.stream.AbstractStreamMetadataStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) UUID(java.util.UUID) Test(org.junit.Test)
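The two doAnswer blocks above only wrap futures that are already complete. A more compact, equivalent way to write those stubs against the same storeSpy (assuming the usual static import of Mockito.doReturn):

doReturn(CompletableFuture.completedFuture(true))
        .when(storeSpy).isScopeSealed(testDeleteScope, null, executor);
doReturn(CompletableFuture.completedFuture(UUID.randomUUID()))
        .when(storeSpy).getScopeId(testDeleteScope, null, executor);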

Example 15 with StreamMetadataStore

Use of io.pravega.controller.store.stream.StreamMetadataStore in project pravega by pravega.

The class StreamMetadataTasksTest, method deletePartiallyCreatedStreamTest.

@Test(timeout = 30000)
public void deletePartiallyCreatedStreamTest() throws InterruptedException {
    WriterMock requestEventWriter = new WriterMock(streamMetadataTasks, executor);
    streamMetadataTasks.setRequestEventWriter(requestEventWriter);
    StreamMetadataStore store = streamStorePartialMock;
    final String scopeName = "RecreationScopePartial";
    final String streamName = "RecreatedStreamPartial";
    store.createScope(scopeName, null, executor).join();
    Controller.DeleteStreamStatus.Status deleteStatus;
    // region case 1: only add stream to scope without any additional metadata
    StreamMetadataStoreTestHelper.addStreamToScope(store, scopeName, streamName);
    assertTrue(store.checkStreamExists(scopeName, streamName, null, executor).join());
    deleteStatus = streamMetadataTasks.deleteStream(scopeName, streamName, 0L).join();
    assertEquals(Controller.DeleteStreamStatus.Status.SUCCESS, deleteStatus);
    // verify that event is not posted
    assertTrue(requestEventWriter.eventQueue.isEmpty());
    // endregion
    // region case 2: only add creation time for the stream and then delete it.
    StreamMetadataStoreTestHelper.partiallyCreateStream(store, scopeName, streamName, Optional.of(100L), false);
    assertTrue(store.checkStreamExists(scopeName, streamName, null, executor).join());
    deleteStatus = streamMetadataTasks.deleteStream(scopeName, streamName, 0L).join();
    assertEquals(Controller.DeleteStreamStatus.Status.SUCCESS, deleteStatus);
    // verify that event is not posted
    assertTrue(requestEventWriter.eventQueue.isEmpty());
    // endregion
    // region case 3: create stream again but this time create the `state` but not history record.
    // this should result in delete workflow being invoked as segments also have to be deleted.
    StreamMetadataStoreTestHelper.partiallyCreateStream(store, scopeName, streamName, Optional.of(100L), true);
    assertTrue(store.checkStreamExists(scopeName, streamName, null, executor).join());
    CompletableFuture<Controller.DeleteStreamStatus.Status> future = streamMetadataTasks.deleteStream(scopeName, streamName, 0L);
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    assertEquals(Controller.DeleteStreamStatus.Status.SUCCESS, future.join());
    // endregion
    // region case 4: now create the full stream metadata without setting state to ACTIVE.
    // Since there are no active segments left over, segments are created starting from segment 0.
    // The configuration below has 3 segments, so the highest segment number should be 2.
    StreamConfiguration configuration = StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(3)).build();
    store.createStream(scopeName, streamName, configuration, 101L, null, executor).join();
    assertTrue(store.checkStreamExists(scopeName, streamName, null, executor).join());
    assertEquals(store.getActiveEpoch(scopeName, streamName, null, true, executor).join().getSegmentIds().stream().max(Long::compareTo).get().longValue(), 2L);
    // delete stream should succeed
    future = streamMetadataTasks.deleteStream(scopeName, streamName, 0L);
    assertTrue(Futures.await(processEvent(requestEventWriter)));
    assertEquals(Controller.DeleteStreamStatus.Status.SUCCESS, future.join());
    store.createStream(scopeName, streamName, configuration, 102L, null, executor).join();
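    // segment numbering continues after the delete-and-recreate: the previous stream ended at segment 2, so the three new segments are 3, 4 and 5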
    assertEquals(store.getActiveEpoch(scopeName, streamName, null, true, executor).join().getSegmentIds().stream().max(Long::compareTo).get().longValue(), 5L);
// endregion
}
Also used : UpdateSubscriberStatus(io.pravega.controller.stream.api.grpc.v1.Controller.UpdateSubscriberStatus) DeleteReaderGroupStatus(io.pravega.controller.stream.api.grpc.v1.Controller.DeleteReaderGroupStatus) ScaleStreamStatus(io.pravega.controller.stream.api.grpc.v1.Controller.ScaleResponse.ScaleStreamStatus) TxnStatus(io.pravega.controller.store.stream.TxnStatus) UpdateStreamStatus(io.pravega.controller.stream.api.grpc.v1.Controller.UpdateStreamStatus) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) ControllerEventStreamWriterMock(io.pravega.controller.mocks.ControllerEventStreamWriterMock) EventStreamWriterMock(io.pravega.controller.mocks.EventStreamWriterMock) AbstractStreamMetadataStore(io.pravega.controller.store.stream.AbstractStreamMetadataStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) Controller(io.pravega.controller.stream.api.grpc.v1.Controller) Test(org.junit.Test)
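Both StreamMetadataTasksTest examples rely on a WriterMock that the excerpt does not show. A minimal, self-contained sketch of that kind of test double, under the assumption that it only needs to park posted events in an in-memory queue (the real WriterMock also feeds processEvent(...), which is likewise not shown here):

// Hypothetical stand-in for WriterMock, for illustration only.
static class QueueingEventWriter {
    final java.util.concurrent.BlockingQueue<ControllerEvent> eventQueue =
            new java.util.concurrent.LinkedBlockingQueue<>();

    CompletableFuture<Void> writeEvent(ControllerEvent event) {
        // Record the event instead of writing it to a Pravega stream, so assertions
        // such as eventQueue.isEmpty() can check whether a follow-up event was posted.
        eventQueue.add(event);
        return CompletableFuture.completedFuture(null);
    }
}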

Aggregations

StreamMetadataStore (io.pravega.controller.store.stream.StreamMetadataStore): 38
Test (org.junit.Test): 29
CompletableFuture (java.util.concurrent.CompletableFuture): 23
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 17
ArgumentMatchers.anyString (org.mockito.ArgumentMatchers.anyString): 17
UUID (java.util.UUID): 14
StreamMetadataTasks (io.pravega.controller.task.Stream.StreamMetadataTasks): 13
BucketStore (io.pravega.controller.store.stream.BucketStore): 12
StreamTransactionMetadataTasks (io.pravega.controller.task.Stream.StreamTransactionMetadataTasks): 12
Cleanup (lombok.Cleanup): 12
SegmentHelper (io.pravega.controller.server.SegmentHelper): 11
GrpcAuthHelper (io.pravega.controller.server.security.auth.GrpcAuthHelper): 11
TaskMetadataStore (io.pravega.controller.store.task.TaskMetadataStore): 11
KVTableMetadataStore (io.pravega.controller.store.kvtable.KVTableMetadataStore): 10
UpdateStreamTask (io.pravega.controller.server.eventProcessor.requesthandlers.UpdateStreamTask): 9
EventStreamWriterMock (io.pravega.controller.mocks.EventStreamWriterMock): 8
ScaleOperationTask (io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask): 8
CommitEvent (io.pravega.shared.controller.event.CommitEvent): 8
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 8
CuratorFramework (org.apache.curator.framework.CuratorFramework): 8