
Example 6 with StreamMetadataTasks

Use of io.pravega.controller.task.Stream.StreamMetadataTasks in project pravega by pravega.

From the class ControllerEventProcessorsTest, method testIsReady.

@Test(timeout = 30000L)
public void testIsReady() throws Exception {
    LocalController controller = mock(LocalController.class);
    StreamMetadataStore streamStore = mock(StreamMetadataStore.class);
    BucketStore bucketStore = mock(BucketStore.class);
    ConnectionPool connectionPool = mock(ConnectionPool.class);
    StreamMetadataTasks streamMetadataTasks = mock(StreamMetadataTasks.class);
    StreamTransactionMetadataTasks streamTransactionMetadataTasks = mock(StreamTransactionMetadataTasks.class);
    KVTableMetadataStore kvtStore = mock(KVTableMetadataStore.class);
    TableMetadataTasks kvtTasks = mock(TableMetadataTasks.class);
    ControllerEventProcessorConfig config = ControllerEventProcessorConfigImpl.withDefault();
    EventProcessorSystem system = mock(EventProcessorSystem.class);
    CuratorZookeeperClient curatorZKClientMock = mock(CuratorZookeeperClient.class);
    CuratorFramework client = mock(CuratorFramework.class);
    Listenable listen = mock(Listenable.class);
    doNothing().when(listen).addListener(any(ConnectionStateListener.class));
    doReturn(listen).when(client).getConnectionStateListenable();
    doReturn(curatorZKClientMock).when(client).getZookeeperClient();
    doReturn(true).when(curatorZKClientMock).isConnected();
    ZKCheckpointStore checkpointStore = (ZKCheckpointStore) CheckpointStoreFactory.createZKStore(client);
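    // Stub writer initialization so bootstrap can proceed without creating real client-side event writers.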
    doAnswer(x -> null).when(streamMetadataTasks).initializeStreamWriters(any(), any());
    doAnswer(x -> null).when(streamTransactionMetadataTasks).initializeStreamWriters(any(EventStreamClientFactory.class), any(ControllerEventProcessorConfig.class));
    CompletableFuture<Boolean> createScopeResponseFuture = new CompletableFuture<>();
    CompletableFuture<Void> createScopeSignalFuture = new CompletableFuture<>();
    doAnswer(x -> {
        createScopeSignalFuture.complete(null);
        return createScopeResponseFuture;
    }).when(controller).createScope(anyString());
    LinkedBlockingQueue<CompletableFuture<Boolean>> createStreamResponses = new LinkedBlockingQueue<>();
    LinkedBlockingQueue<CompletableFuture<Void>> createStreamSignals = new LinkedBlockingQueue<>();
    List<CompletableFuture<Boolean>> createStreamResponsesList = new LinkedList<>();
    List<CompletableFuture<Void>> createStreamSignalsList = new LinkedList<>();
    for (int i = 0; i < 4; i++) {
        CompletableFuture<Boolean> responseFuture = new CompletableFuture<>();
        CompletableFuture<Void> signalFuture = new CompletableFuture<>();
        createStreamResponsesList.add(responseFuture);
        createStreamResponses.add(responseFuture);
        createStreamSignalsList.add(signalFuture);
        createStreamSignals.add(signalFuture);
    }
    // For each createInternalStream call, complete the next signal future and return the matching response future from the queues.
    doAnswer(x -> {
        createStreamSignals.take().complete(null);
        return createStreamResponses.take();
    }).when(controller).createInternalStream(anyString(), anyString(), any());
    @Cleanup ControllerEventProcessors processors = spy(new ControllerEventProcessors("host1", config, controller, checkpointStore, streamStore, bucketStore, connectionPool, streamMetadataTasks, streamTransactionMetadataTasks, kvtStore, kvtTasks, system, executorService()));
    // Check isReady() method before invoking bootstrap
    Assert.assertFalse(processors.getBootstrapCompleted().get());
    Assert.assertTrue(processors.isMetadataServiceConnected());
    Assert.assertFalse(processors.isRunning());
    Assert.assertFalse(processors.isReady());
    // Call bootstrap on ControllerEventProcessors
    processors.bootstrap(streamTransactionMetadataTasks, streamMetadataTasks, kvtTasks);
    // Wait on create scope being called.
    createScopeSignalFuture.join();
    createScopeResponseFuture.complete(true);
    createStreamSignalsList.get(0).join();
    createStreamSignalsList.get(1).join();
    createStreamSignalsList.get(2).join();
    createStreamSignalsList.get(3).join();
    createStreamResponsesList.get(0).complete(true);
    createStreamResponsesList.get(1).complete(true);
    createStreamResponsesList.get(2).complete(true);
    createStreamResponsesList.get(3).complete(true);
    AssertExtensions.assertEventuallyEquals(true, () -> processors.getBootstrapCompleted().get(), 10000);
    Assert.assertTrue(processors.isMetadataServiceConnected());
    Assert.assertFalse(processors.isRunning());
    Assert.assertFalse(processors.isReady());
    EventProcessorGroup mockEventProcessorGroup = mock(EventProcessorGroup.class);
    doNothing().when(mockEventProcessorGroup).awaitRunning();
    doReturn(mockEventProcessorGroup).when(system).createEventProcessorGroup(any(EventProcessorConfig.class), any(CheckpointStore.class), any(ScheduledExecutorService.class));
    processors.startAsync();
    processors.awaitRunning();
    Assert.assertTrue(processors.isMetadataServiceConnected());
    Assert.assertTrue(processors.isBootstrapCompleted());
    Assert.assertTrue(processors.isRunning());
    Assert.assertTrue(processors.isReady());
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) EventProcessorSystem(io.pravega.controller.eventProcessor.EventProcessorSystem) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) CheckpointStore(io.pravega.controller.store.checkpoint.CheckpointStore) ZKCheckpointStore(io.pravega.controller.store.checkpoint.ZKCheckpointStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) Cleanup(lombok.Cleanup) ZKCheckpointStore(io.pravega.controller.store.checkpoint.ZKCheckpointStore) CuratorFramework(org.apache.curator.framework.CuratorFramework) CompletableFuture(java.util.concurrent.CompletableFuture) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ConnectionStateListener(org.apache.curator.framework.state.ConnectionStateListener) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) KVTableMetadataStore(io.pravega.controller.store.kvtable.KVTableMetadataStore) EventProcessorGroup(io.pravega.controller.eventProcessor.EventProcessorGroup) LinkedList(java.util.LinkedList) Listenable(org.apache.curator.framework.listen.Listenable) CuratorZookeeperClient(org.apache.curator.CuratorZookeeperClient) EventProcessorConfig(io.pravega.controller.eventProcessor.EventProcessorConfig) Test(org.junit.Test)
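
The pattern above of pairing a "signal" future (completed inside the stub) with a "response" future (completed later by the test) lets the test pause the bootstrap mid-flight and assert intermediate state. A minimal, self-contained sketch of that handshake using only Mockito and java.util.concurrent; the AsyncService interface is a hypothetical stand-in, not a Pravega type:

import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.concurrent.CompletableFuture;

public class SignalResponseSketch {

    // Hypothetical asynchronous dependency standing in for the controller.
    interface AsyncService {
        CompletableFuture<Boolean> createResource(String name);
    }

    public static void main(String[] args) {
        CompletableFuture<Void> invoked = new CompletableFuture<>();      // completed by the stub when it is called
        CompletableFuture<Boolean> response = new CompletableFuture<>();  // completed by the test when it is ready

        AsyncService service = mock(AsyncService.class);
        doAnswer(x -> {
            invoked.complete(null);   // signal: the code under test has reached this call
            return response;          // keep the caller pending until the test releases it
        }).when(service).createResource(anyString());

        // The code under test would normally invoke this asynchronously.
        CompletableFuture<Boolean> result = service.createResource("r1");

        invoked.join();               // wait until the stub has actually been invoked
        // ...assert intermediate state here, exactly like the isReady() checks above...
        response.complete(true);      // unblock the caller
        System.out.println(result.join());
    }
}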

Example 7 with StreamMetadataTasks

Use of io.pravega.controller.task.Stream.StreamMetadataTasks in project pravega by pravega.

From the class ControllerEventProcessorsTest, method testHandleOrphaned.

@Test(timeout = 10000)
public void testHandleOrphaned() throws CheckpointStoreException {
    LocalController localController = mock(LocalController.class);
    CheckpointStore checkpointStore = mock(CheckpointStore.class);
    StreamMetadataStore streamStore = mock(StreamMetadataStore.class);
    BucketStore bucketStore = mock(BucketStore.class);
    ConnectionPool connectionPool = mock(ConnectionPool.class);
    StreamMetadataTasks streamMetadataTasks = mock(StreamMetadataTasks.class);
    StreamTransactionMetadataTasks streamTransactionMetadataTasks = mock(StreamTransactionMetadataTasks.class);
    KVTableMetadataStore kvtStore = mock(KVTableMetadataStore.class);
    TableMetadataTasks kvtTasks = mock(TableMetadataTasks.class);
    ControllerEventProcessorConfig config = ControllerEventProcessorConfigImpl.withDefault();
    EventProcessorSystem system = mock(EventProcessorSystem.class);
    EventProcessorGroup<ControllerEvent> processor = getProcessor();
    EventProcessorGroup<ControllerEvent> mockProcessor = spy(processor);
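    // Make failure notification for "host3" throw, so the test can check that the error propagates through handleFailedProcess.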
    doThrow(new CheckpointStoreException("host not found")).when(mockProcessor).notifyProcessFailure("host3");
    when(system.createEventProcessorGroup(any(), any(), any())).thenReturn(mockProcessor);
    @Cleanup ControllerEventProcessors processors = new ControllerEventProcessors("host1", config, localController, checkpointStore, streamStore, bucketStore, connectionPool, streamMetadataTasks, streamTransactionMetadataTasks, kvtStore, kvtTasks, system, executorService());
    // Before startAsync(), the event processor groups (including kvtRequestProcessors) have not been initialized and are null; sweeping failed processes should still succeed.
    assertTrue(Futures.await(processors.sweepFailedProcesses(() -> Sets.newHashSet("host1"))));
    Assert.assertFalse(processors.isReady());
    Assert.assertFalse(processors.isBootstrapCompleted());
    Assert.assertFalse(processors.isMetadataServiceConnected());
    processors.startAsync();
    processors.awaitRunning();
    assertTrue(Futures.await(processors.sweepFailedProcesses(() -> Sets.newHashSet("host1"))));
    assertTrue(Futures.await(processors.handleFailedProcess("host1")));
    AssertExtensions.assertFutureThrows("host not found", processors.handleFailedProcess("host3"), e -> e instanceof CheckpointStoreException);
    processors.shutDown();
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) KVTableMetadataStore(io.pravega.controller.store.kvtable.KVTableMetadataStore) EventProcessorSystem(io.pravega.controller.eventProcessor.EventProcessorSystem) CheckpointStore(io.pravega.controller.store.checkpoint.CheckpointStore) ZKCheckpointStore(io.pravega.controller.store.checkpoint.ZKCheckpointStore) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) Cleanup(lombok.Cleanup) ControllerEvent(io.pravega.shared.controller.event.ControllerEvent) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) CheckpointStoreException(io.pravega.controller.store.checkpoint.CheckpointStoreException) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) Test(org.junit.Test)
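
The key assertion here is that a failure raised synchronously by notifyProcessFailure surfaces as an exceptionally completed future from handleFailedProcess. A standalone sketch of that propagation pattern with plain Mockito and CompletableFuture; the Group class and handleFailedProcess wrapper below are hypothetical stand-ins, not the Pravega classes:

import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;

public class FailurePropagationSketch {

    // Hypothetical collaborator standing in for EventProcessorGroup.
    static class Group {
        void notifyProcessFailure(String host) throws Exception {
            // a real implementation would update the checkpoint store
        }
    }

    // Hypothetical wrapper standing in for the async failure-handling call.
    static CompletableFuture<Void> handleFailedProcess(Group group, String host) {
        return CompletableFuture.runAsync(() -> {
            try {
                group.notifyProcessFailure(host);
            } catch (Exception e) {
                throw new CompletionException(e);   // surface the failure through the future
            }
        });
    }

    public static void main(String[] args) {
        Group group = spy(new Group());
        try {
            doThrow(new Exception("host not found")).when(group).notifyProcessFailure("host3");
        } catch (Exception ignored) {
            // required by the checked signature of the stubbed call
        }

        // "host1" succeeds, "host3" completes exceptionally, mirroring the test above.
        handleFailedProcess(group, "host1").join();
        boolean threw = false;
        try {
            handleFailedProcess(group, "host3").join();
        } catch (CompletionException e) {
            threw = "host not found".equals(e.getCause().getMessage());
        }
        assertTrue(threw);
    }
}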

Example 8 with StreamMetadataTasks

Use of io.pravega.controller.task.Stream.StreamMetadataTasks in project pravega by pravega.

From the class ControllerEventProcessorsTest, method testTruncate.

@Test(timeout = 10000L)
public void testTruncate() throws CheckpointStoreException, InterruptedException {
    LocalController controller = mock(LocalController.class);
    CheckpointStore checkpointStore = mock(CheckpointStore.class);
    StreamMetadataStore streamStore = mock(StreamMetadataStore.class);
    BucketStore bucketStore = mock(BucketStore.class);
    ConnectionPool connectionPool = mock(ConnectionPool.class);
    StreamMetadataTasks streamMetadataTasks = mock(StreamMetadataTasks.class);
    StreamTransactionMetadataTasks streamTransactionMetadataTasks = mock(StreamTransactionMetadataTasks.class);
    KVTableMetadataStore kvtStore = mock(KVTableMetadataStore.class);
    TableMetadataTasks kvtTasks = mock(TableMetadataTasks.class);
    ControllerEventProcessorConfig config = ControllerEventProcessorConfigImpl.withDefault();
    EventProcessorSystem system = mock(EventProcessorSystem.class);
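    // Build sample reader positions; the checkpoint store stubs below return a position per reader for processes p1, p2 and p3.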
    Map<SegmentWithRange, Long> map1 = new HashMap<>();
    map1.put(new SegmentWithRange(new Segment("scope", "stream", 0L), 0.0, 0.33), 10L);
    map1.put(new SegmentWithRange(new Segment("scope", "stream", 1L), 0.33, 0.66), 10L);
    map1.put(new SegmentWithRange(new Segment("scope", "stream", 2L), 0.66, 1.0), 20L);
    Map<SegmentWithRange, Long> map2 = new HashMap<>();
    map2.put(new SegmentWithRange(new Segment("scope", "stream", 0L), 0.0, 0.33), 20L);
    map2.put(new SegmentWithRange(new Segment("scope", "stream", 2L), 0.66, 1.0), 10L);
    Map<SegmentWithRange, Long> map3 = new HashMap<>();
    map3.put(new SegmentWithRange(new Segment("scope", "stream", 3L), 0.0, 0.33), 0L);
    map3.put(new SegmentWithRange(new Segment("scope", "stream", 4L), 0.33, 0.66), 10L);
    map3.put(new SegmentWithRange(new Segment("scope", "stream", 5L), 0.66, 1.0), 20L);
    PositionImpl position1 = new PositionImpl(map1);
    PositionImpl position2 = new PositionImpl(map2);
    PositionImpl position3 = new PositionImpl(map3);
    doReturn(getProcessor()).when(system).createEventProcessorGroup(any(), any(), any());
    doReturn(CompletableFuture.completedFuture(null)).when(controller).createScope(anyString());
    doReturn(CompletableFuture.completedFuture(null)).when(controller).createInternalStream(anyString(), anyString(), any());
    doNothing().when(streamMetadataTasks).initializeStreamWriters(any(), anyString());
    doNothing().when(streamTransactionMetadataTasks).initializeStreamWriters(any(EventStreamClientFactory.class), any(ControllerEventProcessorConfig.class));
    AtomicBoolean requestCalled = new AtomicBoolean(false);
    AtomicBoolean commitCalled = new AtomicBoolean(false);
    CompletableFuture<Void> requestStreamTruncationFuture = new CompletableFuture<>();
    CompletableFuture<Void> kvtStreamTruncationFuture = new CompletableFuture<>();
    CompletableFuture<Void> abortStreamTruncationFuture = new CompletableFuture<>();
    CompletableFuture<Void> commitStreamTruncationFuture = new CompletableFuture<>();
    doAnswer(x -> {
        String argument = x.getArgument(1);
        if (argument.equals(config.getRequestStreamName())) {
            // Let the first call for the request stream throw; the periodic truncation should retry it in the next cycle.
            if (!requestCalled.get()) {
                requestCalled.set(true);
                throw new RuntimeException("inducing sporadic failure");
            } else {
                requestStreamTruncationFuture.complete(null);
            }
        } else if (argument.equals(config.getCommitStreamName())) {
            // Let the first call for the commit stream report failure (false); the next cycle should retry it.
            if (commitCalled.get()) {
                commitStreamTruncationFuture.complete(null);
            } else {
                commitCalled.set(true);
                return CompletableFuture.completedFuture(false);
            }
        } else if (argument.equals(config.getAbortStreamName())) {
            abortStreamTruncationFuture.complete(null);
        } else if (argument.equals(config.getKvtStreamName())) {
            kvtStreamTruncationFuture.complete(null);
        }
        return CompletableFuture.completedFuture(true);
    }).when(streamMetadataTasks).startTruncation(anyString(), anyString(), any(), any());
    Set<String> processes = Sets.newHashSet("p1", "p2", "p3");
    // First make getProcesses() throw a CheckpointStoreException on every cycle until the signal flips.
    AtomicBoolean signal = new AtomicBoolean(false);
    CountDownLatch cd = new CountDownLatch(4);
    doAnswer(x -> {
        // this ensures that the call to truncate has been invoked for all 4 internal streams.
        cd.countDown();
        cd.await();
        if (!signal.get()) {
            throw new CheckpointStoreException("CheckpointStoreException");
        } else {
            return processes;
        }
    }).when(checkpointStore).getProcesses();
    Map<String, PositionImpl> r1 = Collections.singletonMap("r1", position1);
    doReturn(r1).when(checkpointStore).getPositions(eq("p1"), anyString());
    Map<String, PositionImpl> r2 = Collections.singletonMap("r2", position1);
    doReturn(r2).when(checkpointStore).getPositions(eq("p2"), anyString());
    Map<String, PositionImpl> r3 = Collections.singletonMap("r3", position1);
    doReturn(r3).when(checkpointStore).getPositions(eq("p3"), anyString());
    @Cleanup ControllerEventProcessors processors = new ControllerEventProcessors("host1", config, controller, checkpointStore, streamStore, bucketStore, connectionPool, streamMetadataTasks, streamTransactionMetadataTasks, kvtStore, kvtTasks, system, executorService());
    // set truncation interval
    processors.setTruncationInterval(100L);
    processors.startAsync();
    processors.awaitRunning();
    ControllerEventProcessors processorsSpied = spy(processors);
    processorsSpied.bootstrap(streamTransactionMetadataTasks, streamMetadataTasks, kvtTasks);
    // Wait until all 4 truncation attempts have reached the latch (each failing with the stubbed exception).
    cd.await();
    verify(processorsSpied, atLeast(4)).truncate(any(), any(), any());
    verify(checkpointStore, atLeast(4)).getProcesses();
    verify(checkpointStore, never()).getPositions(anyString(), anyString());
    verify(streamMetadataTasks, never()).startTruncation(anyString(), anyString(), any(), any());
    signal.set(true);
    CompletableFuture.allOf(requestStreamTruncationFuture, commitStreamTruncationFuture, abortStreamTruncationFuture, kvtStreamTruncationFuture).join();
    // verify that truncate method is being called periodically.
    verify(processorsSpied, atLeastOnce()).truncate(config.getRequestStreamName(), config.getRequestReaderGroupName(), streamMetadataTasks);
    verify(processorsSpied, atLeastOnce()).truncate(config.getCommitStreamName(), config.getCommitReaderGroupName(), streamMetadataTasks);
    verify(processorsSpied, atLeastOnce()).truncate(config.getAbortStreamName(), config.getAbortReaderGroupName(), streamMetadataTasks);
    verify(processorsSpied, atLeastOnce()).truncate(config.getKvtStreamName(), config.getKvtReaderGroupName(), streamMetadataTasks);
    for (int i = 1; i <= 3; i++) {
        verify(checkpointStore, atLeastOnce()).getPositions("p" + i, config.getRequestReaderGroupName());
        verify(checkpointStore, atLeastOnce()).getPositions("p" + i, config.getCommitReaderGroupName());
        verify(checkpointStore, atLeastOnce()).getPositions("p" + i, config.getAbortReaderGroupName());
        verify(checkpointStore, atLeastOnce()).getPositions("p" + i, config.getKvtReaderGroupName());
    }
}
Also used : ConnectionPool(io.pravega.client.connection.impl.ConnectionPool) EventProcessorSystem(io.pravega.controller.eventProcessor.EventProcessorSystem) HashMap(java.util.HashMap) PositionImpl(io.pravega.client.stream.impl.PositionImpl) EventStreamClientFactory(io.pravega.client.EventStreamClientFactory) CheckpointStore(io.pravega.controller.store.checkpoint.CheckpointStore) ZKCheckpointStore(io.pravega.controller.store.checkpoint.ZKCheckpointStore) ArgumentMatchers.anyString(org.mockito.ArgumentMatchers.anyString) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) Cleanup(lombok.Cleanup) Segment(io.pravega.client.segment.impl.Segment) CompletableFuture(java.util.concurrent.CompletableFuture) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) SegmentWithRange(io.pravega.client.stream.impl.SegmentWithRange) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) KVTableMetadataStore(io.pravega.controller.store.kvtable.KVTableMetadataStore) CountDownLatch(java.util.concurrent.CountDownLatch) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) CheckpointStoreException(io.pravega.controller.store.checkpoint.CheckpointStoreException) Test(org.junit.Test)
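
The truncation loop is scheduled periodically (setTruncationInterval(100L)), so a cycle that fails, whether through a thrown exception or a false result, is simply picked up again on the next tick. A small sketch of that retry-on-next-cycle behaviour with a plain ScheduledExecutorService; the names are illustrative, not Pravega code:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

public class PeriodicTruncationSketch {
    public static void main(String[] args) throws Exception {
        ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
        AtomicBoolean healthy = new AtomicBoolean(false);   // plays the role of the "signal" flag above
        AtomicInteger attempts = new AtomicInteger();
        CountDownLatch succeeded = new CountDownLatch(1);

        // Each cycle is independent: a failure in one run is retried on the next tick,
        // which is why the test above can let getProcesses() throw for the first few cycles.
        executor.scheduleAtFixedRate(() -> {
            attempts.incrementAndGet();
            if (!healthy.get()) {
                return;                       // simulate the failure being handled (an uncaught exception would cancel the schedule)
            }
            succeeded.countDown();            // truncation finally ran end to end
        }, 0, 100, TimeUnit.MILLISECONDS);

        Thread.sleep(350);                    // let a few failing cycles happen
        healthy.set(true);                    // flip the switch, the next cycle succeeds
        succeeded.await();
        System.out.println("attempts before success: " + attempts.get());
        executor.shutdownNow();
    }
}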

Example 9 with StreamMetadataTasks

Use of io.pravega.controller.task.Stream.StreamMetadataTasks in project pravega by pravega.

From the class ControllerServiceWithStreamTest, method setup.

@Before
public void setup() {
    try {
        zkServer = new TestingServerStarter().start();
    } catch (Exception e) {
        log.error("Error starting ZK server", e);
    }
    zkClient = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new ExponentialBackoffRetry(200, 10, 5000));
    zkClient.start();
    streamStore = spy(getStore());
    kvtStore = spy(getKVTStore());
    BucketStore bucketStore = StreamStoreFactory.createZKBucketStore(zkClient, executor);
    TaskMetadataStore taskMetadataStore = TaskStoreFactory.createZKStore(zkClient, executor);
    connectionFactory = new SocketConnectionFactoryImpl(ClientConfig.builder().controllerURI(URI.create("tcp://localhost")).build());
    GrpcAuthHelper disabledAuthHelper = GrpcAuthHelper.getDisabledAuthHelper();
    SegmentHelper segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    StreamMetrics.initialize();
    TransactionMetrics.initialize();
    EventHelper helperMock = EventHelperMock.getEventHelperMock(executor, "host", ((AbstractStreamMetadataStore) streamStore).getHostTaskIndex());
    streamMetadataTasks = new StreamMetadataTasks(streamStore, bucketStore, taskMetadataStore, segmentHelperMock, executor, "host", disabledAuthHelper, helperMock);
    kvtMetadataTasks = spy(new TableMetadataTasks(kvtStore, segmentHelperMock, executor, executor, "host", GrpcAuthHelper.getDisabledAuthHelper(), helperMock));
    streamTransactionMetadataTasks = new StreamTransactionMetadataTasks(streamStore, segmentHelperMock, executor, "host", disabledAuthHelper);
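    // Wire the full set of request handlers so request-stream events are processed in-process via the mock event writer set below.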
    StreamRequestHandler streamRequestHandler = new StreamRequestHandler(new AutoScaleTask(streamMetadataTasks, streamStore, executor), new ScaleOperationTask(streamMetadataTasks, streamStore, executor), new UpdateStreamTask(streamMetadataTasks, streamStore, bucketStore, executor), new SealStreamTask(streamMetadataTasks, streamTransactionMetadataTasks, streamStore, executor), new DeleteStreamTask(streamMetadataTasks, streamStore, bucketStore, executor), new TruncateStreamTask(streamMetadataTasks, streamStore, executor), new CreateReaderGroupTask(streamMetadataTasks, streamStore, executor), new DeleteReaderGroupTask(streamMetadataTasks, streamStore, executor), new UpdateReaderGroupTask(streamMetadataTasks, streamStore, executor), streamStore, new DeleteScopeTask(streamMetadataTasks, streamStore, kvtStore, kvtMetadataTasks, executor), executor);
    streamMetadataTasks.setRequestEventWriter(new ControllerEventStreamWriterMock(streamRequestHandler, executor));
    consumer = new ControllerService(kvtStore, kvtMetadataTasks, streamStore, bucketStore, streamMetadataTasks, streamTransactionMetadataTasks, segmentHelperMock, executor, null, requestTracker);
}
Also used : DeleteScopeTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteScopeTask) ExponentialBackoffRetry(org.apache.curator.retry.ExponentialBackoffRetry) CreateReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.CreateReaderGroupTask) AutoScaleTask(io.pravega.controller.server.eventProcessor.requesthandlers.AutoScaleTask) ControllerEventStreamWriterMock(io.pravega.controller.mocks.ControllerEventStreamWriterMock) StreamTransactionMetadataTasks(io.pravega.controller.task.Stream.StreamTransactionMetadataTasks) TableMetadataTasks(io.pravega.controller.task.KeyValueTable.TableMetadataTasks) BucketStore(io.pravega.controller.store.stream.BucketStore) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) SealStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.SealStreamTask) TestingServerStarter(io.pravega.test.common.TestingServerStarter) TaskMetadataStore(io.pravega.controller.store.task.TaskMetadataStore) UpdateStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.UpdateStreamTask) DeleteReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteReaderGroupTask) SocketConnectionFactoryImpl(io.pravega.client.connection.impl.SocketConnectionFactoryImpl) ScaleOperationTask(io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask) ExecutionException(java.util.concurrent.ExecutionException) StreamRequestHandler(io.pravega.controller.server.eventProcessor.requesthandlers.StreamRequestHandler) GrpcAuthHelper(io.pravega.controller.server.security.auth.GrpcAuthHelper) DeleteStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.DeleteStreamTask) UpdateReaderGroupTask(io.pravega.controller.server.eventProcessor.requesthandlers.UpdateReaderGroupTask) EventHelper(io.pravega.controller.task.EventHelper) TruncateStreamTask(io.pravega.controller.server.eventProcessor.requesthandlers.TruncateStreamTask) Before(org.junit.Before)
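
The first half of this setup is the standard embedded-ZooKeeper bootstrap: start an in-process server and connect a Curator client with an ExponentialBackoffRetry policy. A standalone sketch of that part using Curator's own TestingServer directly; the /demo/node path is just an illustration, not taken from the test:

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.apache.curator.test.TestingServer;

public class EmbeddedZkSketch {
    public static void main(String[] args) throws Exception {
        // Start an in-process ZooKeeper and connect a Curator client to it,
        // mirroring the first few lines of the setup above.
        try (TestingServer zkServer = new TestingServer()) {
            CuratorFramework zkClient = CuratorFrameworkFactory.newClient(
                    zkServer.getConnectString(),
                    // baseSleepTimeMs=200, maxRetries=10, maxSleepMs=5000: the same values as the setup
                    new ExponentialBackoffRetry(200, 10, 5000));
            zkClient.start();
            zkClient.blockUntilConnected();
            zkClient.create().creatingParentsIfNeeded().forPath("/demo/node", "hello".getBytes());
            System.out.println(new String(zkClient.getData().forPath("/demo/node")));
            zkClient.close();
        }
    }
}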

Example 10 with StreamMetadataTasks

Use of io.pravega.controller.task.Stream.StreamMetadataTasks in project pravega by pravega.

From the class TaskTest, method setUp.

@Before
public void setUp() throws Exception {
    zkServer = new TestingServerStarter().start();
    zkServer.start();
    cli = CuratorFrameworkFactory.newClient(zkServer.getConnectString(), new RetryOneTime(2000));
    cli.start();
    streamStore = getStream();
    taskMetadataStore = TaskStoreFactory.createZKStore(cli, executor);
    segmentHelperMock = SegmentHelperMock.getSegmentHelperMock();
    streamMetadataTasks = new StreamMetadataTasks(streamStore, StreamStoreFactory.createInMemoryBucketStore(), taskMetadataStore, segmentHelperMock, executor, HOSTNAME, GrpcAuthHelper.getDisabledAuthHelper());
    final String stream2 = "stream2";
    final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
    final ScalingPolicy policy2 = ScalingPolicy.fixed(3);
    final StreamConfiguration configuration1 = StreamConfiguration.builder().scalingPolicy(policy1).build();
    final StreamConfiguration configuration2 = StreamConfiguration.builder().scalingPolicy(policy2).build();
    // region createStream
    streamStore.createScope(SCOPE, null, executor).join();
    long start = System.currentTimeMillis();
    streamStore.createStream(SCOPE, stream1, configuration1, start, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).join();
    streamStore.createStream(SCOPE, stream2, configuration2, start, null, executor).join();
    streamStore.setState(SCOPE, stream2, State.ACTIVE, null, executor).join();
    // endregion
    // region scaleSegments
    AbstractMap.SimpleEntry<Double, Double> segment1 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment2 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Long> sealedSegments = Collections.singletonList(1L);
    VersionedMetadata<EpochTransitionRecord> versioned = streamStore.submitScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), start + 20, null, null, executor).get();
    EpochTransitionRecord response = versioned.getObject();
    Map<Long, Map.Entry<Double, Double>> segmentsCreated = response.getNewSegmentsWithRange();
    VersionedMetadata<State> state = streamStore.getVersionedState(SCOPE, stream1, null, executor).join();
    state = streamStore.updateVersionedState(SCOPE, stream1, State.SCALING, state, null, executor).get();
    versioned = streamStore.startScale(SCOPE, stream1, false, versioned, state, null, executor).join();
    streamStore.scaleCreateNewEpochs(SCOPE, stream1, versioned, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, null, executor).get();
    streamStore.completeScale(SCOPE, stream1, versioned, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
    AbstractMap.SimpleEntry<Double, Double> segment3 = new AbstractMap.SimpleEntry<>(0.0, 0.5);
    AbstractMap.SimpleEntry<Double, Double> segment4 = new AbstractMap.SimpleEntry<>(0.5, 0.75);
    AbstractMap.SimpleEntry<Double, Double> segment5 = new AbstractMap.SimpleEntry<>(0.75, 1.0);
    List<Long> sealedSegments1 = Arrays.asList(0L, 1L, 2L);
    versioned = streamStore.submitScale(SCOPE, stream2, sealedSegments1, Arrays.asList(segment3, segment4, segment5), start + 20, null, null, executor).get();
    response = versioned.getObject();
    segmentsCreated = response.getNewSegmentsWithRange();
    state = streamStore.getVersionedState(SCOPE, stream2, null, executor).join();
    state = streamStore.updateVersionedState(SCOPE, stream2, State.SCALING, state, null, executor).get();
    versioned = streamStore.startScale(SCOPE, stream2, false, versioned, state, null, executor).join();
    streamStore.scaleCreateNewEpochs(SCOPE, stream2, versioned, null, executor).get();
    streamStore.scaleSegmentsSealed(SCOPE, stream2, sealedSegments1.stream().collect(Collectors.toMap(x -> x, x -> 0L)), versioned, null, executor).get();
    streamStore.completeScale(SCOPE, stream2, versioned, null, executor).join();
    streamStore.setState(SCOPE, stream1, State.ACTIVE, null, executor).get();
// endregion
}
Also used : Arrays(java.util.Arrays) AssertExtensions(io.pravega.test.common.AssertExtensions) Cleanup(lombok.Cleanup) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) VersionedMetadata(io.pravega.controller.store.VersionedMetadata) TaskMetadataStore(io.pravega.controller.store.task.TaskMetadataStore) Map(java.util.Map) After(org.junit.After) LockFailedException(io.pravega.controller.store.task.LockFailedException) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) CreateStreamStatus(io.pravega.controller.stream.api.grpc.v1.Controller.CreateStreamStatus) CompletionException(java.util.concurrent.CompletionException) UUID(java.util.UUID) EqualsAndHashCode(lombok.EqualsAndHashCode) Collectors(java.util.stream.Collectors) TaggedResource(io.pravega.controller.store.task.TaggedResource) Serializable(java.io.Serializable) List(java.util.List) Slf4j(lombok.extern.slf4j.Slf4j) CuratorFramework(org.apache.curator.framework.CuratorFramework) Assert.assertFalse(org.junit.Assert.assertFalse) Optional(java.util.Optional) Resource(io.pravega.controller.store.task.Resource) StreamMetadataStore(io.pravega.controller.store.stream.StreamMetadataStore) GrpcAuthHelper(io.pravega.controller.server.security.auth.GrpcAuthHelper) CuratorFrameworkFactory(org.apache.curator.framework.CuratorFrameworkFactory) StreamStoreFactory(io.pravega.controller.store.stream.StreamStoreFactory) SegmentHelper(io.pravega.controller.server.SegmentHelper) CompletableFuture(java.util.concurrent.CompletableFuture) ArrayList(java.util.ArrayList) RetryOneTime(org.apache.curator.retry.RetryOneTime) TestingServerStarter(io.pravega.test.common.TestingServerStarter) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) TestingServer(org.apache.curator.test.TestingServer) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) HostMonitorConfigImpl(io.pravega.controller.store.host.impl.HostMonitorConfigImpl) Before(org.junit.Before) SegmentHelperMock(io.pravega.controller.mocks.SegmentHelperMock) TestTasks(io.pravega.controller.task.Stream.TestTasks) Assert.assertTrue(org.junit.Assert.assertTrue) Test(org.junit.Test) HostStoreFactory(io.pravega.controller.store.host.HostStoreFactory) ExecutionException(java.util.concurrent.ExecutionException) AbstractMap(java.util.AbstractMap) TaskStoreFactory(io.pravega.controller.store.task.TaskStoreFactory) HostControllerStore(io.pravega.controller.store.host.HostControllerStore) Data(lombok.Data) State(io.pravega.controller.store.stream.State) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Assert(org.junit.Assert) Collections(java.util.Collections) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) Assert.assertEquals(org.junit.Assert.assertEquals) ScalingPolicy(io.pravega.client.stream.ScalingPolicy) RetryOneTime(org.apache.curator.retry.RetryOneTime) TestingServerStarter(io.pravega.test.common.TestingServerStarter) EpochTransitionRecord(io.pravega.controller.store.stream.records.EpochTransitionRecord) AbstractMap(java.util.AbstractMap) State(io.pravega.controller.store.stream.State) StreamConfiguration(io.pravega.client.stream.StreamConfiguration) StreamMetadataTasks(io.pravega.controller.task.Stream.StreamMetadataTasks) Before(org.junit.Before)
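
The scale steps above replace segment 1 of stream1 with two finer segments and segments 0-2 of stream2 with three new ones; the starting ranges follow from evenly splitting the [0.0, 1.0) key space across the fixed scaling policy's segment count. A small standalone illustration of that arithmetic (plain Java, not Pravega code):

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class KeyRangeSketch {
    // For a fixed policy with n segments, the key space [0.0, 1.0) is split into n equal ranges.
    static List<Map.Entry<Double, Double>> evenRanges(int n) {
        List<Map.Entry<Double, Double>> ranges = new ArrayList<>();
        for (int i = 0; i < n; i++) {
            ranges.add(new AbstractMap.SimpleEntry<>((double) i / n, (double) (i + 1) / n));
        }
        return ranges;
    }

    public static void main(String[] args) {
        // stream1 starts with 2 segments: [0.0, 0.5) and [0.5, 1.0).
        System.out.println(evenRanges(2));
        // The scale in setUp seals segment 1 ([0.5, 1.0)) and replaces it with
        // [0.5, 0.75) and [0.75, 1.0), leaving [0.0, 0.5) untouched.
        System.out.println(evenRanges(3));  // stream2's initial 3 segments
    }
}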

Aggregations

StreamMetadataTasks (io.pravega.controller.task.Stream.StreamMetadataTasks): 37 usages
StreamTransactionMetadataTasks (io.pravega.controller.task.Stream.StreamTransactionMetadataTasks): 27 usages
Before (org.junit.Before): 22 usages
BucketStore (io.pravega.controller.store.stream.BucketStore): 21 usages
StreamMetadataStore (io.pravega.controller.store.stream.StreamMetadataStore): 21 usages
SegmentHelper (io.pravega.controller.server.SegmentHelper): 20 usages
Test (org.junit.Test): 20 usages
TaskMetadataStore (io.pravega.controller.store.task.TaskMetadataStore): 19 usages
TableMetadataTasks (io.pravega.controller.task.KeyValueTable.TableMetadataTasks): 18 usages
CompletableFuture (java.util.concurrent.CompletableFuture): 17 usages
TestingServerStarter (io.pravega.test.common.TestingServerStarter): 15 usages
ScheduledExecutorService (java.util.concurrent.ScheduledExecutorService): 14 usages
StreamConfiguration (io.pravega.client.stream.StreamConfiguration): 13 usages
AutoScaleTask (io.pravega.controller.server.eventProcessor.requesthandlers.AutoScaleTask): 13 usages
ScaleOperationTask (io.pravega.controller.server.eventProcessor.requesthandlers.ScaleOperationTask): 13 usages
StreamRequestHandler (io.pravega.controller.server.eventProcessor.requesthandlers.StreamRequestHandler): 13 usages
GrpcAuthHelper (io.pravega.controller.server.security.auth.GrpcAuthHelper): 13 usages
ScalingPolicy (io.pravega.client.stream.ScalingPolicy): 12 usages
StreamStoreFactory (io.pravega.controller.store.stream.StreamStoreFactory): 12 usages
TaskStoreFactory (io.pravega.controller.store.task.TaskStoreFactory): 12 usages