
Example 1 with Flow2Handler

Use of com.sequenceiq.cloudbreak.core.flow2.Flow2Handler in project cloudbreak by hortonworks.

From the class HeartbeatServiceTest, method testDistributionConcurrency.

@Test
public void testDistributionConcurrency() {
    List<CloudbreakNode> clusterNodes = getClusterNodes();
    // myself
    clusterNodes.get(0).setLastUpdated(200_000);
    // failed node
    clusterNodes.get(1).setLastUpdated(50_000);
    // active node
    clusterNodes.get(2).setLastUpdated(200_000);
    when(cloudbreakNodeRepository.findAll()).thenReturn(clusterNodes);
    when(clock.getCurrentTime()).thenReturn(200_000L);
    // all flows that need to be re-distributed
    List<String> suspendedFlows = new ArrayList<>();
    List<FlowLog> node1FlowLogs = getFlowLogs(3, 5000);
    suspendedFlows.addAll(node1FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_1_ID)).thenReturn(new HashSet<>(node1FlowLogs));
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    distribution.computeIfAbsent(clusterNodes.get(0), v -> new ArrayList<>()).addAll(Arrays.asList(suspendedFlows.get(0), suspendedFlows.get(2)));
    distribution.computeIfAbsent(clusterNodes.get(2), v -> new ArrayList<>()).addAll(Collections.singletonList(suspendedFlows.get(1)));
    when(flowDistributor.distribute(any(), any())).thenReturn(distribution);
    Set<FlowLog> myNewFlowLogs = new HashSet<>();
    myNewFlowLogs.addAll(node1FlowLogs.stream().filter(fl -> fl.getFlowId().equalsIgnoreCase(suspendedFlows.get(0))).collect(Collectors.toList()));
    myNewFlowLogs.addAll(node1FlowLogs.stream().filter(fl -> fl.getFlowId().equalsIgnoreCase(suspendedFlows.get(2))).collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(MY_ID)).thenReturn(myNewFlowLogs);
    when(runningFlows.get(any())).thenReturn(null);
    when(flowLogRepository.save(anyCollection())).thenThrow(new OptimisticLockingFailureException("Someone already distributed the flows.."));
    heartbeatService.scheduledFlowDistribution();
    verify(flow2Handler, times(2)).restartFlow(stringCaptor.capture());
    List<String> allFlowIds = stringCaptor.getAllValues();
    assertEquals(2, allFlowIds.size());
    for (FlowLog flowLog : myNewFlowLogs) {
        assertTrue(allFlowIds.contains(flowLog.getFlowId()));
    }
}
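
All five examples on this page run against the same Mockito fixture, which the snippets omit. Below is a minimal sketch of what that setup plausibly looks like, inferred from the fields referenced above; the node id values are placeholders, and the clock collaborator (stubbed via clock.getCurrentTime()) is left out because its type is not visible here.

// Sketch of the shared fixture the snippets on this page reference. Field names
// match the usages shown; the node id values are placeholders, not the real
// constants, and the mocked clock is omitted because its type is not shown.
@RunWith(MockitoJUnitRunner.class)
public class HeartbeatServiceTest {

    private static final String MY_ID = "node-self";   // placeholder value
    private static final String NODE_1_ID = "node-1";  // placeholder value
    private static final String NODE_2_ID = "node-2";  // placeholder value

    @Mock
    private CloudbreakNodeRepository cloudbreakNodeRepository;

    @Mock
    private FlowLogRepository flowLogRepository;

    @Mock
    private StackRepository stackRepository;

    @Mock
    private FlowDistributor flowDistributor;

    @Mock
    private Flow2Handler flow2Handler;

    @Mock
    private FlowRegister runningFlows;

    @InjectMocks
    private HeartbeatService heartbeatService;

    @Captor
    private ArgumentCaptor<String> stringCaptor;

    @Captor
    private ArgumentCaptor<List<FlowLog>> flowLogListCaptor;

    // ... test methods shown in the examples on this page ...
}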

Example 2 with Flow2Handler

Use of com.sequenceiq.cloudbreak.core.flow2.Flow2Handler in project cloudbreak by hortonworks.

From the class HeartbeatServiceTest, method testOneNodeTakesAllFlowsWithInvalidFlows.

@Test
public void testOneNodeTakesAllFlowsWithInvalidFlows() {
    List<CloudbreakNode> clusterNodes = getClusterNodes();
    // myself
    clusterNodes.get(0).setLastUpdated(200_000);
    // set all nodes to failed except myself
    for (int i = 1; i < clusterNodes.size(); i++) {
        CloudbreakNode node = clusterNodes.get(i);
        node.setLastUpdated(50_000);
    }
    when(cloudbreakNodeRepository.findAll()).thenReturn(clusterNodes);
    when(clock.getCurrentTime()).thenReturn(200_000L);
    // all flows that need to be re-distributed
    List<String> suspendedFlows = new ArrayList<>();
    List<FlowLog> node1FlowLogs = getFlowLogs(2, 5000);
    suspendedFlows.addAll(node1FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_1_ID)).thenReturn(new HashSet<>(node1FlowLogs));
    Set<FlowLog> node2FlowLogs = new HashSet<>(getFlowLogs(3, 3000));
    suspendedFlows.addAll(node2FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_2_ID)).thenReturn(node2FlowLogs);
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    distribution.computeIfAbsent(clusterNodes.get(0), v -> new ArrayList<>()).addAll(Arrays.asList(suspendedFlows.get(0), suspendedFlows.get(1), suspendedFlows.get(2), suspendedFlows.get(3), suspendedFlows.get(4)));
    when(flowDistributor.distribute(any(), any())).thenReturn(distribution);
    Set<FlowLog> myNewFlowLogs = new HashSet<>();
    myNewFlowLogs.addAll(node1FlowLogs);
    myNewFlowLogs.addAll(node2FlowLogs);
    when(flowLogRepository.findAllByCloudbreakNodeId(MY_ID)).thenReturn(myNewFlowLogs);
    when(runningFlows.get(any())).thenReturn(null);
    List<Long> stackIds = myNewFlowLogs.stream().map(FlowLog::getStackId).distinct().collect(Collectors.toList());
    List<Object[]> statusResponse = new ArrayList<>();
    statusResponse.add(new Object[] { stackIds.get(0), Status.DELETE_IN_PROGRESS });
    statusResponse.add(new Object[] { stackIds.get(2), Status.DELETE_IN_PROGRESS });
    when(stackRepository.findStackStatuses(any())).thenReturn(statusResponse);
    List<FlowLog> invalidFlowLogs = myNewFlowLogs.stream().filter(fl -> fl.getStackId().equals(stackIds.get(0)) || fl.getStackId().equals(stackIds.get(2))).collect(Collectors.toList());
    heartbeatService.scheduledFlowDistribution();
    verify(flowLogRepository).save(flowLogListCaptor.capture());
    List<FlowLog> updatedFlows = flowLogListCaptor.getValue();
    assertEquals(myNewFlowLogs.size(), updatedFlows.size());
    for (FlowLog updatedFlow : updatedFlows) {
        if (invalidFlowLogs.contains(updatedFlow)) {
            assertTrue(updatedFlow.getFinalized());
            assertEquals(null, updatedFlow.getCloudbreakNodeId());
        } else {
            assertEquals(MY_ID, updatedFlow.getCloudbreakNodeId());
        }
    }
    verify(flow2Handler, times(5)).restartFlow(stringCaptor.capture());
    List<String> allFlowIds = stringCaptor.getAllValues();
    assertEquals(5, allFlowIds.size());
    for (String flowId : suspendedFlows) {
        assertTrue(allFlowIds.contains(flowId));
    }
}
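
The getFlowLogs(flowCount, from) helper called in every example is not shown on this page. The sketch below is a hypothetical reconstruction of its apparent contract: a few FlowLog rows per distinct flow id, with a distinct stack id per flow. The no-arg constructor and setters used here are assumptions about the FlowLog entity; the real helper may build the rows differently.

// Hypothetical reconstruction of getFlowLogs; the real helper in
// HeartbeatServiceTest may differ. Constructor and setters are assumed.
private List<FlowLog> getFlowLogs(int flowCount, int from) {
    List<FlowLog> flowLogs = new ArrayList<>();
    Random random = new Random();
    for (int i = 0; i < flowCount; i++) {
        String flowId = "flow-" + (from + i);   // placeholder flow id scheme
        long stackId = from + i;                // placeholder stack id, distinct per flow
        int rows = 2 + random.nextInt(3);       // a few persisted rows per flow
        for (int j = 0; j < rows; j++) {
            FlowLog flowLog = new FlowLog();    // assumed no-arg constructor
            flowLog.setFlowId(flowId);          // assumed setter
            flowLog.setStackId(stackId);        // assumed setter
            flowLogs.add(flowLog);
        }
    }
    return flowLogs;
}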

Example 3 with Flow2Handler

Use of com.sequenceiq.cloudbreak.core.flow2.Flow2Handler in project cloudbreak by hortonworks.

From the class HeartbeatServiceTest, method testOneNodeTakesAllFlowsWithTerminationFlowShouldBeDistributed.

@Test
public void testOneNodeTakesAllFlowsWithTerminationFlowShouldBeDistributed() {
    List<CloudbreakNode> clusterNodes = getClusterNodes();
    // myself
    clusterNodes.get(0).setLastUpdated(200_000);
    // set all nodes to failed except myself
    for (int i = 1; i < clusterNodes.size(); i++) {
        CloudbreakNode node = clusterNodes.get(i);
        node.setLastUpdated(50_000);
    }
    when(cloudbreakNodeRepository.findAll()).thenReturn(clusterNodes);
    when(clock.getCurrentTime()).thenReturn(200_000L);
    // all flows that need to be re-distributed
    List<String> suspendedFlows = new ArrayList<>();
    List<FlowLog> node1FlowLogs = getFlowLogs(2, 5000);
    node1FlowLogs.forEach(fl -> fl.setFlowType(StackTerminationFlowConfig.class));
    suspendedFlows.addAll(node1FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_1_ID)).thenReturn(new HashSet<>(node1FlowLogs));
    Set<FlowLog> node2FlowLogs = new HashSet<>(getFlowLogs(3, 3000));
    node2FlowLogs.forEach(fl -> fl.setFlowType(StackTerminationFlowConfig.class));
    suspendedFlows.addAll(node2FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_2_ID)).thenReturn(node2FlowLogs);
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    distribution.computeIfAbsent(clusterNodes.get(0), v -> new ArrayList<>()).addAll(Arrays.asList(suspendedFlows.get(0), suspendedFlows.get(1), suspendedFlows.get(2), suspendedFlows.get(3), suspendedFlows.get(4)));
    when(flowDistributor.distribute(any(), any())).thenReturn(distribution);
    Set<FlowLog> myNewFlowLogs = new HashSet<>();
    myNewFlowLogs.addAll(node1FlowLogs);
    myNewFlowLogs.addAll(node2FlowLogs);
    when(flowLogRepository.findAllByCloudbreakNodeId(MY_ID)).thenReturn(myNewFlowLogs);
    when(runningFlows.get(any())).thenReturn(null);
    List<Long> stackIds = myNewFlowLogs.stream().map(FlowLog::getStackId).distinct().collect(Collectors.toList());
    List<Object[]> statusResponse = new ArrayList<>();
    statusResponse.add(new Object[] { stackIds.get(0), Status.DELETE_IN_PROGRESS });
    statusResponse.add(new Object[] { stackIds.get(2), Status.DELETE_IN_PROGRESS });
    when(stackRepository.findStackStatuses(any())).thenReturn(statusResponse);
    heartbeatService.scheduledFlowDistribution();
    verify(flowLogRepository).save(flowLogListCaptor.capture());
    List<FlowLog> updatedFlows = flowLogListCaptor.getValue();
    assertEquals(myNewFlowLogs.size(), updatedFlows.size());
    for (FlowLog updatedFlow : updatedFlows) {
        assertEquals(MY_ID, updatedFlow.getCloudbreakNodeId());
    }
    verify(flow2Handler, times(5)).restartFlow(stringCaptor.capture());
    List<String> allFlowIds = stringCaptor.getAllValues();
    assertEquals(5, allFlowIds.size());
    for (String flowId : suspendedFlows) {
        assertTrue(allFlowIds.contains(flowId));
    }
}
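
The FlowDistributor is mocked in all of these tests, so the page never shows an actual distribution policy. As an illustration only, a naive round-robin split over the surviving nodes would produce a map of the same shape the tests stub; the method name, parameters, and policy below are assumptions, not the project's FlowDistributor.

// Illustration only: a naive round-robin assignment of suspended flow ids to
// the nodes still considered alive. Not the real FlowDistributor implementation.
static Map<CloudbreakNode, List<String>> roundRobin(List<String> suspendedFlowIds, List<CloudbreakNode> activeNodes) {
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    for (int i = 0; i < suspendedFlowIds.size(); i++) {
        CloudbreakNode target = activeNodes.get(i % activeNodes.size());
        distribution.computeIfAbsent(target, k -> new ArrayList<>()).add(suspendedFlowIds.get(i));
    }
    return distribution;
}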

Example 4 with Flow2Handler

Use of com.sequenceiq.cloudbreak.core.flow2.Flow2Handler in project cloudbreak by hortonworks.

From the class HeartbeatServiceTest, method testOneNodeTakesAllFlows.

@Test
public void testOneNodeTakesAllFlows() {
    List<CloudbreakNode> clusterNodes = getClusterNodes();
    // myself
    clusterNodes.get(0).setLastUpdated(200_000);
    // set all nodes to failed except myself
    for (int i = 1; i < clusterNodes.size(); i++) {
        CloudbreakNode node = clusterNodes.get(i);
        node.setLastUpdated(50_000);
    }
    when(cloudbreakNodeRepository.findAll()).thenReturn(clusterNodes);
    when(clock.getCurrentTime()).thenReturn(200_000L);
    // all flows that need to be re-distributed
    List<String> suspendedFlows = new ArrayList<>();
    List<FlowLog> node1FlowLogs = getFlowLogs(2, 5000);
    suspendedFlows.addAll(node1FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_1_ID)).thenReturn(new HashSet<>(node1FlowLogs));
    Set<FlowLog> node2FlowLogs = new HashSet<>(getFlowLogs(3, 3000));
    suspendedFlows.addAll(node2FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_2_ID)).thenReturn(node2FlowLogs);
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    distribution.computeIfAbsent(clusterNodes.get(0), v -> new ArrayList<>()).addAll(Arrays.asList(suspendedFlows.get(0), suspendedFlows.get(1), suspendedFlows.get(2), suspendedFlows.get(3), suspendedFlows.get(4)));
    when(flowDistributor.distribute(any(), any())).thenReturn(distribution);
    Set<FlowLog> myNewFlowLogs = new HashSet<>();
    myNewFlowLogs.addAll(node1FlowLogs);
    myNewFlowLogs.addAll(node2FlowLogs);
    when(flowLogRepository.findAllByCloudbreakNodeId(MY_ID)).thenReturn(myNewFlowLogs);
    when(runningFlows.get(any())).thenReturn(null);
    heartbeatService.scheduledFlowDistribution();
    verify(flowLogRepository).save(flowLogListCaptor.capture());
    List<FlowLog> updatedFlows = flowLogListCaptor.getValue();
    assertEquals(myNewFlowLogs.size(), updatedFlows.size());
    for (FlowLog updatedFlow : updatedFlows) {
        assertEquals(MY_ID, updatedFlow.getCloudbreakNodeId());
    }
    verify(flow2Handler, times(5)).restartFlow(stringCaptor.capture());
    List<String> allFlowIds = stringCaptor.getAllValues();
    assertEquals(5, allFlowIds.size());
    for (String flowId : suspendedFlows) {
        assertTrue(allFlowIds.contains(flowId));
    }
}
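
The lastUpdated values (200_000 for the live node, 50_000 for the failed ones) together with the clock stubbed at 200_000 exercise some staleness threshold inside HeartbeatService. The check below only illustrates that idea; the threshold value, where it comes from, and the method names are assumptions, not the service's real API.

// Illustration of the staleness idea the timestamps above exercise; the actual
// threshold and how HeartbeatService computes it are not shown on this page.
private static final long ASSUMED_HEARTBEAT_THRESHOLD_MS = 70_000L; // placeholder

static boolean looksFailed(CloudbreakNode node, long currentTimeMs) {
    // getLastUpdated() is assumed to mirror the setLastUpdated() used in the tests
    return currentTimeMs - node.getLastUpdated() > ASSUMED_HEARTBEAT_THRESHOLD_MS;
}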

Example 5 with Flow2Handler

Use of com.sequenceiq.cloudbreak.core.flow2.Flow2Handler in project cloudbreak by hortonworks.

From the class HeartbeatServiceTest, method testOneNodeTakesAllFlowsWithCleanup.

@Test
public void testOneNodeTakesAllFlowsWithCleanup() {
    List<CloudbreakNode> clusterNodes = getClusterNodes();
    // myself
    clusterNodes.get(0).setLastUpdated(200_000);
    // set all nodes to failed except myself
    for (int i = 1; i < clusterNodes.size(); i++) {
        CloudbreakNode node = clusterNodes.get(i);
        node.setLastUpdated(50_000);
    }
    when(cloudbreakNodeRepository.findAll()).thenReturn(clusterNodes);
    when(clock.getCurrentTime()).thenReturn(200_000L);
    // all flows that need to be re-distributed
    List<String> suspendedFlows = new ArrayList<>();
    List<FlowLog> node1FlowLogs = getFlowLogs(2, 5000);
    suspendedFlows.addAll(node1FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_1_ID)).thenReturn(new HashSet<>(node1FlowLogs)).thenReturn(Collections.emptySet());
    Set<FlowLog> node2FlowLogs = new HashSet<>(getFlowLogs(3, 3000));
    suspendedFlows.addAll(node2FlowLogs.stream().map(FlowLog::getFlowId).distinct().collect(Collectors.toList()));
    when(flowLogRepository.findAllByCloudbreakNodeId(NODE_2_ID)).thenReturn(node2FlowLogs).thenReturn(Collections.emptySet());
    Map<CloudbreakNode, List<String>> distribution = new HashMap<>();
    distribution.computeIfAbsent(clusterNodes.get(0), v -> new ArrayList<>()).addAll(Arrays.asList(suspendedFlows.get(0), suspendedFlows.get(1), suspendedFlows.get(2), suspendedFlows.get(3), suspendedFlows.get(4)));
    when(flowDistributor.distribute(any(), any())).thenReturn(distribution);
    Set<FlowLog> myNewFlowLogs = new HashSet<>();
    myNewFlowLogs.addAll(node1FlowLogs);
    myNewFlowLogs.addAll(node2FlowLogs);
    when(flowLogRepository.findAllByCloudbreakNodeId(MY_ID)).thenReturn(myNewFlowLogs);
    when(runningFlows.get(any())).thenReturn(null);
    heartbeatService.scheduledFlowDistribution();
    verify(flowLogRepository).save(flowLogListCaptor.capture());
    List<FlowLog> updatedFlows = flowLogListCaptor.getValue();
    assertEquals(myNewFlowLogs.size(), updatedFlows.size());
    for (FlowLog updatedFlow : updatedFlows) {
        assertEquals(MY_ID, updatedFlow.getCloudbreakNodeId());
    }
    verify(flow2Handler, times(5)).restartFlow(stringCaptor.capture());
    List<String> allFlowIds = stringCaptor.getAllValues();
    assertEquals(5, allFlowIds.size());
    for (String flowId : suspendedFlows) {
        assertTrue(allFlowIds.contains(flowId));
    }
}
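
getClusterNodes() is the other helper every example relies on but the page does not include. A plausible sketch, assuming CloudbreakNode can be built from its node id (the real test may populate it differently):

// Hypothetical getClusterNodes: three nodes, the first representing this
// instance (MY_ID). The CloudbreakNode constructor used here is an assumption.
private List<CloudbreakNode> getClusterNodes() {
    List<CloudbreakNode> nodes = new ArrayList<>();
    nodes.add(new CloudbreakNode(MY_ID));
    nodes.add(new CloudbreakNode(NODE_1_ID));
    nodes.add(new CloudbreakNode(NODE_2_ID));
    return nodes;
}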

Aggregations

Status (com.sequenceiq.cloudbreak.api.model.Status): 6
PollGroup (com.sequenceiq.cloudbreak.cloud.scheduler.PollGroup): 6
InMemoryStateStore (com.sequenceiq.cloudbreak.cloud.store.InMemoryStateStore): 6
Flow2Handler (com.sequenceiq.cloudbreak.core.flow2.Flow2Handler): 6
FlowRegister (com.sequenceiq.cloudbreak.core.flow2.FlowRegister): 6
StackCreationFlowConfig (com.sequenceiq.cloudbreak.core.flow2.stack.provision.StackCreationFlowConfig): 6
StackTerminationFlowConfig (com.sequenceiq.cloudbreak.core.flow2.stack.termination.StackTerminationFlowConfig): 6
CloudbreakNode (com.sequenceiq.cloudbreak.domain.CloudbreakNode): 6
FlowLog (com.sequenceiq.cloudbreak.domain.FlowLog): 6
CloudbreakNodeConfig (com.sequenceiq.cloudbreak.ha.CloudbreakNodeConfig): 6
CloudbreakNodeRepository (com.sequenceiq.cloudbreak.repository.CloudbreakNodeRepository): 6
FlowLogRepository (com.sequenceiq.cloudbreak.repository.FlowLogRepository): 6
StackRepository (com.sequenceiq.cloudbreak.repository.StackRepository): 6
FlowDistributor (com.sequenceiq.cloudbreak.service.ha.FlowDistributor): 6
HeartbeatService (com.sequenceiq.cloudbreak.service.ha.HeartbeatService): 6
ArrayList (java.util.ArrayList): 6
Arrays (java.util.Arrays): 6
Collections (java.util.Collections): 6
HashMap (java.util.HashMap): 6
HashSet (java.util.HashSet): 6