Search in sources:

Example 11 with Pipeline

use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

The class AbstractReconContainerManagerTest, method getTestContainer.

protected ContainerWithPipeline getTestContainer(LifeCycleState state) throws IOException {
    // Register a fresh random pipeline first so the container can reference it.
    Pipeline pipeline = getRandomPipeline();
    pipelineManager.addPipeline(pipeline);

    // Build a test container (fixed ID 100, 10 keys, STANDALONE/ONE replication,
    // owner "test") in the caller-requested lifecycle state.
    ContainerID containerID = ContainerID.valueOf(100L);
    ContainerInfo containerInfo = new ContainerInfo.Builder()
        .setContainerID(containerID.getId())
        .setNumberOfKeys(10)
        .setPipelineID(pipeline.getId())
        .setReplicationConfig(StandaloneReplicationConfig.getInstance(ONE))
        .setOwner("test")
        .setState(state)
        .build();
    return new ContainerWithPipeline(containerInfo, pipeline);
}
Also used : ContainerID(org.apache.hadoop.hdds.scm.container.ContainerID) ContainerInfo(org.apache.hadoop.hdds.scm.container.ContainerInfo) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) OMMetadataManagerTestUtils.getRandomPipeline(org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getRandomPipeline) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)

Example 12 with Pipeline

use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

The class TestReconContainerManager, method testUpdateAndRemoveContainerReplica.

@Test
public void testUpdateAndRemoveContainerReplica() throws IOException {
    // Sanity checks for updateContainerReplica and the in-memory
    // ContainerReplicaHistory map kept by ReconContainerManager.
    // Consistency fix: this method previously mixed statically imported
    // assertEquals with Assert.assertEquals; it now uses Assert.* throughout.
    // Init Container 1.
    final long cIDlong1 = 1L;
    final ContainerID containerID1 = ContainerID.valueOf(cIDlong1);
    // Init DN01.
    final UUID uuid1 = UUID.randomUUID();
    final DatanodeDetails datanodeDetails1 = DatanodeDetails.newBuilder()
        .setUuid(uuid1).setHostName("host1").setIpAddress("127.0.0.1").build();
    ContainerReplica containerReplica1 = ContainerReplica.newBuilder()
        .setContainerID(containerID1).setContainerState(State.OPEN)
        .setDatanodeDetails(datanodeDetails1).setSequenceId(1001L).build();
    final ReconContainerManager containerManager = getContainerManager();
    final Map<Long, Map<UUID, ContainerReplicaHistory>> repHistMap =
        containerManager.getReplicaHistoryMap();
    // Should be empty at the beginning.
    Assert.assertEquals(0, repHistMap.size());
    // Register a pipeline and some containers, then report a replica for
    // container 1 via updateContainerReplica.
    Pipeline pipeline = getRandomPipeline();
    getPipelineManager().addPipeline(pipeline);
    for (int i = 1; i <= 10; i++) {
        final ContainerInfo info = newContainerInfo(i, pipeline);
        containerManager.addNewContainer(new ContainerWithPipeline(info, pipeline));
    }
    containerManager.updateContainerReplica(containerID1, containerReplica1);
    // Should have 1 container entry in the replica history map.
    Assert.assertEquals(1, repHistMap.size());
    // Should only have 1 entry for this replica (on DN01).
    Assert.assertEquals(1, repHistMap.get(cIDlong1).size());
    ContainerReplicaHistory repHist1 = repHistMap.get(cIDlong1).get(uuid1);
    Assert.assertEquals(uuid1, repHist1.getUuid());
    // Because this is a new entry, first seen time equals last seen time.
    Assert.assertEquals(repHist1.getLastSeenTime(), repHist1.getFirstSeenTime());
    Assert.assertEquals(containerReplica1.getSequenceId().longValue(),
        repHist1.getBcsId());
    // Let's update the entry again with a newer sequence id.
    containerReplica1 = ContainerReplica.newBuilder()
        .setContainerID(containerID1).setContainerState(State.OPEN)
        .setDatanodeDetails(datanodeDetails1).setSequenceId(1051L).build();
    containerManager.updateContainerReplica(containerID1, containerReplica1);
    // Should still have 1 entry in the replica history map.
    Assert.assertEquals(1, repHistMap.size());
    // Now last seen time should be larger than first seen time.
    // NOTE(review): this can be flaky if both updates land within the clock's
    // resolution (last seen == first seen) — consider >= or an injected clock.
    Assert.assertTrue(repHist1.getLastSeenTime() > repHist1.getFirstSeenTime());
    Assert.assertEquals(1051L, repHist1.getBcsId());
    // Init DN02.
    final UUID uuid2 = UUID.randomUUID();
    final DatanodeDetails datanodeDetails2 = DatanodeDetails.newBuilder()
        .setUuid(uuid2).setHostName("host2").setIpAddress("127.0.0.2").build();
    final ContainerReplica containerReplica2 = ContainerReplica.newBuilder()
        .setContainerID(containerID1).setContainerState(State.OPEN)
        .setDatanodeDetails(datanodeDetails2).setSequenceId(1051L).build();
    // Add replica to DN02.
    containerManager.updateContainerReplica(containerID1, containerReplica2);
    // Should still have 1 container entry in the replica history map.
    Assert.assertEquals(1, repHistMap.size());
    // Should have 2 entries for this replica (on DN01 and DN02).
    Assert.assertEquals(2, repHistMap.get(cIDlong1).size());
    ContainerReplicaHistory repHist2 = repHistMap.get(cIDlong1).get(uuid2);
    Assert.assertEquals(uuid2, repHist2.getUuid());
    // Because this is a new entry, first seen time equals last seen time.
    Assert.assertEquals(repHist2.getLastSeenTime(), repHist2.getFirstSeenTime());
    Assert.assertEquals(1051L, repHist2.getBcsId());
    // Remove replica from DN01.
    containerManager.removeContainerReplica(containerID1, containerReplica1);
    // Should still have 1 container entry in the replica history map.
    Assert.assertEquals(1, repHistMap.size());
    // Should have 1 entry for this replica.
    Assert.assertEquals(1, repHistMap.get(cIDlong1).size());
    // And the only entry should match DN02.
    Assert.assertEquals(uuid2,
        repHistMap.get(cIDlong1).keySet().iterator().next());
}
Also used : ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) OMMetadataManagerTestUtils.getRandomPipeline(org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getRandomPipeline) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline) ContainerID(org.apache.hadoop.hdds.scm.container.ContainerID) ContainerReplica(org.apache.hadoop.hdds.scm.container.ContainerReplica) MockDatanodeDetails.randomDatanodeDetails(org.apache.hadoop.hdds.protocol.MockDatanodeDetails.randomDatanodeDetails) DatanodeDetails(org.apache.hadoop.hdds.protocol.DatanodeDetails) ContainerInfo(org.apache.hadoop.hdds.scm.container.ContainerInfo) UUID(java.util.UUID) Map(java.util.Map) Test(org.junit.Test)

Example 13 with Pipeline

use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

The class TestReconPipelineReportHandler, method testProcessPipelineReport.

@Test
public void testProcessPipelineReport() throws IOException {
    // Case 1: report for a pipeline Recon has never seen.
    Pipeline pipeline = getRandomPipeline();
    PipelineID pipelineID = pipeline.getId();
    HddsProtos.PipelineID pipelineIDProto = pipelineID.getProtobuf();

    ReconPipelineManager reconPipelineManagerMock =
        mock(ReconPipelineManager.class);
    when(reconPipelineManagerMock.getPipeline(pipelineID))
        .thenReturn(pipeline);
    StorageContainerServiceProvider scmServiceProviderMock =
        mock(StorageContainerServiceProvider.class);
    when(scmServiceProviderMock.getPipeline(pipelineIDProto))
        .thenReturn(pipeline);

    OzoneConfiguration configuration = new OzoneConfiguration();
    ReconPipelineReportHandler handler = new ReconPipelineReportHandler(
        new ReconSafeModeManager(), reconPipelineManagerMock,
        SCMContext.emptyContext(), configuration, scmServiceProviderMock);

    EventPublisher eventPublisherMock = mock(EventPublisher.class);
    PipelineReport report = mock(PipelineReport.class);
    when(report.getPipelineID()).thenReturn(pipelineIDProto);
    handler.processPipelineReport(report, pipeline.getNodes().get(0),
        eventPublisherMock);
    // Unknown pipeline: handler must fetch it from SCM and add it.
    verify(reconPipelineManagerMock, times(1)).addPipeline(pipeline);
    verify(reconPipelineManagerMock, times(1)).getPipeline(pipelineID);

    // Case 2: report for a pipeline Recon already tracks.
    pipeline = getRandomPipeline();
    pipelineID = pipeline.getId();
    pipelineIDProto = pipelineID.getProtobuf();
    when(reconPipelineManagerMock.containsPipeline(pipelineID))
        .thenReturn(true);
    when(reconPipelineManagerMock.getPipeline(pipelineID))
        .thenReturn(pipeline);
    when(report.getPipelineID()).thenReturn(pipelineIDProto);
    handler.processPipelineReport(report, pipeline.getNodes().get(0),
        eventPublisherMock);
    // Known pipeline: no add, only a lookup for this new pipeline id.
    verify(reconPipelineManagerMock, times(0)).addPipeline(pipeline);
    verify(reconPipelineManagerMock, times(1)).getPipeline(pipelineID);
}
Also used : PipelineReport(org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReport) StorageContainerServiceProvider(org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider) EventPublisher(org.apache.hadoop.hdds.server.events.EventPublisher) HddsProtos(org.apache.hadoop.hdds.protocol.proto.HddsProtos) PipelineID(org.apache.hadoop.hdds.scm.pipeline.PipelineID) OzoneConfiguration(org.apache.hadoop.hdds.conf.OzoneConfiguration) OMMetadataManagerTestUtils.getRandomPipeline(org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getRandomPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline) Test(org.junit.Test)

Example 14 with Pipeline

use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

The class ReconPipelineManager, method initializePipelines.

/**
 * Bootstrap Recon's pipeline metadata with that from SCM.
 * Unknown pipelines are added; known ones have their state and creation
 * time refreshed; pipelines no longer reported by SCM are removed.
 * @param pipelinesFromScm pipelines from SCM.
 * @throws IOException on exception.
 */
void initializePipelines(List<Pipeline> pipelinesFromScm) throws IOException {
    acquireWriteLock();
    try {
        List<Pipeline> pipelinesInHouse = getPipelines();
        LOG.info("Recon has {} pipelines in house.", pipelinesInHouse.size());
        for (Pipeline pipeline : pipelinesFromScm) {
            if (!containsPipeline(pipeline.getId())) {
                // New pipeline got from SCM. Recon does not know anything about it,
                // so let's add it.
                LOG.info("Adding new pipeline {} from SCM.", pipeline.getId());
                addPipeline(pipeline);
            } else {
                // Recon already has this pipeline. Just update state and creation
                // time.
                getStateManager().updatePipelineState(pipeline.getId().getProtobuf(), Pipeline.PipelineState.getProtobuf(pipeline.getPipelineState()));
                getPipeline(pipeline.getId()).setCreationTimestamp(pipeline.getCreationTimestamp());
            }
        }
        // BUGFIX: this call was previously inside the loop above, so the full
        // stale-pipeline sweep (re-acquiring the lock and rescanning all
        // in-house pipelines) ran once per SCM pipeline — O(n^2). One pass
        // after syncing is sufficient and behaviorally equivalent.
        // NOTE(review): assumes the write lock is reentrant, as the original
        // nested call did — confirm against the lock implementation.
        removeInvalidPipelines(pipelinesFromScm);
    } finally {
        releaseWriteLock();
    }
}
Also used : Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)

Example 15 with Pipeline

use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

The class ReconPipelineManager, method removeInvalidPipelines.

public void removeInvalidPipelines(List<Pipeline> pipelinesFromScm) {
    acquireWriteLock();
    try {
        // Pipelines Recon currently tracks that SCM no longer reports.
        // TODO Recon may need to track inactive pipelines as well. So this can be
        // removed in a followup JIRA.
        List<Pipeline> stalePipelines = getPipelines().stream()
            .filter(known -> !pipelinesFromScm.contains(known))
            .collect(Collectors.toList());
        for (Pipeline stale : stalePipelines) {
            PipelineID pipelineID = stale.getId();
            // Transition to CLOSED first (if not already) so removal is valid.
            if (!stale.getPipelineState().equals(CLOSED)) {
                try {
                    getStateManager().updatePipelineState(pipelineID.getProtobuf(),
                        HddsProtos.PipelineState.PIPELINE_CLOSED);
                } catch (IOException e) {
                    LOG.warn("Pipeline {} not found while updating state. ", stale.getId(), e);
                }
            }
            try {
                LOG.info("Removing invalid pipeline {} from Recon.", pipelineID);
                closePipeline(stale, false);
            } catch (IOException e) {
                LOG.warn("Unable to remove pipeline {}", pipelineID, e);
            }
        }
    } finally {
        releaseWriteLock();
    }
}
Also used : HddsProtos(org.apache.hadoop.hdds.protocol.proto.HddsProtos) NodeManager(org.apache.hadoop.hdds.scm.node.NodeManager) Logger(org.slf4j.Logger) LoggerFactory(org.slf4j.LoggerFactory) IOException(java.io.IOException) SCMContext(org.apache.hadoop.hdds.scm.ha.SCMContext) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline) Collectors(java.util.stream.Collectors) List(java.util.List) PipelineFactory(org.apache.hadoop.hdds.scm.pipeline.PipelineFactory) CLOSED(org.apache.hadoop.hdds.scm.pipeline.Pipeline.PipelineState.CLOSED) ConfigurationSource(org.apache.hadoop.hdds.conf.ConfigurationSource) PipelineStateManager(org.apache.hadoop.hdds.scm.pipeline.PipelineStateManager) PipelineStateManagerImpl(org.apache.hadoop.hdds.scm.pipeline.PipelineStateManagerImpl) EventPublisher(org.apache.hadoop.hdds.server.events.EventPublisher) Table(org.apache.hadoop.hdds.utils.db.Table) PipelineID(org.apache.hadoop.hdds.scm.pipeline.PipelineID) SCMHAManager(org.apache.hadoop.hdds.scm.ha.SCMHAManager) ClientVersion(org.apache.hadoop.ozone.ClientVersion) ZoneOffset(java.time.ZoneOffset) VisibleForTesting(com.google.common.annotations.VisibleForTesting) PipelineManagerImpl(org.apache.hadoop.hdds.scm.pipeline.PipelineManagerImpl) MonotonicClock(org.apache.hadoop.ozone.common.MonotonicClock) PipelineID(org.apache.hadoop.hdds.scm.pipeline.PipelineID) IOException(java.io.IOException) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)

Aggregations

Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline)199 DatanodeDetails (org.apache.hadoop.hdds.protocol.DatanodeDetails)73 Test (org.junit.Test)68 ArrayList (java.util.ArrayList)56 ContainerInfo (org.apache.hadoop.hdds.scm.container.ContainerInfo)51 IOException (java.io.IOException)47 Test (org.junit.jupiter.api.Test)42 BlockID (org.apache.hadoop.hdds.client.BlockID)41 OmKeyLocationInfo (org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo)40 ContainerWithPipeline (org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline)39 OzoneOutputStream (org.apache.hadoop.ozone.client.io.OzoneOutputStream)37 XceiverClientSpi (org.apache.hadoop.hdds.scm.XceiverClientSpi)32 OzoneConfiguration (org.apache.hadoop.hdds.conf.OzoneConfiguration)29 MockPipeline (org.apache.hadoop.hdds.scm.pipeline.MockPipeline)28 KeyOutputStream (org.apache.hadoop.ozone.client.io.KeyOutputStream)27 List (java.util.List)26 OmKeyInfo (org.apache.hadoop.ozone.om.helpers.OmKeyInfo)26 ContainerProtos (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos)24 HashMap (java.util.HashMap)23 PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID)21