
Example 6 with PipelineID

Use of org.apache.hadoop.hdds.scm.pipeline.PipelineID in project ozone by apache.

From the class HealthyReadOnlyNodeHandler, method onMessage:

@Override
public void onMessage(DatanodeDetails datanodeDetails, EventPublisher publisher) {
    LOG.info("Datanode {} moved to HEALTHY READONLY state.", datanodeDetails);
    Set<PipelineID> pipelineIDs = nodeManager.getPipelines(datanodeDetails);
    for (PipelineID id : pipelineIDs) {
        LOG.info("Closing pipeline {} which uses HEALTHY READONLY datanode {} ", id, datanodeDetails);
        try {
            pipelineManager.closePipeline(pipelineManager.getPipeline(id), false);
        } catch (IOException ex) {
            LOG.error("Failed to close pipeline {} which uses HEALTHY READONLY " + "datanode {}: ", id, datanodeDetails, ex);
        }
    }
    // add node back if it is not present in networkTopology
    NetworkTopology nt = nodeManager.getClusterNetworkTopologyMap();
    if (!nt.contains(datanodeDetails)) {
        nt.add(datanodeDetails);
        // make sure after DN is added back into topology, DatanodeDetails
        // instance returned from nodeStateManager has parent correctly set.
        Preconditions.checkNotNull(nodeManager.getNodeByUuid(datanodeDetails.getUuidString()).getParent());
    }
}
Also used: NetworkTopology (org.apache.hadoop.hdds.scm.net.NetworkTopology), PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID), IOException (java.io.IOException)
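
The handler above only receives PipelineID values from nodeManager.getPipelines and hands them straight back to pipelineManager.getPipeline. For orientation, the snippet below is a minimal sketch of the PipelineID type itself (creation, protobuf round trip, value equality). The method names are taken from the Ozone codebase, but treat the exact signatures as assumptions that may vary between Ozone versions.

import java.util.UUID;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;

public final class PipelineIdSketch {
    public static void main(String[] args) {
        // A PipelineID wraps a UUID; randomId() creates a fresh identifier.
        PipelineID id = PipelineID.randomId();

        // It can be rebuilt from an existing UUID, for example one read back from persisted state.
        UUID uuid = id.getId();
        PipelineID same = PipelineID.valueOf(uuid);

        // Round trip through the protobuf form used on the wire (compare getProtobuf() in Example 10 below).
        HddsProtos.PipelineID proto = id.getProtobuf();
        PipelineID restored = PipelineID.getFromProtobuf(proto);

        // Equality is based on the wrapped UUID, which is why PipelineID works as a key
        // in the Set<PipelineID> returned by nodeManager.getPipelines(...).
        System.out.println(id.equals(same) && id.equals(restored));
    }
}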

Example 7 with PipelineID

Use of org.apache.hadoop.hdds.scm.pipeline.PipelineID in project ozone by apache.

From the class StaleNodeHandler, method onMessage:

@Override
public void onMessage(DatanodeDetails datanodeDetails, EventPublisher publisher) {
    Set<PipelineID> pipelineIds = nodeManager.getPipelines(datanodeDetails);
    LOG.info("Datanode {} moved to stale state. Finalizing its pipelines {}", datanodeDetails, pipelineIds);
    for (PipelineID pipelineID : pipelineIds) {
        try {
            Pipeline pipeline = pipelineManager.getPipeline(pipelineID);
            pipelineManager.closePipeline(pipeline, true);
        } catch (IOException e) {
            LOG.info("Could not finalize pipeline={} for dn={}", pipelineID, datanodeDetails);
        }
    }
}
Also used: PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID), IOException (java.io.IOException), Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline)

Example 8 with PipelineID

Use of org.apache.hadoop.hdds.scm.pipeline.PipelineID in project ozone by apache.

From the class StartDatanodeAdminHandler, method onMessage:

@Override
public void onMessage(DatanodeDetails datanodeDetails, EventPublisher publisher) {
    Set<PipelineID> pipelineIds = nodeManager.getPipelines(datanodeDetails);
    LOG.info("Admin start on datanode {}. Finalizing its pipelines {}", datanodeDetails, pipelineIds);
    for (PipelineID pipelineID : pipelineIds) {
        try {
            Pipeline pipeline = pipelineManager.getPipeline(pipelineID);
            pipelineManager.closePipeline(pipeline, false);
        } catch (IOException e) {
            LOG.info("Could not finalize pipeline={} for dn={}", pipelineID, datanodeDetails);
        }
    }
}
Also used: PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID), IOException (java.io.IOException), Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline)

Example 9 with PipelineID

Use of org.apache.hadoop.hdds.scm.pipeline.PipelineID in project ozone by apache.

From the class TestSCMInstallSnapshot, method testInstallCheckPoint:

@Test
public void testInstallCheckPoint() throws Exception {
    DBCheckpoint checkpoint = downloadSnapshot();
    StorageContainerManager scm = cluster.getStorageContainerManager();
    DBStore db = HAUtils.loadDB(conf,
        checkpoint.getCheckpointLocation().getParent().toFile(),
        checkpoint.getCheckpointLocation().getFileName().toString(),
        new SCMDBDefinition());
    Assert.assertNotNull(db);
    // Bump the transaction index stored in the checkpoint so that it is guaranteed
    // to be higher than the index SCM had at the time the checkpoint was downloaded.
    HAUtils.getTransactionInfoTable(db, new SCMDBDefinition())
        .put(OzoneConsts.TRANSACTION_INFO_KEY,
            TransactionInfo.builder().setCurrentTerm(10).setTransactionIndex(100).build());
    db.close();
    ContainerID cid = scm.getContainerManager().getContainers().get(0).containerID();
    PipelineID pipelineID = scm.getPipelineManager().getPipelines().get(0).getId();
    scm.getScmMetadataStore().getPipelineTable().delete(pipelineID);
    scm.getContainerManager().deleteContainer(cid);
    Assert.assertNull(scm.getScmMetadataStore().getPipelineTable().get(pipelineID));
    Assert.assertFalse(scm.getContainerManager().containerExist(cid));
    SCMStateMachine sm = scm.getScmHAManager().getRatisServer().getSCMStateMachine();
    sm.pause();
    sm.setInstallingDBCheckpoint(checkpoint);
    sm.reinitialize();
    Assert.assertNotNull(scm.getScmMetadataStore().getPipelineTable().get(pipelineID));
    Assert.assertNotNull(scm.getScmMetadataStore().getContainerTable().get(cid));
    Assert.assertTrue(scm.getPipelineManager().containsPipeline(pipelineID));
    Assert.assertTrue(scm.getContainerManager().containerExist(cid));
    Assert.assertEquals(100, scm.getScmMetadataStore().getTransactionInfoTable().get(OzoneConsts.TRANSACTION_INFO_KEY).getTransactionIndex());
    Assert.assertEquals(100, scm.getScmHAManager().asSCMHADBTransactionBuffer().getLatestTrxInfo().getTermIndex().getIndex());
}
Also used: StorageContainerManager (org.apache.hadoop.hdds.scm.server.StorageContainerManager), ContainerID (org.apache.hadoop.hdds.scm.container.ContainerID), DBCheckpoint (org.apache.hadoop.hdds.utils.db.DBCheckpoint), SCMDBDefinition (org.apache.hadoop.hdds.scm.metadata.SCMDBDefinition), PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID), SCMStateMachine (org.apache.hadoop.hdds.scm.ha.SCMStateMachine), DBStore (org.apache.hadoop.hdds.utils.db.DBStore), Test (org.junit.Test)

Example 10 with PipelineID

Use of org.apache.hadoop.hdds.scm.pipeline.PipelineID in project ozone by apache.

From the class TestReconPipelineReportHandler, method testProcessPipelineReport:

@Test
public void testProcessPipelineReport() throws IOException {
    // Check with pipeline which does not exist in Recon.
    Pipeline pipeline = getRandomPipeline();
    PipelineID pipelineID = pipeline.getId();
    HddsProtos.PipelineID pipelineIDProto = pipelineID.getProtobuf();
    ReconPipelineManager reconPipelineManagerMock = mock(ReconPipelineManager.class);
    when(reconPipelineManagerMock.getPipeline(pipelineID)).thenReturn(pipeline);
    StorageContainerServiceProvider scmServiceProviderMock = mock(StorageContainerServiceProvider.class);
    when(scmServiceProviderMock.getPipeline(pipelineIDProto)).thenReturn(pipeline);
    OzoneConfiguration configuration = new OzoneConfiguration();
    ReconPipelineReportHandler handler = new ReconPipelineReportHandler(new ReconSafeModeManager(), reconPipelineManagerMock, SCMContext.emptyContext(), configuration, scmServiceProviderMock);
    EventPublisher eventPublisherMock = mock(EventPublisher.class);
    PipelineReport report = mock(PipelineReport.class);
    when(report.getPipelineID()).thenReturn(pipelineIDProto);
    handler.processPipelineReport(report, pipeline.getNodes().get(0), eventPublisherMock);
    // Verify that the new pipeline was added to pipeline manager.
    verify(reconPipelineManagerMock, times(1)).addPipeline(pipeline);
    verify(reconPipelineManagerMock, times(1)).getPipeline(pipelineID);
    // Check with pipeline which already exists in Recon.
    pipeline = getRandomPipeline();
    pipelineID = pipeline.getId();
    pipelineIDProto = pipelineID.getProtobuf();
    when(reconPipelineManagerMock.containsPipeline(pipelineID)).thenReturn(true);
    when(reconPipelineManagerMock.getPipeline(pipelineID)).thenReturn(pipeline);
    when(report.getPipelineID()).thenReturn(pipelineIDProto);
    handler.processPipelineReport(report, pipeline.getNodes().get(0), eventPublisherMock);
    // Verify that the pipeline was not added to pipeline manager.
    verify(reconPipelineManagerMock, times(0)).addPipeline(pipeline);
    verify(reconPipelineManagerMock, times(1)).getPipeline(pipelineID);
}
Also used: PipelineReport (org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReport), StorageContainerServiceProvider (org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider), EventPublisher (org.apache.hadoop.hdds.server.events.EventPublisher), HddsProtos (org.apache.hadoop.hdds.protocol.proto.HddsProtos), PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID), OzoneConfiguration (org.apache.hadoop.hdds.conf.OzoneConfiguration), OMMetadataManagerTestUtils.getRandomPipeline (org.apache.hadoop.ozone.recon.OMMetadataManagerTestUtils.getRandomPipeline), Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline), Test (org.junit.Test)
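
As a reading aid, the following is a simplified sketch of the control flow the test above asserts, not the actual ReconPipelineReportHandler implementation: a reported pipeline is added to Recon only when its PipelineID is not yet known, by fetching the full Pipeline from SCM via the protobuf PipelineID, and it is then looked up locally exactly once. The class and method names come from the test itself; the package locations and anything beyond what the test verifies should be treated as assumptions.

import java.io.IOException;

import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReport;
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.hadoop.ozone.recon.scm.ReconPipelineManager;
import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;

// Sketch only: mirrors the behaviour verified by testProcessPipelineReport.
class PipelineReportFlowSketch {
    private final ReconPipelineManager reconPipelineManager;
    private final StorageContainerServiceProvider scmServiceProvider;

    PipelineReportFlowSketch(ReconPipelineManager reconPipelineManager,
            StorageContainerServiceProvider scmServiceProvider) {
        this.reconPipelineManager = reconPipelineManager;
        this.scmServiceProvider = scmServiceProvider;
    }

    Pipeline handleReport(PipelineReport report) throws IOException {
        HddsProtos.PipelineID idProto = report.getPipelineID();
        PipelineID pipelineID = PipelineID.getFromProtobuf(idProto);
        if (!reconPipelineManager.containsPipeline(pipelineID)) {
            // Unknown pipeline: fetch the full definition from SCM and register it
            // (the addPipeline(pipeline) call the first half of the test verifies).
            Pipeline fromScm = scmServiceProvider.getPipeline(idProto);
            reconPipelineManager.addPipeline(fromScm);
        }
        // Known, or just added: look it up locally exactly once
        // (the times(1) verification in both halves of the test).
        return reconPipelineManager.getPipeline(pipelineID);
    }
}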

Aggregations

PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID): 35 usages
Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline): 15 usages
Test (org.junit.Test): 13 usages
IOException (java.io.IOException): 12 usages
DatanodeDetails (org.apache.hadoop.hdds.protocol.DatanodeDetails): 11 usages
HddsProtos (org.apache.hadoop.hdds.protocol.proto.HddsProtos): 6 usages
PipelineNotFoundException (org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException): 6 usages
XceiverServerSpi (org.apache.hadoop.ozone.container.common.transport.server.XceiverServerSpi): 5 usages
ArrayList (java.util.ArrayList): 4 usages
HashMap (java.util.HashMap): 4 usages
UUID (java.util.UUID): 4 usages
OzoneConfiguration (org.apache.hadoop.hdds.conf.OzoneConfiguration): 4 usages
List (java.util.List): 3 usages
ContainerID (org.apache.hadoop.hdds.scm.container.ContainerID): 3 usages
RaftGroup (org.apache.ratis.protocol.RaftGroup): 3 usages
VisibleForTesting (com.google.common.annotations.VisibleForTesting): 2 usages
ConfigurationSource (org.apache.hadoop.hdds.conf.ConfigurationSource): 2 usages
MockDatanodeDetails (org.apache.hadoop.hdds.protocol.MockDatanodeDetails): 2 usages
ClosePipelineInfo (org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ClosePipelineInfo): 2 usages
CreatePipelineCommandProto (org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.CreatePipelineCommandProto): 2 usages