
Example 96 with Pipeline

Use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

Class SCMClientProtocolServer, method getContainerWithPipelineCommon.

private ContainerWithPipeline getContainerWithPipelineCommon(long containerID) throws IOException {
    final ContainerID cid = ContainerID.valueOf(containerID);
    final ContainerInfo container = scm.getContainerManager().getContainer(cid);
    // In safe mode, an open container can only be served once enough of its
    // replicas have been reported to SCM.
    if (scm.getScmContext().isInSafeMode()) {
        if (container.isOpen()) {
            if (!hasRequiredReplicas(container)) {
                throw new SCMException("Open container " + containerID + " doesn't" + " have enough replicas to service this operation in " + "Safe mode.", ResultCodes.SAFE_MODE_EXCEPTION);
            }
        }
    }
    Pipeline pipeline;
    try {
        pipeline = container.isOpen() ? scm.getPipelineManager().getPipeline(container.getPipelineID()) : null;
    } catch (PipelineNotFoundException ex) {
        // The pipeline is destroyed.
        pipeline = null;
    }
    if (pipeline == null) {
        pipeline = scm.getPipelineManager().createPipelineForRead(container.getReplicationConfig(), scm.getContainerManager().getContainerReplicas(cid));
    }
    return new ContainerWithPipeline(container, pipeline);
}
Also used : ContainerID(org.apache.hadoop.hdds.scm.container.ContainerID) ContainerInfo(org.apache.hadoop.hdds.scm.container.ContainerInfo) PipelineNotFoundException(org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) SCMException(org.apache.hadoop.hdds.scm.exceptions.SCMException) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)
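
The interesting part of this method is the read fallback: an open container is served through its live write pipeline, while a closed container, or one whose pipeline has already been destroyed, gets an ad-hoc pipeline synthesized from its known replicas via createPipelineForRead. Below is a minimal, self-contained sketch of that decision flow; ReadFallbackSketch and its nested types are illustrative stand-ins, not Ozone classes.

import java.util.List;
import java.util.Optional;

// Hypothetical stand-ins that only illustrate the fallback flow in
// getContainerWithPipelineCommon; these are not Ozone classes.
final class ReadFallbackSketch {

    record Replica(String datanode) { }
    record ReadPipeline(List<Replica> replicas) { }

    interface PipelineLookup {
        // Empty when the write pipeline has been destroyed.
        Optional<ReadPipeline> activePipeline(long containerId);
    }

    interface ReplicaStore {
        List<Replica> replicasOf(long containerId);
    }

    static ReadPipeline resolve(long containerId, boolean containerIsOpen,
                                PipelineLookup pipelines, ReplicaStore replicas) {
        if (containerIsOpen) {
            // Open container: prefer the live write pipeline if it still exists.
            Optional<ReadPipeline> active = pipelines.activePipeline(containerId);
            if (active.isPresent()) {
                return active.get();
            }
        }
        // Closed container, or pipeline already torn down: build a
        // read-only pipeline from whatever replicas are known.
        return new ReadPipeline(replicas.replicasOf(containerId));
    }
}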

Example 97 with Pipeline

Use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

Class SCMClientProtocolServer, method closePipeline.

@Override
public void closePipeline(HddsProtos.PipelineID pipelineID) throws IOException {
    getScm().checkAdminAccess(getRemoteUser());
    Map<String, String> auditMap = Maps.newHashMap();
    auditMap.put("pipelineID", pipelineID.getId());
    PipelineManager pipelineManager = scm.getPipelineManager();
    Pipeline pipeline = pipelineManager.getPipeline(PipelineID.getFromProtobuf(pipelineID));
    pipelineManager.closePipeline(pipeline, true);
    AUDIT.logWriteSuccess(buildAuditMessageForSuccess(SCMAction.CLOSE_PIPELINE, auditMap));
}
Also used : PipelineManager(org.apache.hadoop.hdds.scm.pipeline.PipelineManager) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)
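
The method also follows SCM's audit convention: collect the request parameters into a map, perform the operation, then log the outcome. A minimal sketch of that convention follows, assuming a hypothetical AuditSink in place of Ozone's AuditLogger and AuditMessage types; the failure branch mirrors the pattern used in listContainer further down.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical audit sink; Ozone's real audit types are richer, this only
// mirrors the success/failure bookkeeping pattern shown above.
final class AuditPatternSketch {

    interface AuditSink {
        void writeSuccess(String action, Map<String, String> params);
        void writeFailure(String action, Map<String, String> params, Throwable cause);
    }

    static void closePipelineAudited(String pipelineId, Runnable closeAction,
                                     AuditSink audit) {
        // Collect request parameters up front so both outcomes log them.
        Map<String, String> auditMap = new LinkedHashMap<>();
        auditMap.put("pipelineID", pipelineId);
        try {
            closeAction.run();
            audit.writeSuccess("CLOSE_PIPELINE", auditMap);
        } catch (RuntimeException e) {
            audit.writeFailure("CLOSE_PIPELINE", auditMap, e);
            throw e;
        }
    }
}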

Example 98 with Pipeline

Use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

Class SCMClientProtocolServer, method listContainer.

/**
 * Lists a range of containers and get their info.
 *
 * @param startContainerID start containerID.
 * @param count count must be {@literal >} 0.
 * @param state Containers with this state will be returned.
 * @param factor Containers with this replication factor will be returned.
 * @return a list of container info matching the given filters.
 * @throws IOException in case of any failure.
 */
@Override
@Deprecated
public List<ContainerInfo> listContainer(long startContainerID, int count, HddsProtos.LifeCycleState state, HddsProtos.ReplicationFactor factor) throws IOException {
    boolean auditSuccess = true;
    Map<String, String> auditMap = Maps.newHashMap();
    auditMap.put("startContainerID", String.valueOf(startContainerID));
    auditMap.put("count", String.valueOf(count));
    if (state != null) {
        auditMap.put("state", state.name());
    }
    if (factor != null) {
        auditMap.put("factor", factor.name());
    }
    try {
        final ContainerID containerId = ContainerID.valueOf(startContainerID);
        if (state != null) {
            if (factor != null) {
                // Filter by state and replication factor; EC containers have no
                // replication factor, so they are skipped when a factor is given.
                return scm.getContainerManager().getContainers(state).stream()
                        .filter(info -> info.containerID().getId() >= startContainerID)
                        .filter(info -> info.getReplicationType() != HddsProtos.ReplicationType.EC)
                        .filter(info -> info.getReplicationFactor() == factor)
                        .sorted()
                        .limit(count)
                        .collect(Collectors.toList());
            } else {
                // Filter by state only.
                return scm.getContainerManager().getContainers(state).stream()
                        .filter(info -> info.containerID().getId() >= startContainerID)
                        .sorted()
                        .limit(count)
                        .collect(Collectors.toList());
            }
        } else {
            if (factor != null) {
                // Filter by replication factor only, again skipping EC containers.
                return scm.getContainerManager().getContainers().stream()
                        .filter(info -> info.containerID().getId() >= startContainerID)
                        .filter(info -> info.getReplicationType() != HddsProtos.ReplicationType.EC)
                        .filter(info -> info.getReplicationFactor() == factor)
                        .sorted()
                        .limit(count)
                        .collect(Collectors.toList());
            } else {
                // No filters: delegate the ranged listing to the container manager.
                return scm.getContainerManager().getContainers(containerId, count);
            }
        }
    } catch (Exception ex) {
        auditSuccess = false;
        AUDIT.logReadFailure(buildAuditMessageForFailure(SCMAction.LIST_CONTAINER, auditMap, ex));
        throw ex;
    } finally {
        if (auditSuccess) {
            AUDIT.logReadSuccess(buildAuditMessageForSuccess(SCMAction.LIST_CONTAINER, auditMap));
        }
    }
}
Also used : ContainerID(org.apache.hadoop.hdds.scm.container.ContainerID) ContainerInfo(org.apache.hadoop.hdds.scm.container.ContainerInfo) HddsProtos(org.apache.hadoop.hdds.protocol.proto.HddsProtos) Maps(com.google.common.collect.Maps) Map(java.util.Map) List(java.util.List) Collectors(java.util.stream.Collectors) SCMAction(org.apache.hadoop.ozone.audit.SCMAction) IOException(java.io.IOException)
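
The filtering logic is a plain stream pipeline: start at a container ID, optionally restrict by state and replication factor (skipping EC containers, which have no factor), sort, and cap the result at count. A self-contained sketch of the same pagination idiom over a simplified container record follows; the record and enums are illustrative stand-ins, not Ozone types.

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

final class ListContainersSketch {

    enum State { OPEN, CLOSED }
    enum Factor { ONE, THREE }

    // Simplified stand-in for ContainerInfo.
    record Container(long id, State state, Factor factor) { }

    // Returns up to 'count' containers with id >= startId, optionally
    // restricted to a state and replication factor, in id order.
    static List<Container> list(List<Container> all, long startId, int count,
                                State state, Factor factor) {
        return all.stream()
                .filter(c -> c.id() >= startId)
                .filter(c -> state == null || c.state() == state)
                .filter(c -> factor == null || c.factor() == factor)
                .sorted(Comparator.comparingLong(Container::id))
                .limit(count)
                .collect(Collectors.toList());
    }
}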

Example 99 with Pipeline

Use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

Class StorageContainerLocationProtocolServerSideTranslatorPB, method listPipelines.

public ListPipelineResponseProto listPipelines(ListPipelineRequestProto request, int clientVersion) throws IOException {
    ListPipelineResponseProto.Builder builder = ListPipelineResponseProto.newBuilder();
    List<Pipeline> pipelines = impl.listPipelines();
    for (Pipeline pipeline : pipelines) {
        builder.addPipelines(pipeline.getProtobufMessage(clientVersion));
    }
    return builder.build();
}
Also used : ListPipelineResponseProto(org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.ListPipelineResponseProto) GetContainerWithPipeline(org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.Type.GetContainerWithPipeline) GetPipeline(org.apache.hadoop.hdds.protocol.proto.StorageContainerLocationProtocolProtos.Type.GetPipeline) ContainerWithPipeline(org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)
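
The translator does nothing more than map each domain Pipeline to its wire representation and accumulate the results in a response. The sketch below shows the same shape with hypothetical wire and domain types standing in for the generated protobuf classes.

import java.util.ArrayList;
import java.util.List;

// Hypothetical wire/domain types standing in for the generated
// ListPipelineResponseProto and the Pipeline domain object.
final class ListTranslatorSketch {

    record PipelineWire(String id, int clientVersion) { }
    record PipelineDomain(String id) { }

    // Server-side translator pattern: take the domain objects, convert each
    // one to its wire representation, and return the assembled response.
    static List<PipelineWire> listPipelines(List<PipelineDomain> pipelines,
                                            int clientVersion) {
        List<PipelineWire> response = new ArrayList<>();
        for (PipelineDomain p : pipelines) {
            response.add(new PipelineWire(p.id(), clientVersion));
        }
        return response;
    }
}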

Example 100 with Pipeline

Use of org.apache.hadoop.hdds.scm.pipeline.Pipeline in project ozone by apache.

Class OneReplicaPipelineSafeModeRule, method process.

@Override
protected synchronized void process(PipelineReportFromDatanode report) {
    Preconditions.checkNotNull(report);
    for (PipelineReport report1 : report.getReport().getPipelineReportList()) {
        Pipeline pipeline;
        try {
            pipeline = pipelineManager.getPipeline(PipelineID.getFromProtobuf(report1.getPipelineID()));
        } catch (PipelineNotFoundException pnfe) {
            continue;
        }
        // Count a Ratis/THREE pipeline the first time any datanode reports it
        // open, but only if it already existed when safe mode started.
        if (RatisReplicationConfig.hasFactor(pipeline.getReplicationConfig(), ReplicationFactor.THREE) && pipeline.isOpen() && !reportedPipelineIDSet.contains(pipeline.getId())) {
            if (oldPipelineIDSet.contains(pipeline.getId())) {
                getSafeModeMetrics().incCurrentHealthyPipelinesWithAtleastOneReplicaReportedCount();
                currentReportedPipelineCount++;
                reportedPipelineIDSet.add(pipeline.getId());
            }
        }
    }
    if (scmInSafeMode()) {
        SCMSafeModeManager.getLogger().info("SCM in safe mode. Pipelines with at least one datanode reported " + "count is {}, required at least one datanode reported per " + "pipeline count is {}", currentReportedPipelineCount, thresholdCount);
    }
}
Also used : PipelineReport(org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.PipelineReport) PipelineNotFoundException(org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException) Pipeline(org.apache.hadoop.hdds.scm.pipeline.Pipeline)
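
The rule keeps two sets: the pipelines that existed when safe mode began (oldPipelineIDSet) and the pipelines that have since been reported open by at least one datanode (reportedPipelineIDSet); the rule is satisfied once the reported count reaches a threshold. A compact, self-contained sketch of that bookkeeping follows; the class and field names are hypothetical and the 90% threshold is only an example, not a statement of Ozone's default.

import java.util.HashSet;
import java.util.Set;

// Hypothetical, simplified version of the one-replica-per-pipeline
// safe mode bookkeeping; names do not match the Ozone classes.
final class OneReplicaRuleSketch {

    private final Set<String> knownAtStartup;             // oldPipelineIDSet analogue
    private final Set<String> reported = new HashSet<>(); // reportedPipelineIDSet analogue
    private final int threshold;

    OneReplicaRuleSketch(Set<String> knownAtStartup, double thresholdPct) {
        this.knownAtStartup = knownAtStartup;
        // e.g. 90% of the pipelines that existed when safe mode started.
        this.threshold = (int) Math.ceil(knownAtStartup.size() * thresholdPct);
    }

    // Called once per pipeline report received from a datanode.
    void onPipelineReported(String pipelineId, boolean open) {
        if (open && knownAtStartup.contains(pipelineId)) {
            reported.add(pipelineId); // the Set dedupes repeated reports
        }
    }

    boolean satisfied() {
        return reported.size() >= threshold;
    }
}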

Aggregations

Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline) 199
DatanodeDetails (org.apache.hadoop.hdds.protocol.DatanodeDetails) 73
Test (org.junit.Test) 68
ArrayList (java.util.ArrayList) 56
ContainerInfo (org.apache.hadoop.hdds.scm.container.ContainerInfo) 51
IOException (java.io.IOException) 47
Test (org.junit.jupiter.api.Test) 42
BlockID (org.apache.hadoop.hdds.client.BlockID) 41
OmKeyLocationInfo (org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo) 40
ContainerWithPipeline (org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline) 39
OzoneOutputStream (org.apache.hadoop.ozone.client.io.OzoneOutputStream) 37
XceiverClientSpi (org.apache.hadoop.hdds.scm.XceiverClientSpi) 32
OzoneConfiguration (org.apache.hadoop.hdds.conf.OzoneConfiguration) 29
MockPipeline (org.apache.hadoop.hdds.scm.pipeline.MockPipeline) 28
KeyOutputStream (org.apache.hadoop.ozone.client.io.KeyOutputStream) 27
List (java.util.List) 26
OmKeyInfo (org.apache.hadoop.ozone.om.helpers.OmKeyInfo) 26
ContainerProtos (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos) 24
HashMap (java.util.HashMap) 23
PipelineID (org.apache.hadoop.hdds.scm.pipeline.PipelineID) 21