
Example 1 with BlockReportResponseProto

Use of org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto in project hadoop by apache.

From the class TestBlockListAsLongs, method testDatanodeDetect. The test mocks the DatanodeProtocolPB proxy to capture the outgoing BlockReportRequestProto and verifies that the DataNode sends buffer-based block reports when the NameNode advertises STORAGE_BLOCK_REPORT_BUFFERS, and falls back to the flat long-list encoding when it does not.

@Test
public void testDatanodeDetect() throws ServiceException, IOException {
    final AtomicReference<BlockReportRequestProto> request = new AtomicReference<>();
    // just capture the outgoing PB
    DatanodeProtocolPB mockProxy = mock(DatanodeProtocolPB.class);
    doAnswer(new Answer<BlockReportResponseProto>() {

        @Override
        public BlockReportResponseProto answer(InvocationOnMock invocation) {
            Object[] args = invocation.getArguments();
            request.set((BlockReportRequestProto) args[1]);
            return BlockReportResponseProto.newBuilder().build();
        }
    }).when(mockProxy).blockReport(any(RpcController.class), any(BlockReportRequestProto.class));
    @SuppressWarnings("resource")
    DatanodeProtocolClientSideTranslatorPB nn =
        new DatanodeProtocolClientSideTranslatorPB(mockProxy);
    DatanodeRegistration reg = DFSTestUtil.getLocalDatanodeRegistration();
    NamespaceInfo nsInfo = new NamespaceInfo(1, "cluster", "bp", 1);
    reg.setNamespaceInfo(nsInfo);
    Replica r = new FinalizedReplica(new Block(1, 2, 3), null, null);
    BlockListAsLongs bbl = BlockListAsLongs.encode(Collections.singleton(r));
    DatanodeStorage storage = new DatanodeStorage("s1");
    StorageBlockReport[] sbr = { new StorageBlockReport(storage, bbl) };
    // check DN sends new-style BR
    request.set(null);
    nsInfo.setCapabilities(Capability.STORAGE_BLOCK_REPORT_BUFFERS.getMask());
    nn.blockReport(reg, "pool", sbr, new BlockReportContext(1, 0, System.nanoTime(), 0L, true));
    BlockReportRequestProto proto = request.get();
    assertNotNull(proto);
    assertTrue(proto.getReports(0).getBlocksList().isEmpty());
    assertFalse(proto.getReports(0).getBlocksBuffersList().isEmpty());
    // back up to prior version and check DN sends old-style BR
    request.set(null);
    nsInfo.setCapabilities(Capability.UNKNOWN.getMask());
    BlockListAsLongs blockList = getBlockList(r);
    StorageBlockReport[] obp = new StorageBlockReport[] { new StorageBlockReport(new DatanodeStorage("s1"), blockList) };
    nn.blockReport(reg, "pool", obp, new BlockReportContext(1, 0, System.nanoTime(), 0L, true));
    proto = request.get();
    assertNotNull(proto);
    assertFalse(proto.getReports(0).getBlocksList().isEmpty());
    assertTrue(proto.getReports(0).getBlocksBuffersList().isEmpty());
}
Also used: StorageBlockReport (org.apache.hadoop.hdfs.server.protocol.StorageBlockReport), AtomicReference (java.util.concurrent.atomic.AtomicReference), FinalizedReplica (org.apache.hadoop.hdfs.server.datanode.FinalizedReplica), BlockReportReplica (org.apache.hadoop.hdfs.protocol.BlockListAsLongs.BlockReportReplica), Replica (org.apache.hadoop.hdfs.server.datanode.Replica), RpcController (com.google.protobuf.RpcController), DatanodeProtocolClientSideTranslatorPB (org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB), DatanodeProtocolPB (org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB), DatanodeRegistration (org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration), BlockReportRequestProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportRequestProto), InvocationOnMock (org.mockito.invocation.InvocationOnMock), BlockReportContext (org.apache.hadoop.hdfs.server.protocol.BlockReportContext), DatanodeStorage (org.apache.hadoop.hdfs.server.protocol.DatanodeStorage), BlockReportResponseProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto), NamespaceInfo (org.apache.hadoop.hdfs.server.protocol.NamespaceInfo), Test (org.junit.Test)
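
To make the two encodings the test asserts on concrete, here is a minimal standalone sketch, not taken from the Hadoop sources (the class name BlockReportEncodingSketch is illustrative): it encodes the same single replica with BlockListAsLongs.encode and walks both the flat long list and the ByteString buffer form.

// Hypothetical sketch, not part of the Hadoop test: encode one replica and
// inspect both wire forms that the client-side translator can send.
import java.util.Collections;

import com.google.protobuf.ByteString;

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
import org.apache.hadoop.hdfs.server.datanode.Replica;

public class BlockReportEncodingSketch {

    public static void main(String[] args) {
        // Same replica setup as the test above.
        Replica r = new FinalizedReplica(new Block(1, 2, 3), null, null);
        BlockListAsLongs bbl = BlockListAsLongs.encode(Collections.singleton(r));

        // Old-style wire form: the flat longs sent via reportBuilder.addBlocks(...).
        int longCount = 0;
        for (long ignored : bbl.getBlockListAsLongs()) {
            longCount++;
        }

        // New-style wire form: the ByteString buffers sent via addAllBlocksBuffers(...).
        int bufferCount = 0;
        for (ByteString ignored : bbl.getBlocksBuffers()) {
            bufferCount++;
        }

        System.out.println("blocks=" + bbl.getNumberOfBlocks()
            + ", longs=" + longCount + ", buffers=" + bufferCount);
    }
}

The translator in Example 2 then puts exactly one of these two forms into the outgoing request, which is what the test's assertions on getBlocksList() and getBlocksBuffersList() distinguish.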

Example 2 with BlockReportResponseProto

Use of org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto in project hadoop by apache.

From the class DatanodeProtocolClientSideTranslatorPB, method blockReport. The client-side translator builds the BlockReportRequestProto, choosing the buffer-based encoding when the NameNode supports STORAGE_BLOCK_REPORT_BUFFERS and the flat long list otherwise, sends it over the RPC proxy, and converts the optional command in the BlockReportResponseProto back into a DatanodeCommand.

@Override
public DatanodeCommand blockReport(DatanodeRegistration registration, String poolId, StorageBlockReport[] reports, BlockReportContext context) throws IOException {
    BlockReportRequestProto.Builder builder = BlockReportRequestProto.newBuilder().setRegistration(PBHelper.convert(registration)).setBlockPoolId(poolId);
    boolean useBlocksBuffer = registration.getNamespaceInfo().isCapabilitySupported(Capability.STORAGE_BLOCK_REPORT_BUFFERS);
    for (StorageBlockReport r : reports) {
        StorageBlockReportProto.Builder reportBuilder = StorageBlockReportProto.newBuilder().setStorage(PBHelperClient.convert(r.getStorage()));
        BlockListAsLongs blocks = r.getBlocks();
        if (useBlocksBuffer) {
            reportBuilder.setNumberOfBlocks(blocks.getNumberOfBlocks());
            reportBuilder.addAllBlocksBuffers(blocks.getBlocksBuffers());
        } else {
            for (long value : blocks.getBlockListAsLongs()) {
                reportBuilder.addBlocks(value);
            }
        }
        builder.addReports(reportBuilder.build());
    }
    builder.setContext(PBHelper.convert(context));
    BlockReportResponseProto resp;
    try {
        resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build());
    } catch (ServiceException se) {
        throw ProtobufHelper.getRemoteException(se);
    }
    return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
}
Also used: BlockReportRequestProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportRequestProto), ServiceException (com.google.protobuf.ServiceException), StorageBlockReport (org.apache.hadoop.hdfs.server.protocol.StorageBlockReport), BlockListAsLongs (org.apache.hadoop.hdfs.protocol.BlockListAsLongs), BlockReportResponseProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto), StorageBlockReportProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageBlockReportProto)
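
The useBlocksBuffer branch above is driven entirely by the capability bits carried in the DataNode's registration. The following sketch, assuming the illustrative class name CapabilityCheckSketch (it is not part of the Hadoop sources), shows the same NamespaceInfo calls the test in Example 1 uses to flip that decision:

// Hypothetical sketch: how the advertised capability mask decides which
// block-report encoding the client-side translator emits.
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo.Capability;

public class CapabilityCheckSketch {

    public static void main(String[] args) {
        // Same constructor arguments as the test: (nsID, clusterID, bpID, cT).
        NamespaceInfo nsInfo = new NamespaceInfo(1, "cluster", "bp", 1);

        // NameNode advertises buffer-based block reports: the translator takes
        // the addAllBlocksBuffers(...) path.
        nsInfo.setCapabilities(Capability.STORAGE_BLOCK_REPORT_BUFFERS.getMask());
        System.out.println(nsInfo.isCapabilitySupported(
            Capability.STORAGE_BLOCK_REPORT_BUFFERS));   // expected: true

        // Older NameNode advertising only the UNKNOWN bit: the translator falls
        // back to the flat addBlocks(...) long list.
        nsInfo.setCapabilities(Capability.UNKNOWN.getMask());
        System.out.println(nsInfo.isCapabilitySupported(
            Capability.STORAGE_BLOCK_REPORT_BUFFERS));   // expected: false
    }
}

When the check returns false, the legacy long-list encoding is sent, which older NameNodes can still decode.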

Example 3 with BlockReportResponseProto

Use of org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto in project hadoop by apache.

From the class DatanodeProtocolServerSideTranslatorPB, method blockReport. The server-side translator decodes each StorageBlockReportProto, using BlockListAsLongs.decodeBuffers for new-style buffer reports and decodeLongs for the legacy long list, invokes the NameNode implementation, and wraps any returned DatanodeCommand in the BlockReportResponseProto.

@Override
public BlockReportResponseProto blockReport(RpcController controller, BlockReportRequestProto request) throws ServiceException {
    DatanodeCommand cmd = null;
    StorageBlockReport[] report = new StorageBlockReport[request.getReportsCount()];
    int index = 0;
    for (StorageBlockReportProto s : request.getReportsList()) {
        final BlockListAsLongs blocks;
        if (s.hasNumberOfBlocks()) {
            // new style buffer based reports
            int num = (int) s.getNumberOfBlocks();
            Preconditions.checkState(s.getBlocksCount() == 0, "cannot send both blocks list and buffers");
            blocks = BlockListAsLongs.decodeBuffers(num, s.getBlocksBuffersList(), maxDataLength);
        } else {
            blocks = BlockListAsLongs.decodeLongs(s.getBlocksList(), maxDataLength);
        }
        report[index++] = new StorageBlockReport(PBHelperClient.convert(s.getStorage()), blocks);
    }
    try {
        cmd = impl.blockReport(PBHelper.convert(request.getRegistration()), request.getBlockPoolId(), report, request.hasContext() ? PBHelper.convert(request.getContext()) : null);
    } catch (IOException e) {
        throw new ServiceException(e);
    }
    BlockReportResponseProto.Builder builder = BlockReportResponseProto.newBuilder();
    if (cmd != null) {
        builder.setCmd(PBHelper.convert(cmd));
    }
    return builder.build();
}
Also used: DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand), ServiceException (com.google.protobuf.ServiceException), StorageBlockReport (org.apache.hadoop.hdfs.server.protocol.StorageBlockReport), BlockListAsLongs (org.apache.hadoop.hdfs.protocol.BlockListAsLongs), IOException (java.io.IOException), BlockReportResponseProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto), StorageBlockReportProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageBlockReportProto)
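
For completeness, here is a hypothetical round-trip sketch (the class name BlockReportDecodeSketch and the maxDataLength value are assumptions for illustration, not taken from the Hadoop sources) that encodes a replica list as in Example 1 and then decodes it through both of the server-side paths shown above:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs.BlockReportReplica;
import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
import org.apache.hadoop.hdfs.server.datanode.Replica;

public class BlockReportDecodeSketch {

    public static void main(String[] args) {
        // Assumed cap on decoded message size; the real translator takes this
        // from its maxDataLength field.
        int maxDataLength = 64 * 1024 * 1024;

        Replica r = new FinalizedReplica(new Block(1, 2, 3), null, null);
        BlockListAsLongs encoded = BlockListAsLongs.encode(Collections.singleton(r));

        // Old-style path: rebuild the List<Long> form that decodeLongs(...) expects.
        List<Long> longs = new ArrayList<>();
        for (long value : encoded.getBlockListAsLongs()) {
            longs.add(value);
        }
        BlockListAsLongs fromLongs =
            BlockListAsLongs.decodeLongs(longs, maxDataLength);

        // New-style path: buffers plus an explicit block count, as decodeBuffers(...)
        // receives them from the StorageBlockReportProto.
        BlockListAsLongs fromBuffers = BlockListAsLongs.decodeBuffers(
            encoded.getNumberOfBlocks(), encoded.getBlocksBuffers(), maxDataLength);

        // Both paths should yield the same single block.
        for (BlockReportReplica rep : fromBuffers) {
            System.out.println("decoded block id=" + rep.getBlockId());
        }
        System.out.println("fromLongs blocks=" + fromLongs.getNumberOfBlocks());
    }
}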

Aggregations

BlockReportResponseProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportResponseProto): 3 usages
StorageBlockReport (org.apache.hadoop.hdfs.server.protocol.StorageBlockReport): 3 usages
ServiceException (com.google.protobuf.ServiceException): 2 usages
BlockListAsLongs (org.apache.hadoop.hdfs.protocol.BlockListAsLongs): 2 usages
BlockReportRequestProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockReportRequestProto): 2 usages
StorageBlockReportProto (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.StorageBlockReportProto): 2 usages
RpcController (com.google.protobuf.RpcController): 1 usage
IOException (java.io.IOException): 1 usage
AtomicReference (java.util.concurrent.atomic.AtomicReference): 1 usage
BlockReportReplica (org.apache.hadoop.hdfs.protocol.BlockListAsLongs.BlockReportReplica): 1 usage
DatanodeProtocolClientSideTranslatorPB (org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB): 1 usage
DatanodeProtocolPB (org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB): 1 usage
FinalizedReplica (org.apache.hadoop.hdfs.server.datanode.FinalizedReplica): 1 usage
Replica (org.apache.hadoop.hdfs.server.datanode.Replica): 1 usage
BlockReportContext (org.apache.hadoop.hdfs.server.protocol.BlockReportContext): 1 usage
DatanodeCommand (org.apache.hadoop.hdfs.server.protocol.DatanodeCommand): 1 usage
DatanodeRegistration (org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration): 1 usage
DatanodeStorage (org.apache.hadoop.hdfs.server.protocol.DatanodeStorage): 1 usage
NamespaceInfo (org.apache.hadoop.hdfs.server.protocol.NamespaceInfo): 1 usage
Test (org.junit.Test): 1 usage