Use of org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockGroupChecksumProto in project hadoop by apache.
The class Sender, method blockGroupChecksum:
@Override
public void blockGroupChecksum(StripedBlockInfo stripedBlockInfo,
    Token<BlockTokenIdentifier> blockToken, long requestedNumBytes)
    throws IOException {
  OpBlockGroupChecksumProto proto = OpBlockGroupChecksumProto.newBuilder()
      .setHeader(DataTransferProtoUtil.buildBaseHeader(
          stripedBlockInfo.getBlock(), blockToken))
      .setDatanodes(PBHelperClient.convertToProto(
          stripedBlockInfo.getDatanodes()))
      .addAllBlockTokens(PBHelperClient.convert(
          stripedBlockInfo.getBlockTokens()))
      .addAllBlockIndices(PBHelperClient.convertBlockIndices(
          stripedBlockInfo.getBlockIndices()))
      .setEcPolicy(PBHelperClient.convertErasureCodingPolicy(
          stripedBlockInfo.getErasureCodingPolicy()))
      .setRequestedNumBytes(requestedNumBytes)
      .build();
  send(out, Op.BLOCK_GROUP_CHECKSUM, proto);
}
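The send call frames the request on the data transfer stream. Below is a minimal sketch of that framing, assuming the usual layout of a protocol version short, one opcode byte, and a varint-length-prefixed protobuf message, and assuming Op exposes write(DataOutput); writeChecksumOp is a hypothetical helper, not part of Sender's API.

import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.datatransfer.DataTransferProtocol;
import org.apache.hadoop.hdfs.protocol.datatransfer.Op;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockGroupChecksumProto;

// Hypothetical helper sketching what send(out, Op.BLOCK_GROUP_CHECKSUM, proto)
// is assumed to put on the wire: the protocol version short, the opcode byte,
// then the varint-length-prefixed protobuf message.
static void writeChecksumOp(DataOutputStream out, OpBlockGroupChecksumProto proto)
    throws IOException {
  out.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION); // protocol version
  Op.BLOCK_GROUP_CHECKSUM.write(out);   // one opcode byte
  proto.writeDelimitedTo(out);          // varint length prefix + message bytes
  out.flush();
}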
Use of org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockGroupChecksumProto in project hadoop by apache.
The class Receiver, method opStripedBlockChecksum:
/** Receive OP_STRIPED_BLOCK_CHECKSUM. */
private void opStripedBlockChecksum(DataInputStream dis) throws IOException {
  OpBlockGroupChecksumProto proto =
      OpBlockGroupChecksumProto.parseFrom(vintPrefixed(dis));
  TraceScope traceScope = continueTraceSpan(proto.getHeader(),
      proto.getClass().getSimpleName());
  StripedBlockInfo stripedBlockInfo = new StripedBlockInfo(
      PBHelperClient.convert(proto.getHeader().getBlock()),
      PBHelperClient.convert(proto.getDatanodes()),
      PBHelperClient.convertTokens(proto.getBlockTokensList()),
      PBHelperClient.convertBlockIndices(proto.getBlockIndicesList()),
      PBHelperClient.convertErasureCodingPolicy(proto.getEcPolicy()));
  try {
    blockGroupChecksum(stripedBlockInfo,
        PBHelperClient.convert(proto.getHeader().getToken()),
        proto.getRequestedNumBytes());
  } finally {
    if (traceScope != null) {
      traceScope.close();
    }
  }
}
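On the receiving side, parseFrom(vintPrefixed(dis)) strips the varint length prefix before parsing the message. Below is a minimal stand-alone equivalent, assuming only the parseDelimitedFrom method that protobuf generates on the message class; readChecksumOp is a hypothetical helper, not part of Receiver's API.

import java.io.DataInputStream;
import java.io.IOException;
import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpBlockGroupChecksumProto;

// Hypothetical helper: parseDelimitedFrom reads the varint length prefix and
// then the message body, mirroring what parseFrom(vintPrefixed(dis)) does above.
static OpBlockGroupChecksumProto readChecksumOp(DataInputStream dis)
    throws IOException {
  return OpBlockGroupChecksumProto.parseDelimitedFrom(dis);
}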