Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations in project hadoop by apache.
In class BlockManager, the method getBlocksWithLocations:
/** Get all blocks with location information from a datanode. */
public BlocksWithLocations getBlocksWithLocations(final DatanodeID datanode,
    final long size) throws UnregisteredNodeException {
  final DatanodeDescriptor node = getDatanodeManager().getDatanode(datanode);
  if (node == null) {
    blockLog.warn("BLOCK* getBlocks: Asking for blocks from an"
        + " unrecorded node {}", datanode);
    throw new HadoopIllegalArgumentException(
        "Datanode " + datanode + " not found.");
  }

  int numBlocks = node.numBlocks();
  if (numBlocks == 0) {
    return new BlocksWithLocations(new BlockWithLocations[0]);
  }
  Iterator<BlockInfo> iter = node.getBlockIterator();
  // starting from a random block
  int startBlock = ThreadLocalRandom.current().nextInt(numBlocks);
  // skip the first startBlock blocks
  for (int i = 0; i < startBlock; i++) {
    iter.next();
  }
  List<BlockWithLocations> results = new ArrayList<BlockWithLocations>();
  long totalSize = 0;
  BlockInfo curBlock;
  while (totalSize < size && iter.hasNext()) {
    curBlock = iter.next();
    if (!curBlock.isComplete()) {
      // skip blocks that are still under construction
      continue;
    }
    if (curBlock.getNumBytes() < getBlocksMinBlockSize) {
      // skip blocks below the configured minimum size
      continue;
    }
    totalSize += addBlock(curBlock, results);
  }
  if (totalSize < size) {
    // not enough bytes yet: wrap around and scan the skipped prefix
    iter = node.getBlockIterator();
    for (int i = 0; i < startBlock && totalSize < size; i++) {
      curBlock = iter.next();
      if (!curBlock.isComplete()) {
        continue;
      }
      if (curBlock.getNumBytes() < getBlocksMinBlockSize) {
        continue;
      }
      totalSize += addBlock(curBlock, results);
    }
  }
  return new BlocksWithLocations(
      results.toArray(new BlockWithLocations[results.size()]));
}
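The method samples a random starting offset and, if the first pass does not accumulate the requested number of bytes, wraps around to the prefix it skipped. Every block on the node is therefore considered at most once per call, while successive calls start at different positions. A minimal, self-contained sketch of the same wrap-around pattern over a plain list; the class name and the pickUpTo helper are hypothetical, not Hadoop API:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

public class WrapAroundScan {
  /**
   * Collect elements until their total weight reaches 'size', starting
   * at a random offset and wrapping around to the skipped prefix.
   * 'pickUpTo' is a hypothetical helper, not part of Hadoop.
   */
  static List<Long> pickUpTo(List<Long> weights, long size) {
    List<Long> results = new ArrayList<>();
    int n = weights.size();
    if (n == 0) {
      return results;
    }
    int start = ThreadLocalRandom.current().nextInt(n);
    long total = 0;
    // visit at most n elements, beginning at the random offset
    for (int i = 0; i < n && total < size; i++) {
      long w = weights.get((start + i) % n);
      results.add(w);
      total += w;
    }
    return results;
  }

  public static void main(String[] args) {
    List<Long> blockSizes = List.of(10L, 20L, 30L, 40L);
    System.out.println(pickUpTo(blockSizes, 50));
  }
}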
Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations in project hadoop by apache.
In class NamenodeProtocolServerSideTranslatorPB, the method getBlocks:
@Override
public GetBlocksResponseProto getBlocks(RpcController unused,
    GetBlocksRequestProto request) throws ServiceException {
  DatanodeInfo dnInfo = new DatanodeInfoBuilder()
      .setNodeID(PBHelperClient.convert(request.getDatanode()))
      .build();
  BlocksWithLocations blocks;
  try {
    blocks = impl.getBlocks(dnInfo, request.getSize());
  } catch (IOException e) {
    throw new ServiceException(e);
  }
  return GetBlocksResponseProto.newBuilder()
      .setBlocks(PBHelper.convert(blocks)).build();
}
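This is the server side of the getBlocks RPC: the protobuf request is unpacked into typed arguments, the call is delegated to the NamenodeProtocol implementation, and the result is converted back into a response proto. A caller such as the Balancer reaches this code through a NamenodeProtocol proxy. A hedged sketch of such a caller follows; the NameNode address is assumed, and the proxy-creation details vary across Hadoop versions:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.NameNodeProxies;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;

public class GetBlocksClient {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    URI nnUri = URI.create("hdfs://namenode:8020"); // assumed address
    NamenodeProtocol nn = NameNodeProxies
        .createProxy(conf, nnUri, NamenodeProtocol.class).getProxy();
    // pick a live datanode to pull a block list from
    DistributedFileSystem dfs =
        (DistributedFileSystem) FileSystem.get(nnUri, conf);
    DatanodeInfo[] live = dfs.getDataNodeStats();
    if (live.length > 0) {
      // ask for roughly 2 GB worth of blocks hosted on the first node
      BlocksWithLocations blocks = nn.getBlocks(live[0], 2L << 30);
      System.out.println("received " + blocks.getBlocks().length + " blocks");
    }
  }
}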
Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations in project hadoop by apache.
In class PBHelper, the method convert:
public static BlocksWithLocations convert(BlocksWithLocationsProto blocks) {
  List<BlockWithLocationsProto> b = blocks.getBlocksList();
  BlockWithLocations[] ret = new BlockWithLocations[b.size()];
  int i = 0;
  for (BlockWithLocationsProto entry : b) {
    ret[i++] = convert(entry);
  }
  return new BlocksWithLocations(ret);
}
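The converter maps each BlockWithLocationsProto entry back to its POJO form; PBHelper also provides the inverse conversion, so a value can round-trip across the wire. A small sketch of that round trip with made-up block data; the proto import path shown is an assumption and differs between Hadoop versions:

import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.Block;
// proto location is an assumption; older releases keep it in HdfsProtos
import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlocksWithLocationsProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelper;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;

public class RoundTripDemo {
  public static void main(String[] args) {
    // blockId=42, numBytes=1024, generationStamp=1001 -- made-up values
    BlockWithLocations bwl = new BlockWithLocations(
        new Block(42L, 1024L, 1001L),
        new String[] { "dn-uuid-1" },           // datanode UUIDs
        new String[] { "DS-1" },                // storage IDs
        new StorageType[] { StorageType.DISK });
    BlocksWithLocations pojo =
        new BlocksWithLocations(new BlockWithLocations[] { bwl });
    BlocksWithLocationsProto proto = PBHelper.convert(pojo); // serialize side
    BlocksWithLocations back = PBHelper.convert(proto);      // deserialize side
    System.out.println(back.getBlocks().length == pojo.getBlocks().length);
  }
}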
Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations in project hadoop by apache.
In class TestPBHelper, the method testConvertBlocksWithLocations:
@Test
public void testConvertBlocksWithLocations() {
  // exercise both variants handled by the converter: false and true
  boolean[] testSuite = new boolean[] { false, true };
  for (int i = 0; i < testSuite.length; i++) {
    BlockWithLocations[] list = new BlockWithLocations[] {
        getBlockWithLocations(1, testSuite[i]),
        getBlockWithLocations(2, testSuite[i]) };
    BlocksWithLocations locs = new BlocksWithLocations(list);
    // round-trip: POJO -> proto -> POJO
    BlocksWithLocationsProto locsProto = PBHelper.convert(locs);
    BlocksWithLocations locs2 = PBHelper.convert(locsProto);
    BlockWithLocations[] blocks = locs.getBlocks();
    BlockWithLocations[] blocks2 = locs2.getBlocks();
    assertEquals(blocks.length, blocks2.length);
    for (int j = 0; j < blocks.length; j++) {
      compare(blocks[j], blocks2[j]);
    }
  }
}
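getBlockWithLocations and compare are private helpers defined elsewhere in TestPBHelper and not shown here. A plausible reconstruction consistent with the call sites above, assuming the usual JUnit and java.util.Arrays imports; the field values are illustrative, not the verbatim Hadoop source:

// Plausible reconstruction of the test helpers, not verbatim Hadoop code.
private static BlockWithLocations getBlockWithLocations(
    int bid, boolean isStriped) {
  final String[] datanodeUuids = { "dn1", "dn2", "dn3" };
  final String[] storageIDs = { "s1", "s2", "s3" };
  final StorageType[] storageTypes =
      { StorageType.DISK, StorageType.DISK, StorageType.DISK };
  BlockWithLocations blkLocs = new BlockWithLocations(
      new Block(bid, 0, 1), datanodeUuids, storageIDs, storageTypes);
  // the striped variant would wrap blkLocs in a StripedBlockWithLocations;
  // omitted here because its constructor arguments are version-specific
  return blkLocs;
}

private static void compare(BlockWithLocations locs1,
    BlockWithLocations locs2) {
  assertEquals(locs1.getBlock(), locs2.getBlock());
  assertTrue(Arrays.equals(locs1.getDatanodeUuids(), locs2.getDatanodeUuids()));
  assertTrue(Arrays.equals(locs1.getStorageIDs(), locs2.getStorageIDs()));
}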