
Example 1 with StripedBlockWithLocations

Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations in project hadoop by apache.

From the class BlockManager, method addBlock:

/**
   * Get all valid locations of the block & add the block to results
   * @return the length of the added block; 0 if the block is not added. If the
   * added block is a block group, return its approximate internal block size
   */
private long addBlock(BlockInfo block, List<BlockWithLocations> results) {
    final List<DatanodeStorageInfo> locations = getValidLocations(block);
    if (locations.size() == 0) {
        return 0;
    } else {
        final String[] datanodeUuids = new String[locations.size()];
        final String[] storageIDs = new String[datanodeUuids.length];
        final StorageType[] storageTypes = new StorageType[datanodeUuids.length];
        for (int i = 0; i < locations.size(); i++) {
            final DatanodeStorageInfo s = locations.get(i);
            datanodeUuids[i] = s.getDatanodeDescriptor().getDatanodeUuid();
            storageIDs[i] = s.getStorageID();
            storageTypes[i] = s.getStorageType();
        }
        BlockWithLocations blkWithLocs = new BlockWithLocations(block, datanodeUuids, storageIDs, storageTypes);
        if (block.isStriped()) {
            BlockInfoStriped blockStriped = (BlockInfoStriped) block;
            byte[] indices = new byte[locations.size()];
            for (int i = 0; i < locations.size(); i++) {
                indices[i] = (byte) blockStriped.getStorageBlockIndex(locations.get(i));
            }
            results.add(new StripedBlockWithLocations(blkWithLocs, indices, blockStriped.getDataBlockNum(), blockStriped.getCellSize()));
            // approximate size
            return block.getNumBytes() / blockStriped.getDataBlockNum();
        } else {
            results.add(blkWithLocs);
            return block.getNumBytes();
        }
    }
}
Also used: StorageType(org.apache.hadoop.fs.StorageType), StripedBlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations), BlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations)
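
For context, addBlock is the kind of helper a caller invokes in a loop while collecting blocks up to a requested byte total. The sketch below is an approximation under that assumption, not the actual BlockManager.getBlocks code: the method name addBlocksUpTo and the block iterator are hypothetical, and only addBlock itself comes from the example above.

// Hedged sketch (hypothetical helper, not actual Hadoop code): accumulate
// blocks until a requested byte total is reached. Blocks with no valid
// locations contribute 0 and are effectively skipped.
private long addBlocksUpTo(Iterator<BlockInfo> blocks, long requestedSize,
        List<BlockWithLocations> results) {
    long totalSize = 0;
    while (totalSize < requestedSize && blocks.hasNext()) {
        // for striped block groups this adds the approximate internal block size
        totalSize += addBlock(blocks.next(), results);
    }
    return totalSize;
}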

Example 2 with StripedBlockWithLocations

Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations in project hadoop by apache.

From the class PBHelper, method convert:

public static BlockWithLocations convert(BlockWithLocationsProto b) {
    final List<String> datanodeUuids = b.getDatanodeUuidsList();
    final List<String> storageUuids = b.getStorageUuidsList();
    final List<StorageTypeProto> storageTypes = b.getStorageTypesList();
    BlockWithLocations blk = new BlockWithLocations(
            PBHelperClient.convert(b.getBlock()),
            datanodeUuids.toArray(new String[datanodeUuids.size()]),
            storageUuids.toArray(new String[storageUuids.size()]),
            PBHelperClient.convertStorageTypes(storageTypes, storageUuids.size()));
    if (b.hasIndices()) {
        // the presence of indices marks a striped block group
        blk = new StripedBlockWithLocations(blk, b.getIndices().toByteArray(),
                (short) b.getDataBlockNum(), b.getCellSize());
    }
    return blk;
}
Also used: StripedBlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations), BlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations), StorageTypeProto(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto), ByteString(com.google.protobuf.ByteString)
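
A quick way to see the hasIndices() branch in action is to build a proto by hand with indices set and run it through convert. The values below are made up for illustration; only the builder and converter calls mirror the ones already shown on this page.

// Hedged sketch with hypothetical values: a proto carrying indices converts
// into a StripedBlockWithLocations; one without them stays a plain
// BlockWithLocations.
BlockWithLocationsProto proto = BlockWithLocationsProto.newBuilder()
        .setBlock(PBHelperClient.convert(new Block(1, 0, 1)))
        .addAllDatanodeUuids(Arrays.asList("dn1", "dn2"))
        .addAllStorageUuids(Arrays.asList("s1", "s2"))
        .addAllStorageTypes(PBHelperClient.convertStorageTypes(
                new StorageType[] { StorageType.DISK, StorageType.DISK }))
        .setIndices(ByteString.copyFrom(new byte[] { 0, 1 }))
        .setDataBlockNum(6)
        .setCellSize(1024 * 1024)
        .build();
BlockWithLocations converted = PBHelper.convert(proto);
// converted is a StripedBlockWithLocations because indices were present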

Example 3 with StripedBlockWithLocations

Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations in project hadoop by apache.

From the class PBHelper, method convert:

public static BlockWithLocationsProto convert(BlockWithLocations blk) {
    BlockWithLocationsProto.Builder builder = BlockWithLocationsProto.newBuilder()
            .setBlock(PBHelperClient.convert(blk.getBlock()))
            .addAllDatanodeUuids(Arrays.asList(blk.getDatanodeUuids()))
            .addAllStorageUuids(Arrays.asList(blk.getStorageIDs()))
            .addAllStorageTypes(PBHelperClient.convertStorageTypes(blk.getStorageTypes()));
    if (blk instanceof StripedBlockWithLocations) {
        // striped block groups additionally serialize indices, data block count and cell size
        StripedBlockWithLocations sblk = (StripedBlockWithLocations) blk;
        builder.setIndices(PBHelperClient.getByteString(sblk.getIndices()));
        builder.setDataBlockNum(sblk.getDataBlockNum());
        builder.setCellSize(sblk.getCellSize());
    }
    return builder.build();
}
Also used: StripedBlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations), BlockWithLocationsProto(org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlockWithLocationsProto)
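
Since the two convert overloads above are inverses, a natural check is a round trip from object to proto and back. The snippet below is a sketch under that assumption, using JUnit assertions (assertTrue, assertArrayEquals) and the constructors already shown in the other examples; the literal values are arbitrary.

// Hedged sketch: round-trip a striped block through both convert() overloads
// and check that the striped-specific fields survive. Values are arbitrary.
StripedBlockWithLocations original = new StripedBlockWithLocations(
        new BlockWithLocations(new Block(1, 0, 1),
                new String[] { "dn1" }, new String[] { "s1" },
                new StorageType[] { StorageType.DISK }),
        new byte[] { 0 }, (short) 6, 1024 * 1024);
BlockWithLocations roundTripped = PBHelper.convert(PBHelper.convert(original));
assertTrue(roundTripped instanceof StripedBlockWithLocations);
assertArrayEquals(original.getIndices(),
        ((StripedBlockWithLocations) roundTripped).getIndices());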

Example 4 with StripedBlockWithLocations

Use of org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations in project hadoop by apache.

From the class TestPBHelper, method getBlockWithLocations:

private static BlockWithLocations getBlockWithLocations(int bid, boolean isStriped) {
    final String[] datanodeUuids = { "dn1", "dn2", "dn3" };
    final String[] storageIDs = { "s1", "s2", "s3" };
    final StorageType[] storageTypes = { StorageType.DISK, StorageType.DISK, StorageType.DISK };
    final byte[] indices = { 0, 1, 2 };
    final short dataBlkNum = 6;
    BlockWithLocations blkLocs = new BlockWithLocations(new Block(bid, 0, 1), datanodeUuids, storageIDs, storageTypes);
    if (isStriped) {
        blkLocs = new StripedBlockWithLocations(blkLocs, indices, dataBlkNum, StripedFileTestUtil.getDefaultECPolicy().getCellSize());
    }
    return blkLocs;
}
Also used: StorageType(org.apache.hadoop.fs.StorageType), StripedBlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations), BlockWithLocations(org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations), Block(org.apache.hadoop.hdfs.protocol.Block), ExtendedBlock(org.apache.hadoop.hdfs.protocol.ExtendedBlock), RecoveringBlock(org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock), LocatedBlock(org.apache.hadoop.hdfs.protocol.LocatedBlock), ByteString(com.google.protobuf.ByteString)
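
A test driving this helper would typically loop over both layouts and assert that PBHelper conversion is lossless. The method below is a hypothetical sketch in that spirit, not TestPBHelper's actual test; it assumes JUnit with static imports of assertEquals and assertArrayEquals.

// Hedged sketch (hypothetical test name): exercise the helper for both the
// contiguous and the striped case and round-trip through PBHelper.
@Test
public void testBlockWithLocationsRoundTrip() {
    for (boolean isStriped : new boolean[] { false, true }) {
        BlockWithLocations locs = getBlockWithLocations(1, isStriped);
        BlockWithLocations converted = PBHelper.convert(PBHelper.convert(locs));
        assertEquals(isStriped, converted instanceof StripedBlockWithLocations);
        assertArrayEquals(locs.getDatanodeUuids(), converted.getDatanodeUuids());
        assertArrayEquals(locs.getStorageIDs(), converted.getStorageIDs());
    }
}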

Aggregations

StripedBlockWithLocations (org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations): 4 uses
BlockWithLocations (org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations): 3 uses
ByteString (com.google.protobuf.ByteString): 2 uses
StorageType (org.apache.hadoop.fs.StorageType): 2 uses
Block (org.apache.hadoop.hdfs.protocol.Block): 1 use
ExtendedBlock (org.apache.hadoop.hdfs.protocol.ExtendedBlock): 1 use
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 1 use
StorageTypeProto (org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto): 1 use
BlockWithLocationsProto (org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlockWithLocationsProto): 1 use
RecoveringBlock (org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock): 1 use