Use of org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto in project hbase by apache.
In the class FanOutOneBlockAsyncDFSOutputHelper, the method createStorageTypeSetter:
private static StorageTypeSetter createStorageTypeSetter() throws NoSuchMethodException {
  // Look up OpWriteBlockProto.Builder.setStorageType reflectively; the surrounding helper
  // resolves HDFS internals at runtime rather than binding to them at compile time.
  Method setStorageTypeMethod =
    OpWriteBlockProto.Builder.class.getMethod("setStorageType", StorageTypeProto.class);
  // Index the protobuf storage-type constants by name so a client-side storage type
  // enum can be translated into its protobuf counterpart.
  ImmutableMap.Builder<String, StorageTypeProto> builder = ImmutableMap.builder();
  for (StorageTypeProto storageTypeProto : StorageTypeProto.values()) {
    builder.put(storageTypeProto.name(), storageTypeProto);
  }
  ImmutableMap<String, StorageTypeProto> name2ProtoEnum = builder.build();
  return new StorageTypeSetter() {
    @Override
    public OpWriteBlockProto.Builder set(OpWriteBlockProto.Builder builder, Enum<?> storageType) {
      // Resolve the proto constant by name and apply it through the reflective setter.
      Object protoEnum = name2ProtoEnum.get(storageType.name());
      try {
        setStorageTypeMethod.invoke(builder, protoEnum);
      } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
        throw new RuntimeException(e);
      }
      return builder;
    }
  };
}
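The setter above bridges two enum types by constant name and applies the result through a reflectively resolved builder method. The following is a minimal, self-contained sketch of that pattern; FakeStorageType, FakeStorageTypeProto and RequestBuilder are hypothetical stand-ins (not HDFS classes) for StorageType, StorageTypeProto and OpWriteBlockProto.Builder, and only the shape of the code mirrors the original. Like the original, it needs Guava's ImmutableMap on the classpath.

import com.google.common.collect.ImmutableMap;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

public class StorageTypeSetterSketch {
  enum FakeStorageType { DISK, SSD, ARCHIVE }        // stand-in for the client-side enum
  enum FakeStorageTypeProto { DISK, SSD, ARCHIVE }   // stand-in for the protobuf enum

  // Stand-in for OpWriteBlockProto.Builder with a conventional setter.
  public static class RequestBuilder {
    FakeStorageTypeProto storageType;
    public RequestBuilder setStorageType(FakeStorageTypeProto t) {
      this.storageType = t;
      return this;
    }
  }

  public static void main(String[] args) throws Exception {
    // Resolve the setter reflectively instead of calling it directly.
    Method setter = RequestBuilder.class.getMethod("setStorageType", FakeStorageTypeProto.class);

    // Index the protobuf constants by name.
    ImmutableMap.Builder<String, FakeStorageTypeProto> b = ImmutableMap.builder();
    for (FakeStorageTypeProto p : FakeStorageTypeProto.values()) {
      b.put(p.name(), p);
    }
    ImmutableMap<String, FakeStorageTypeProto> name2Proto = b.build();

    // Translate a client-side constant by name and apply it through the reflective setter.
    RequestBuilder builder = new RequestBuilder();
    try {
      setter.invoke(builder, name2Proto.get(FakeStorageType.SSD.name()));
    } catch (IllegalAccessException | InvocationTargetException e) {
      throw new RuntimeException(e);
    }
    System.out.println("storageType = " + builder.storageType);  // prints: storageType = SSD
  }
}

Keying the map by name() rather than by ordinal keeps the bridge stable even if the two enums declare their constants in different orders.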
Use of org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.StorageTypeProto in project hadoop by apache.
In the class PBHelper, the method convert:
public static BlockWithLocations convert(BlockWithLocationsProto b) {
  final List<String> datanodeUuids = b.getDatanodeUuidsList();
  final List<String> storageUuids = b.getStorageUuidsList();
  final List<StorageTypeProto> storageTypes = b.getStorageTypesList();
  // Copy the repeated proto fields into the arrays expected by BlockWithLocations,
  // converting the protobuf storage types to their client-side equivalents.
  BlockWithLocations blk = new BlockWithLocations(PBHelperClient.convert(b.getBlock()),
      datanodeUuids.toArray(new String[datanodeUuids.size()]),
      storageUuids.toArray(new String[storageUuids.size()]),
      PBHelperClient.convertStorageTypes(storageTypes, storageUuids.size()));
  if (b.hasIndices()) {
    // The optional indices field is only present for striped (erasure-coded) blocks.
    blk = new StripedBlockWithLocations(blk, b.getIndices().toByteArray(),
        (short) b.getDataBlockNum(), b.getCellSize());
  }
  return blk;
}
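The conversion above follows a common proto-to-domain pattern: repeated fields are copied into arrays, and an optional field (indices) selects a richer subtype. Below is a minimal, self-contained sketch of that pattern under hypothetical stand-in types (FakeBlockProto, FakeBlock, FakeStripedBlock); it does not use the real BlockWithLocationsProto or BlockWithLocations classes.

import java.util.Arrays;
import java.util.List;

public class BlockConversionSketch {
  // Stand-in for the protobuf message: repeated string fields plus an optional byte field.
  public static class FakeBlockProto {
    final List<String> datanodeUuids;
    final List<String> storageUuids;
    final byte[] indices;   // null when the optional field is absent
    FakeBlockProto(List<String> d, List<String> s, byte[] i) {
      this.datanodeUuids = d; this.storageUuids = s; this.indices = i;
    }
    boolean hasIndices() { return indices != null; }
  }

  // Stand-ins for the plain and striped domain classes.
  public static class FakeBlock {
    final String[] datanodeUuids;
    final String[] storageUuids;
    FakeBlock(String[] d, String[] s) { this.datanodeUuids = d; this.storageUuids = s; }
  }
  public static class FakeStripedBlock extends FakeBlock {
    final byte[] indices;
    FakeStripedBlock(FakeBlock b, byte[] indices) {
      super(b.datanodeUuids, b.storageUuids);
      this.indices = indices;
    }
  }

  public static FakeBlock convert(FakeBlockProto p) {
    // Copy the repeated fields into the arrays the domain type expects.
    FakeBlock blk = new FakeBlock(
        p.datanodeUuids.toArray(new String[p.datanodeUuids.size()]),
        p.storageUuids.toArray(new String[p.storageUuids.size()]));
    // Only wrap into the striped variant when the optional field is present.
    if (p.hasIndices()) {
      blk = new FakeStripedBlock(blk, p.indices);
    }
    return blk;
  }

  public static void main(String[] args) {
    FakeBlock plain = convert(new FakeBlockProto(List.of("dn-1"), List.of("s-1"), null));
    FakeBlock striped = convert(
        new FakeBlockProto(List.of("dn-1", "dn-2"), List.of("s-1", "s-2"), new byte[] { 0, 1 }));
    // Only the second conversion yields the striped subtype.
    System.out.println(Arrays.toString(plain.datanodeUuids)
        + " striped=" + (plain instanceof FakeStripedBlock));
    System.out.println(Arrays.toString(striped.datanodeUuids)
        + " striped=" + (striped instanceof FakeStripedBlock));
  }
}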