
Example 16 with ChunkInfo

Use of org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo in the Apache Ozone project.

From the class TestBlockInputStream, the method testRefreshExitsIfPipelineHasSameNodes verifies that a failed read triggers exactly one pipeline refresh when the refreshed pipeline contains the same nodes.

@Test
public void testRefreshExitsIfPipelineHasSameNodes() throws Exception {
    // GIVEN
    BlockID blockID = new BlockID(new ContainerBlockID(1, 1));
    Pipeline pipeline = MockPipeline.createSingleNodePipeline();
    final int len = 200;
    final ChunkInputStream stream = mock(ChunkInputStream.class);
    when(stream.read(any(), anyInt(), anyInt()))
        .thenThrow(new StorageContainerException("test", CONTAINER_UNHEALTHY));
    when(stream.getRemaining()).thenReturn((long) len);
    when(refreshPipeline.apply(blockID))
        .thenAnswer(invocation -> samePipelineWithNewId(pipeline));
    BlockInputStream subject = new DummyBlockInputStream(blockID, blockSize,
        pipeline, null, false, null, refreshPipeline, chunks, null) {

        @Override
        protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
            return stream;
        }
    };
    try {
        subject.initialize();
        // WHEN
        byte[] b = new byte[len];
        LambdaTestUtils.intercept(StorageContainerException.class,
            () -> subject.read(b, 0, len));
        // THEN
        verify(refreshPipeline).apply(blockID);
    } finally {
        subject.close();
    }
}
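
The helper samePipelineWithNewId used in the stubbed refresh call above is not part of this snippet. A minimal sketch of what such a helper could look like, assuming the Pipeline.newBuilder(Pipeline) copy builder and PipelineID.randomId() from hdds-common, is:

// Hypothetical helper (not shown in the snippet above): return a pipeline
// with the same datanodes but a fresh PipelineID, so the refresh path sees
// "same nodes" and gives up after a single refresh attempt.
private Pipeline samePipelineWithNewId(Pipeline pipeline) {
    return Pipeline.newBuilder(pipeline)
        .setId(PipelineID.randomId())
        .build();
}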
Also used:
ChunkInfo (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo)
BlockID (org.apache.hadoop.hdds.client.BlockID)
ContainerBlockID (org.apache.hadoop.hdds.client.ContainerBlockID)
StorageContainerException (org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException)
MockPipeline (org.apache.hadoop.hdds.scm.pipeline.MockPipeline)
Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline)
Test (org.junit.Test)

Aggregations

ChunkInfo (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ChunkInfo): 16 usages
Pipeline (org.apache.hadoop.hdds.scm.pipeline.Pipeline): 6 usages
BlockID (org.apache.hadoop.hdds.client.BlockID): 5 usages
StorageContainerException (org.apache.hadoop.hdds.scm.container.common.helpers.StorageContainerException): 5 usages
MockPipeline (org.apache.hadoop.hdds.scm.pipeline.MockPipeline): 5 usages
Test (org.junit.Test): 5 usages
ContainerBlockID (org.apache.hadoop.hdds.client.ContainerBlockID): 4 usages
BlockData (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.BlockData): 4 usages
XceiverClientSpi (org.apache.hadoop.hdds.scm.XceiverClientSpi): 4 usages
KeyValue (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.KeyValue): 3 usages
PutBlockRequestProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.PutBlockRequestProto): 3 usages
XceiverClientFactory (org.apache.hadoop.hdds.scm.XceiverClientFactory): 3 usages
IOException (java.io.IOException): 2 usages
AtomicReference (java.util.concurrent.atomic.AtomicReference): 2 usages
ContainerProtos (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos): 2 usages
ContainerCommandRequestProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandRequestProto): 2 usages
ContainerCommandResponseProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerCommandResponseProto): 2 usages
DatanodeBlockID (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.DatanodeBlockID): 2 usages
PutSmallFileRequestProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.PutSmallFileRequestProto): 2 usages
WriteChunkRequestProto (org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.WriteChunkRequestProto): 2 usages
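
For context, the examples aggregated here all operate on the ContainerProtos.ChunkInfo message. A minimal sketch of constructing one, assuming the usual chunkName, offset, len, and checksumData fields of the datanode client protocol, might be:

// Sketch only: field names are assumed from the datanode client protocol.
// Describes a 4 MB chunk at offset 0 with checksumming disabled.
ContainerProtos.ChunkInfo chunkInfo = ContainerProtos.ChunkInfo.newBuilder()
    .setChunkName("block_1_chunk_1")
    .setOffset(0)
    .setLen(4L * 1024 * 1024)
    .setChecksumData(ContainerProtos.ChecksumData.newBuilder()
        .setType(ContainerProtos.ChecksumType.NONE)
        .setBytesPerChecksum(1024 * 1024)
        .build())
    .build();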