
Example 6 with RawErasureDecoder

Use of org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder in project hadoop by apache.

From the class TestDFSStripedInputStream, method testPreadWithDNFailure: the test stops one DataNode, rebuilds the expected bytes of the lost cells with a RawErasureDecoder, and then verifies positional (pread) reads against that decoded expected buffer.

@Test
public void testPreadWithDNFailure() throws Exception {
    final int numBlocks = 4;
    final int failedDNIdx = dataBlocks - 1;
    DFSTestUtil.createStripedFile(cluster, filePath, null, numBlocks, stripesPerBlock, false, ecPolicy);
    LocatedBlocks lbs = fs.getClient().namenode.getBlockLocations(filePath.toString(), 0, blockGroupSize);
    assert lbs.get(0) instanceof LocatedStripedBlock;
    LocatedStripedBlock bg = (LocatedStripedBlock) (lbs.get(0));
    for (int i = 0; i < dataBlocks + parityBlocks; i++) {
        Block blk = new Block(bg.getBlock().getBlockId() + i, stripesPerBlock * cellSize, bg.getBlock().getGenerationStamp());
        blk.setGenerationStamp(bg.getBlock().getGenerationStamp());
        cluster.injectBlocks(i, Arrays.asList(blk), bg.getBlock().getBlockPoolId());
    }
    DFSStripedInputStream in = new DFSStripedInputStream(fs.getClient(), filePath.toString(), false, ecPolicy, null);
    int readSize = blockGroupSize;
    byte[] readBuffer = new byte[readSize];
    byte[] expected = new byte[readSize];
    /** A variation of {@link DFSTestUtil#fillExpectedBuf} for striped blocks */
    for (int i = 0; i < stripesPerBlock; i++) {
        for (int j = 0; j < dataBlocks; j++) {
            for (int k = 0; k < cellSize; k++) {
                int posInBlk = i * cellSize + k;
                int posInFile = i * cellSize * dataBlocks + j * cellSize + k;
                expected[posInFile] = SimulatedFSDataset.simulatedByte(new Block(bg.getBlock().getBlockId() + j), posInBlk);
            }
        }
    }
    ErasureCoderOptions coderOptions = new ErasureCoderOptions(dataBlocks, parityBlocks);
    RawErasureDecoder rawDecoder = CodecUtil.createRawDecoder(conf, ecPolicy.getCodecName(), coderOptions);
    // Update the expected content for decoded data
    int[] missingBlkIdx = new int[parityBlocks];
    for (int i = 0; i < missingBlkIdx.length; i++) {
        if (i == 0) {
            missingBlkIdx[i] = failedDNIdx;
        } else {
            missingBlkIdx[i] = dataBlocks + i;
        }
    }
    cluster.stopDataNode(failedDNIdx);
    for (int i = 0; i < stripesPerBlock; i++) {
        byte[][] decodeInputs = new byte[dataBlocks + parityBlocks][cellSize];
        byte[][] decodeOutputs = new byte[missingBlkIdx.length][cellSize];
        for (int j = 0; j < dataBlocks; j++) {
            int posInBuf = i * cellSize * dataBlocks + j * cellSize;
            if (j != failedDNIdx) {
                System.arraycopy(expected, posInBuf, decodeInputs[j], 0, cellSize);
            }
        }
        for (int j = dataBlocks; j < dataBlocks + parityBlocks; j++) {
            for (int k = 0; k < cellSize; k++) {
                int posInBlk = i * cellSize + k;
                decodeInputs[j][k] = SimulatedFSDataset.simulatedByte(new Block(bg.getBlock().getBlockId() + j), posInBlk);
            }
        }
        for (int m : missingBlkIdx) {
            decodeInputs[m] = null;
        }
        rawDecoder.decode(decodeInputs, missingBlkIdx, decodeOutputs);
        int posInBuf = i * cellSize * dataBlocks + failedDNIdx * cellSize;
        System.arraycopy(decodeOutputs[0], 0, expected, posInBuf, cellSize);
    }
    int delta = 10;
    int done = 0;
    // read a small delta, shouldn't trigger decode
    // |cell_0 |
    // |10     |
    done += in.read(0, readBuffer, 0, delta);
    assertEquals(delta, done);
    assertArrayEquals(Arrays.copyOf(expected, done), Arrays.copyOf(readBuffer, done));
    // both head and trail cells are partial
    // |c_0      |c_1    |c_2 |c_3 |c_4      |c_5         |
    // |256K - 10|missing|256K|256K|256K - 10|not in range|
    done += in.read(delta, readBuffer, delta, cellSize * (dataBlocks - 1) - 2 * delta);
    assertEquals(cellSize * (dataBlocks - 1) - delta, done);
    assertArrayEquals(Arrays.copyOf(expected, done), Arrays.copyOf(readBuffer, done));
    // read the rest
    done += in.read(done, readBuffer, done, readSize - done);
    assertEquals(readSize, done);
    assertArrayEquals(expected, readBuffer);
}
Also used: Block (org.apache.hadoop.hdfs.protocol.Block), LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock), LocatedBlocks (org.apache.hadoop.hdfs.protocol.LocatedBlocks), LocatedStripedBlock (org.apache.hadoop.hdfs.protocol.LocatedStripedBlock), ErasureCoderOptions (org.apache.hadoop.io.erasurecode.ErasureCoderOptions), RawErasureDecoder (org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder), Test (org.junit.Test)
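
The decode call above follows the raw coder contract: the inputs array holds one buffer per data unit followed by one per parity unit, the buffers for lost units are set to null, and the indexes of those units are passed separately so the decoder can fill the matching output buffers in the same order. Below is a minimal standalone sketch of that pattern using the same CodecUtil.createRawDecoder entry point as the test; the RS(6, 3) layout, cell size, class name, and the ErasureCodeConstants.RS_CODEC_NAME constant (standing in for ecPolicy.getCodecName()) are illustrative assumptions rather than anything taken from the test setup.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;

public class RawDecodeSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative RS(6, 3) layout and cell size; not taken from the test.
        final int numData = 6;
        final int numParity = 3;
        final int cellSize = 64 * 1024;
        ErasureCoderOptions options = new ErasureCoderOptions(numData, numParity);
        RawErasureDecoder decoder = CodecUtil.createRawDecoder(
                new Configuration(), ErasureCodeConstants.RS_CODEC_NAME, options);
        // One slot per data unit followed by one per parity unit.
        byte[][] inputs = new byte[numData + numParity][cellSize];
        // ... fill the surviving data and parity cells here ...
        // Mark the lost units: null out their slots and list their indexes.
        int[] erasedIndexes = { numData - 1 };
        for (int idx : erasedIndexes) {
            inputs[idx] = null;
        }
        // The decoder writes the reconstructed cells into outputs, one buffer
        // per erased index, in the order given by erasedIndexes.
        byte[][] outputs = new byte[erasedIndexes.length][cellSize];
        decoder.decode(inputs, erasedIndexes, outputs);
    }
}

In the test the same three arguments are rebuilt once per stripe, which is why decodeOutputs[0] can be copied straight back into the expected buffer at the failed cell's offset.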

Example 7 with RawErasureDecoder

Use of org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder in project hadoop by apache.

From the class TestDFSStripedInputStream, method testStatefulReadWithDNFailure: the same setup and decoding of the expected buffer as the previous example, but verified through stateful read(byte[], int, int) calls instead of positional reads.

@Test
public void testStatefulReadWithDNFailure() throws Exception {
    final int numBlocks = 4;
    final int failedDNIdx = dataBlocks - 1;
    DFSTestUtil.createStripedFile(cluster, filePath, null, numBlocks, stripesPerBlock, false, ecPolicy);
    LocatedBlocks lbs = fs.getClient().namenode.getBlockLocations(filePath.toString(), 0, blockGroupSize);
    assert lbs.get(0) instanceof LocatedStripedBlock;
    LocatedStripedBlock bg = (LocatedStripedBlock) (lbs.get(0));
    for (int i = 0; i < dataBlocks + parityBlocks; i++) {
        Block blk = new Block(bg.getBlock().getBlockId() + i, stripesPerBlock * cellSize, bg.getBlock().getGenerationStamp());
        blk.setGenerationStamp(bg.getBlock().getGenerationStamp());
        cluster.injectBlocks(i, Arrays.asList(blk), bg.getBlock().getBlockPoolId());
    }
    DFSStripedInputStream in = new DFSStripedInputStream(fs.getClient(), filePath.toString(), false, ecPolicy, null);
    int readSize = blockGroupSize;
    byte[] readBuffer = new byte[readSize];
    byte[] expected = new byte[readSize];
    /** A variation of {@link DFSTestUtil#fillExpectedBuf} for striped blocks */
    for (int i = 0; i < stripesPerBlock; i++) {
        for (int j = 0; j < dataBlocks; j++) {
            for (int k = 0; k < cellSize; k++) {
                int posInBlk = i * cellSize + k;
                int posInFile = i * cellSize * dataBlocks + j * cellSize + k;
                expected[posInFile] = SimulatedFSDataset.simulatedByte(new Block(bg.getBlock().getBlockId() + j), posInBlk);
            }
        }
    }
    ErasureCoderOptions coderOptions = new ErasureCoderOptions(dataBlocks, parityBlocks);
    RawErasureDecoder rawDecoder = CodecUtil.createRawDecoder(conf, ecPolicy.getCodecName(), coderOptions);
    // Update the expected content for decoded data
    int[] missingBlkIdx = new int[parityBlocks];
    for (int i = 0; i < missingBlkIdx.length; i++) {
        if (i == 0) {
            missingBlkIdx[i] = failedDNIdx;
        } else {
            missingBlkIdx[i] = dataBlocks + i;
        }
    }
    cluster.stopDataNode(failedDNIdx);
    for (int i = 0; i < stripesPerBlock; i++) {
        byte[][] decodeInputs = new byte[dataBlocks + parityBlocks][cellSize];
        byte[][] decodeOutputs = new byte[missingBlkIdx.length][cellSize];
        for (int j = 0; j < dataBlocks; j++) {
            int posInBuf = i * cellSize * dataBlocks + j * cellSize;
            if (j != failedDNIdx) {
                System.arraycopy(expected, posInBuf, decodeInputs[j], 0, cellSize);
            }
        }
        for (int j = dataBlocks; j < dataBlocks + parityBlocks; j++) {
            for (int k = 0; k < cellSize; k++) {
                int posInBlk = i * cellSize + k;
                decodeInputs[j][k] = SimulatedFSDataset.simulatedByte(new Block(bg.getBlock().getBlockId() + j), posInBlk);
            }
        }
        for (int m : missingBlkIdx) {
            decodeInputs[m] = null;
        }
        rawDecoder.decode(decodeInputs, missingBlkIdx, decodeOutputs);
        int posInBuf = i * cellSize * dataBlocks + failedDNIdx * cellSize;
        System.arraycopy(decodeOutputs[0], 0, expected, posInBuf, cellSize);
    }
    int delta = 10;
    int done = 0;
    // read a small delta, shouldn't trigger decode
    // |cell_0 |
    // |10     |
    done += in.read(readBuffer, 0, delta);
    assertEquals(delta, done);
    // both head and trail cells are partial
    // |c_0      |c_1    |c_2 |c_3 |c_4      |c_5         |
    // |256K - 10|missing|256K|256K|256K - 10|not in range|
    while (done < (cellSize * (dataBlocks - 1) - 2 * delta)) {
        int ret = in.read(readBuffer, delta, cellSize * (dataBlocks - 1) - 2 * delta);
        assertTrue(ret > 0);
        done += ret;
    }
    assertEquals(cellSize * (dataBlocks - 1) - delta, done);
    // read the rest
    int restSize;
    restSize = readSize - done;
    while (done < restSize) {
        int ret = in.read(readBuffer, done, restSize);
        assertTrue(ret > 0);
        done += ret;
    }
    assertEquals(readSize, done);
    assertArrayEquals(expected, readBuffer);
}
Also used: Block (org.apache.hadoop.hdfs.protocol.Block), LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock), LocatedBlocks (org.apache.hadoop.hdfs.protocol.LocatedBlocks), LocatedStripedBlock (org.apache.hadoop.hdfs.protocol.LocatedStripedBlock), ErasureCoderOptions (org.apache.hadoop.io.erasurecode.ErasureCoderOptions), RawErasureDecoder (org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder), Test (org.junit.Test)
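
Both tests only exercise the decode side, because SimulatedFSDataset supplies deterministic bytes for the parity cells. Outside a MiniDFSCluster, the parity buffers would come from the companion RawErasureEncoder (also listed in the aggregations below). The following is a hedged round-trip sketch, with payload and sizes chosen only for illustration.

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public class RawCoderRoundTrip {
    public static void main(String[] args) throws Exception {
        // Illustrative sizes only.
        final int numData = 6;
        final int numParity = 3;
        final int cellSize = 1024;
        ErasureCoderOptions options = new ErasureCoderOptions(numData, numParity);
        Configuration conf = new Configuration();

        // Encode: numData data cells in, numParity parity cells out.
        RawErasureEncoder encoder = CodecUtil.createRawEncoder(
                conf, ErasureCodeConstants.RS_CODEC_NAME, options);
        byte[][] data = new byte[numData][cellSize];
        for (int i = 0; i < numData; i++) {
            Arrays.fill(data[i], (byte) (i + 1)); // dummy payload
        }
        byte[][] parity = new byte[numParity][cellSize];
        encoder.encode(data, parity);

        // Simulate losing one data cell and repair it with the decoder.
        byte[] lost = data[2].clone();
        byte[][] decodeInputs = new byte[numData + numParity][];
        System.arraycopy(data, 0, decodeInputs, 0, numData);
        System.arraycopy(parity, 0, decodeInputs, numData, numParity);
        decodeInputs[2] = null; // erase data cell 2
        int[] erasedIndexes = { 2 };
        byte[][] outputs = new byte[erasedIndexes.length][cellSize];
        RawErasureDecoder decoder = CodecUtil.createRawDecoder(
                conf, ErasureCodeConstants.RS_CODEC_NAME, options);
        decoder.decode(decodeInputs, erasedIndexes, outputs);

        System.out.println("recovered: " + Arrays.equals(lost, outputs[0]));
    }
}

Only the slot in decodeInputs is nulled; the surviving cell buffers are passed by reference, mirroring how the tests copy surviving cells into decodeInputs and null out the erased positions.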

Aggregations

RawErasureDecoder (org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder): 7 usages
ECBlock (org.apache.hadoop.io.erasurecode.ECBlock): 4 usages
Test (org.junit.Test): 3 usages
Block (org.apache.hadoop.hdfs.protocol.Block): 2 usages
LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock): 2 usages
LocatedBlocks (org.apache.hadoop.hdfs.protocol.LocatedBlocks): 2 usages
LocatedStripedBlock (org.apache.hadoop.hdfs.protocol.LocatedStripedBlock): 2 usages
ErasureCoderOptions (org.apache.hadoop.io.erasurecode.ErasureCoderOptions): 2 usages
RawErasureEncoder (org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder): 2 usages
DummyRawDecoder (org.apache.hadoop.io.erasurecode.rawcoder.DummyRawDecoder): 1 usage
RSRawDecoder (org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder): 1 usage
RSRawDecoderLegacy (org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoderLegacy): 1 usage
RSRawEncoder (org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder): 1 usage
RSRawEncoderLegacy (org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoderLegacy): 1 usage
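
Besides the CodecUtil factory path used in the two tests above, the aggregation also lists concrete coder classes (RSRawDecoder, RSRawDecoderLegacy, DummyRawDecoder, and their encoder counterparts) that tests sometimes construct directly to pin a specific implementation. The short sketch below assumes these classes expose public constructors taking the same ErasureCoderOptions; verify against the Hadoop version in use.

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.DummyRawDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;

public class DirectDecoderSketch {
    public static void main(String[] args) {
        ErasureCoderOptions options = new ErasureCoderOptions(6, 3);

        // Pure-Java Reed-Solomon decoder, bypassing CodecUtil's factory lookup.
        RawErasureDecoder rsDecoder = new RSRawDecoder(options);

        // No-op decoder, useful for isolating I/O paths from decoding cost.
        RawErasureDecoder dummyDecoder = new DummyRawDecoder(options);

        System.out.println(rsDecoder.getNumDataUnits() + " data units, "
                + rsDecoder.getNumParityUnits() + " parity units; dummy decoder: "
                + dummyDecoder.getClass().getSimpleName());
    }
}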