Search in sources :

Example 11 with FSDataInputStream

use of org.apache.hadoop.fs.FSDataInputStream in project hadoop by apache.

The getDfsInputStream method of the DFSClientCache class:

/**
 * Looks up (or lazily creates via the cache loader) the {@link FSDataInputStream}
 * cached for the given user and inode path.
 *
 * @param userName  user on whose behalf the stream is opened; part of the cache key
 * @param inodePath HDFS inode path of the file to read; part of the cache key
 * @return the cached input stream, or {@code null} if stream creation failed
 *         (callers must handle a null return)
 */
FSDataInputStream getDfsInputStream(String userName, String inodePath) {
    // NOTE: "CaheKey" is the actual (misspelled) class name in Hadoop.
    DFSInputStreamCaheKey k = new DFSInputStreamCaheKey(userName, inodePath);
    FSDataInputStream s = null;
    try {
        s = inputstreamCache.get(k);
    } catch (ExecutionException e) {
        // Pass the exception as the throwable argument so the full stack trace
        // is logged, instead of concatenating e (which logs only toString()).
        LOG.warn("Failed to create DFSInputStream for user:" + userName, e);
    }
    return s;
}
Also used : FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ExecutionException(java.util.concurrent.ExecutionException)

Example 12 with FSDataInputStream

use of org.apache.hadoop.fs.FSDataInputStream in project hadoop by apache.

The verifyStatefulRead method of the StripedFileTestUtil class:

/**
 * Reads the whole file at {@code srcPath} with the stateful (positional-cursor)
 * read API and asserts the content and total length match {@code expected}.
 *
 * @param fs         file system to read from
 * @param srcPath    file to verify
 * @param fileLength expected total number of bytes in the file
 * @param expected   expected file content
 * @param buf        scratch buffer that controls the per-call read size
 * @throws IOException if the open or any read fails
 */
static void verifyStatefulRead(FileSystem fs, Path srcPath, int fileLength, byte[] expected, byte[] buf) throws IOException {
    try (FSDataInputStream in = fs.open(srcPath)) {
        final byte[] actual = new byte[fileLength];
        int totalRead = 0;
        // Accumulate chunks until the stream signals EOF (negative return).
        for (int n = in.read(buf, 0, buf.length); n >= 0; n = in.read(buf, 0, buf.length)) {
            System.arraycopy(buf, 0, actual, totalRead, n);
            totalRead += n;
        }
        assertEquals("The length of file should be the same to write size", fileLength, totalRead);
        Assert.assertArrayEquals(expected, actual);
    }
}
Also used : FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream)

Example 13 with FSDataInputStream

use of org.apache.hadoop.fs.FSDataInputStream in project hadoop by apache.

The verifyPread method of the StripedFileTestUtil class:

/**
 * Verifies positional reads (pread) of an erasure-coded file at a set of
 * offsets chosen to straddle cell, cell-group, and end-of-file boundaries.
 * For each start offset, the remainder of the file is read with
 * {@code readFully} in buffer-sized chunks and compared byte-by-byte
 * against {@code expected}.
 *
 * @param fs         file system; assumed to be a DistributedFileSystem — the
 *                   cast below fails otherwise (TODO confirm callers guarantee this)
 * @param srcPath    erasure-coded file to verify
 * @param fileLength total length of the file in bytes
 * @param expected   expected file content (at least fileLength bytes)
 * @param buf        scratch buffer that controls the per-call read size
 * @throws IOException if the open or any read fails
 */
static void verifyPread(FileSystem fs, Path srcPath, int fileLength, byte[] expected, byte[] buf) throws IOException {
    final ErasureCodingPolicy ecPolicy = ((DistributedFileSystem) fs).getErasureCodingPolicy(srcPath);
    try (FSDataInputStream in = fs.open(srcPath)) {
        // Offsets around EC cell and cell-group boundaries (±102 bytes) plus
        // near-EOF positions; values may exceed fileLength and are clamped below.
        int[] startOffsets = { 0, 1, ecPolicy.getCellSize() - 102, ecPolicy.getCellSize(), ecPolicy.getCellSize() + 102, ecPolicy.getCellSize() * (ecPolicy.getNumDataUnits() - 1), ecPolicy.getCellSize() * (ecPolicy.getNumDataUnits() - 1) + 102, ecPolicy.getCellSize() * ecPolicy.getNumDataUnits(), fileLength - 102, fileLength - 1 };
        for (int startOffset : startOffsets) {
            // Clamp into [0, fileLength - 1] so every offset is readable.
            startOffset = Math.max(0, Math.min(startOffset, fileLength - 1));
            int remaining = fileLength - startOffset;
            int offset = startOffset;
            final byte[] result = new byte[remaining];
            while (remaining > 0) {
                int target = Math.min(remaining, buf.length);
                // Positional read: does not move the stream's stateful cursor.
                in.readFully(offset, buf, 0, target);
                // result is indexed relative to startOffset, not the file start.
                System.arraycopy(buf, 0, result, offset - startOffset, target);
                remaining -= target;
                offset += target;
            }
            // Byte-wise comparison gives a precise failure message per position.
            for (int i = 0; i < fileLength - startOffset; i++) {
                assertEquals("Byte at " + (startOffset + i) + " is different, " + "the startOffset is " + startOffset, expected[startOffset + i], result[i]);
            }
        }
    }
}
Also used : ErasureCodingPolicy(org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream)

Example 14 with FSDataInputStream

use of org.apache.hadoop.fs.FSDataInputStream in project hadoop by apache.

The verifyStatefulRead (ByteBuffer variant) method of the StripedFileTestUtil class:

/**
 * Reads the whole file at {@code srcPath} with the ByteBuffer stateful read
 * API and asserts the content and total length match {@code expected}.
 *
 * @param fs         file system to read from
 * @param srcPath    file to verify
 * @param fileLength expected total number of bytes in the file
 * @param expected   expected file content
 * @param buf        scratch buffer that controls the per-call read size
 * @throws IOException if the open or any read fails
 */
static void verifyStatefulRead(FileSystem fs, Path srcPath, int fileLength, byte[] expected, ByteBuffer buf) throws IOException {
    try (FSDataInputStream in = fs.open(srcPath)) {
        final ByteBuffer actual = ByteBuffer.allocate(fileLength);
        int totalRead = 0;
        // Read until EOF (negative return), draining buf into actual each pass.
        for (int n = in.read(buf); n >= 0; n = in.read(buf)) {
            totalRead += n;
            buf.flip();
            actual.put(buf);
            buf.clear();
        }
        assertEquals("The length of file should be the same to write size", fileLength, totalRead);
        Assert.assertArrayEquals(expected, actual.array());
    }
}
Also used : FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) ByteBuffer(java.nio.ByteBuffer)

Example 15 with FSDataInputStream

use of org.apache.hadoop.fs.FSDataInputStream in project hadoop by apache.

The verifySeek method of the StripedFileTestUtil class:

/**
 * Exercises {@code seek} at interesting positions of a striped file —
 * mid-file, one-third, start, and (when the file is long enough) the EC
 * cell, cell-group, and block-group boundaries — verifying a read succeeds
 * after each seek. Also checks that seeking to a negative offset or past
 * EOF raises {@link EOFException} (skipped for WebHDFS-backed streams,
 * which behave differently).
 *
 * @param fs           file system to read from
 * @param srcPath      striped file to verify
 * @param fileLength   total length of the file in bytes
 * @param ecPolicy     erasure-coding policy supplying cell size and data-unit count
 * @param blkGroupSize size in bytes of one striped block group
 * @throws IOException if the open, a seek, or a read fails
 */
static void verifySeek(FileSystem fs, Path srcPath, int fileLength, ErasureCodingPolicy ecPolicy, int blkGroupSize) throws IOException {
    try (FSDataInputStream in = fs.open(srcPath)) {
        // Halfway point.
        int position = fileLength / 2;
        assertSeekAndRead(in, position, fileLength);
        // One third of the way in.
        position = fileLength / 3;
        assertSeekAndRead(in, position, fileLength);
        // Back to the beginning.
        position = 0;
        assertSeekAndRead(in, position, fileLength);
        if (fileLength > ecPolicy.getCellSize()) {
            // Last byte of the first EC cell.
            position = ecPolicy.getCellSize() - 1;
            assertSeekAndRead(in, position, fileLength);
        }
        if (fileLength > ecPolicy.getCellSize() * ecPolicy.getNumDataUnits()) {
            // Last byte of the first cell group (one cell per data unit).
            position = ecPolicy.getCellSize() * ecPolicy.getNumDataUnits() - 1;
            assertSeekAndRead(in, position, fileLength);
        }
        if (fileLength > blkGroupSize) {
            // Last byte of the first striped block group.
            position = blkGroupSize - 1;
            assertSeekAndRead(in, position, fileLength);
        }
        // WebHDFS streams handle out-of-range seeks differently, so the
        // EOFException expectations below apply only to non-WebHDFS streams.
        if (!(in.getWrappedStream() instanceof WebHdfsInputStream)) {
            try {
                in.seek(-1);
                Assert.fail("Should be failed if seek to negative offset");
            } catch (EOFException e) {
            // expected
            }
            try {
                in.seek(fileLength + 1);
                Assert.fail("Should be failed if seek after EOF");
            } catch (EOFException e) {
            // expected
            }
        }
    }
}
Also used : EOFException(java.io.EOFException) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) WebHdfsInputStream(org.apache.hadoop.hdfs.web.WebHdfsFileSystem.WebHdfsInputStream)

Aggregations

FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)431 Path (org.apache.hadoop.fs.Path)271 FileSystem (org.apache.hadoop.fs.FileSystem)143 Test (org.junit.Test)135 IOException (java.io.IOException)125 Configuration (org.apache.hadoop.conf.Configuration)94 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)93 FileStatus (org.apache.hadoop.fs.FileStatus)62 InputStreamReader (java.io.InputStreamReader)37 BufferedReader (java.io.BufferedReader)36 FileNotFoundException (java.io.FileNotFoundException)26 IgfsPath (org.apache.ignite.igfs.IgfsPath)26 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)21 ArrayList (java.util.ArrayList)20 Random (java.util.Random)19 EOFException (java.io.EOFException)18 HashMap (java.util.HashMap)16 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)15 URI (java.net.URI)14 File (java.io.File)13