Example 6 with Reader

Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by Apache.

From the class TestTFileByteArrays, method readValueWithoutKey:

private void readValueWithoutKey(int recordIndex) throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    // position the scanner at the record with the given key
    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
    // read the value of the indexed record without reading its key first
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
    if (scanner.advance() && !scanner.atEnd()) {
        // read the value of the following record the same way
        byte[] vbuf2 = new byte[BUF_SIZE];
        int vlen2 = scanner.entry().getValueLength();
        scanner.entry().getValue(vbuf2);
        Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
    }
    scanner.close();
    reader.close();
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader)
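
The fixture file read here is produced elsewhere by the test's writeRecords helper, which is not shown on this page. For orientation, here is a minimal sketch of how such a file could be written with the public TFile.Writer API; the helper name, block size, and key/value formats below are illustrative assumptions, not the test's actual fixture code.

private void writeExampleRecords(FileSystem fs, Path path, Configuration conf, int count) throws IOException {
    FSDataOutputStream out = fs.create(path);
    // "memcmp" (TFile.COMPARATOR_MEMCMP) orders keys as raw bytes;
    // "none" (TFile.COMPRESSION_NONE) disables compression to keep the sketch simple
    Writer writer = new Writer(out, 64 * 1024, TFile.COMPRESSION_NONE, TFile.COMPARATOR_MEMCMP, conf);
    try {
        for (int i = 0; i < count; i++) {
            // zero-padded keys keep the append order sorted under the byte comparator
            writer.append(String.format("key%05d", i).getBytes(), ("value" + i).getBytes());
        }
    } finally {
        writer.close();
        out.close();
    }
}

Writer here is org.apache.hadoop.io.file.tfile.TFile.Writer and FSDataOutputStream is org.apache.hadoop.fs.FSDataOutputStream.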

Example 7 with Reader

Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by Apache.

From the class TestTFileByteArrays, method testFailureNegativeLength_3:

@Test
public void testFailureNegativeLength_3() throws IOException {
    if (skip)
        return;
    writeRecords(3);
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    try {
        // test negative array offset
        try {
            scanner.seekTo("keyY".getBytes(), -1, 4);
            Assert.fail("Failed to handle negative offset.");
        } catch (Exception e) {
            // noop, expecting exceptions
        }
        // test negative array length
        try {
            scanner.seekTo("keyY".getBytes(), 0, -2);
            Assert.fail("Failed to handle negative key length.");
        } catch (Exception e) {
            // noop, expecting exceptions
        }
    } finally {
        reader.close();
        scanner.close();
    }
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) EOFException(java.io.EOFException) Test(org.junit.Test)
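
For contrast with the two failure cases, a well-formed seek never throws: with a valid offset and length, seekTo simply reports whether an exact key match was found. A minimal sketch against the same scanner, where the key bytes are an illustrative assumption:

byte[] key = "keyY".getBytes();
// a valid offset/length pair: seekTo returns true only on an exact key match
boolean found = scanner.seekTo(key, 0, key.length);
if (!found) {
    // the scanner is now positioned at the first key greater than the
    // sought key, or at the end of the file
}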

Example 8 with Reader

Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by Apache.

From the class TestTFileByteArrays, method readValueBeforeKey:

private void readValueBeforeKey(int recordIndex) throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
    try {
        // read the value first ...
        byte[] vbuf = new byte[BUF_SIZE];
        int vlen = scanner.entry().getValueLength();
        scanner.entry().getValue(vbuf);
        Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);
        // ... then the key, from the same entry
        byte[] kbuf = new byte[BUF_SIZE];
        int klen = scanner.entry().getKeyLength();
        scanner.entry().getKey(kbuf);
        Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, recordIndex));
    } finally {
        scanner.close();
        reader.close();
    }
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader)
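
The Entry handle also offers a one-shot copy of key and value into BytesWritable buffers, which avoids the manual BUF_SIZE and length bookkeeping above. A minimal sketch; the helper name is ours, not part of the test:

private String[] readEntry(Scanner scanner) throws IOException {
    BytesWritable key = new BytesWritable();
    BytesWritable value = new BytesWritable();
    // copies both sides of the current entry in one call, resizing the buffers as needed
    scanner.entry().get(key, value);
    return new String[] { new String(key.getBytes(), 0, key.getLength()),
        new String(value.getBytes(), 0, value.getLength()) };
}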

Example 9 with Reader

Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by Apache.

From the class TestTFileByteArrays, method readKeyWithoutValue:

private void readKeyWithoutValue(int recordIndex) throws IOException {
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
    try {
        // read the indexed key
        byte[] kbuf1 = new byte[BUF_SIZE];
        int klen1 = scanner.entry().getKeyLength();
        scanner.entry().getKey(kbuf1);
        Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex));
        if (scanner.advance() && !scanner.atEnd()) {
            // read the next key following the indexed
            byte[] kbuf2 = new byte[BUF_SIZE];
            int klen2 = scanner.entry().getKeyLength();
            scanner.entry().getKey(kbuf2);
            Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, recordIndex + 1));
        }
    } finally {
        scanner.close();
        reader.close();
    }
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader)
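
The advance()/atEnd() pair used above generalizes to a key-only scan of the whole file. A minimal sketch using only calls that appear in these examples, printing instead of asserting:

Scanner scanner = reader.createScanner();
try {
    while (!scanner.atEnd()) {
        // key lengths are always known, so an exact-size buffer is safe
        byte[] kbuf = new byte[scanner.entry().getKeyLength()];
        scanner.entry().getKey(kbuf);
        System.out.println(new String(kbuf));
        scanner.advance();
    }
} finally {
    scanner.close();
}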

Example 10 with Reader

Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by Apache.

From the class TestTFileByteArrays, method testFailureReadValueManyTimes:

@Test
public void testFailureReadValueManyTimes() throws IOException {
    if (skip)
        return;
    writeRecords(5);
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    // the first read of the value succeeds
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
    try {
        // a second read of the same entry's value must fail
        scanner.entry().getValue(vbuf);
        Assert.fail("Cannot get the value multiple times.");
    } catch (Exception e) {
        // noop, expecting exceptions
    }
    scanner.close();
    reader.close();
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) EOFException(java.io.EOFException) Test(org.junit.Test)
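
The failure above reflects that an entry's value is streamed from the underlying block and can be consumed only once. The workaround is to copy it once and reuse the copy; a sketch, assuming the value is small enough for getValueLength() to be known up front:

// read the value exactly once per entry ...
byte[] buf = new byte[scanner.entry().getValueLength()];
int len = scanner.entry().getValue(buf);
String first = new String(buf, 0, len);
// ... then reuse the copied bytes freely instead of re-reading the entry
String second = new String(buf, 0, len);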

Aggregations

Reader (org.apache.hadoop.io.file.tfile.TFile.Reader): 28 uses
Scanner (org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner): 22 uses
Test (org.junit.Test): 16 uses
IOException (java.io.IOException): 9 uses
EOFException (java.io.EOFException): 7 uses
Path (org.apache.hadoop.fs.Path): 5 uses
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 4 uses
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 4 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 3 uses
Writer (org.apache.hadoop.io.file.tfile.TFile.Writer): 3 uses
DataInputStream (java.io.DataInputStream): 1 use
DataOutputStream (java.io.DataOutputStream): 1 use
Random (java.util.Random): 1 use
Location (org.apache.hadoop.io.file.tfile.TFile.Reader.Location): 1 use