Search in sources:

Example 11 with Reader

use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.

the class TestTFileByteArrays method testFailureWriterNotClosed.

@Test
public void testFailureWriterNotClosed() throws IOException {
    if (skip) {
        return;
    }
    // Opening a TFile whose writer is still open must fail.
    Reader prematureReader = null;
    try {
        prematureReader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
        Assert.fail("Cannot read before closing the writer.");
    } catch (IOException expected) {
        // noop, expecting exceptions
    } finally {
        if (prematureReader != null) {
            prematureReader.close();
        }
    }
}
Also used : Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) Test(org.junit.Test)

Example 12 with Reader

use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.

the class TestTFileByteArrays method testFailureNegativeLength_2.

@Test
public void testFailureNegativeLength_2() throws IOException {
    if (skip) {
        return;
    }
    closeOutput();
    // A negative key length passed to lowerBound must be rejected.
    Reader tfileReader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner tfileScanner = tfileReader.createScanner();
    try {
        tfileScanner.lowerBound("keyX".getBytes(), 0, -1);
        Assert.fail("Error on handling negative length.");
    } catch (Exception expected) {
        // noop, expecting exceptions
    } finally {
        tfileScanner.close();
        tfileReader.close();
    }
    closeOutput();
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) EOFException(java.io.EOFException) Test(org.junit.Test)

Example 13 with Reader

use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.

the class TestTFileByteArrays method testFailureNegativeOffset_2.

@Test
public void testFailureNegativeOffset_2() throws IOException {
    if (skip)
        return;
    closeOutput();
    // A negative key offset passed to lowerBound must be rejected.
    Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Scanner scanner = reader.createScanner();
    try {
        scanner.lowerBound("keyX".getBytes(), -1, 4);
        Assert.fail("Error on handling negative offset.");
    } catch (Exception e) {
    // noop, expecting exceptions
    } finally {
        // Close the scanner before the reader that created it; the original
        // closed the reader first, inverting the order used by the sibling
        // tests and tearing down the reader while its scanner was still open.
        scanner.close();
        reader.close();
    }
    closeOutput();
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) EOFException(java.io.EOFException) Test(org.junit.Test)

Example 14 with Reader

use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.

the class TestTFileByteArrays method testNoDataEntry.

@Test
public void testNoDataEntry() throws IOException {
    if (skip) {
        return;
    }
    closeOutput();
    // An entry-less TFile still reads back as sorted, and its scanner
    // starts out already positioned at the end.
    Reader emptyReader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.assertTrue(emptyReader.isSorted());
    Scanner emptyScanner = emptyReader.createScanner();
    Assert.assertTrue(emptyScanner.atEnd());
    emptyScanner.close();
    emptyReader.close();
}
Also used : Scanner(org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) Test(org.junit.Test)

Example 15 with Reader

use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.

the class TestTFileByteArrays method testFailureOpenRandomFile.

@Test
public void testFailureOpenRandomFile() throws IOException {
    if (skip)
        return;
    closeOutput();
    // create a random file
    path = new Path(fs.getWorkingDirectory(), outputFile);
    out = fs.create(path);
    Random rand = new Random();
    byte[] buf = new byte[K];
    // fill with > 1MB data
    for (int nx = 0; nx < K + 2; nx++) {
        rand.nextBytes(buf);
        out.write(buf);
    }
    out.close();
    // Opening random bytes as a TFile must fail with an IOException.
    Reader reader = null;
    try {
        reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
        Assert.fail("Error on handling random files.");
    } catch (IOException e) {
    // noop, expecting exceptions
    } finally {
        // The original leaked the reader if construction unexpectedly
        // succeeded (Assert.fail throws before any cleanup); close it here.
        if (reader != null) {
            reader.close();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Random(java.util.Random) Reader(org.apache.hadoop.io.file.tfile.TFile.Reader) IOException(java.io.IOException) Test(org.junit.Test)

Aggregations

Reader (org.apache.hadoop.io.file.tfile.TFile.Reader)28 Scanner (org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner)22 Test (org.junit.Test)16 IOException (java.io.IOException)9 EOFException (java.io.EOFException)7 Path (org.apache.hadoop.fs.Path)5 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)4 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)4 BytesWritable (org.apache.hadoop.io.BytesWritable)3 Writer (org.apache.hadoop.io.file.tfile.TFile.Writer)3 DataInputStream (java.io.DataInputStream)1 DataOutputStream (java.io.DataOutputStream)1 Random (java.util.Random)1 Location (org.apache.hadoop.io.file.tfile.TFile.Reader.Location)1