Usage of org.apache.hadoop.io.file.tfile.TFile.Reader in the Apache Hadoop project.
From the class TestTFileByteArrays, method testFailureWriterNotClosed.
@Test
public void testFailureWriterNotClosed() throws IOException {
  if (skip) {
    return;
  }
  // Opening the TFile while its writer is still open must be rejected;
  // try-with-resources also closes the reader in the (unexpected) case
  // construction succeeds before Assert.fail fires.
  try (Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf)) {
    Assert.fail("Cannot read before closing the writer.");
  } catch (IOException expected) {
    // expected: file is not yet a complete TFile
  }
}
Usage of org.apache.hadoop.io.file.tfile.TFile.Reader in the Apache Hadoop project.
From the class TestTFileByteArrays, method testFailureNegativeLength_2.
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip) {
    return;
  }
  closeOutput();
  // try-with-resources closes scanner then reader, the same order as the
  // original explicit finally block.
  try (Reader reader =
          new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
      Scanner scanner = reader.createScanner()) {
    try {
      // A negative length must be rejected by lowerBound().
      scanner.lowerBound("keyX".getBytes(), 0, -1);
      Assert.fail("Error on handling negative length.");
    } catch (Exception expected) {
      // expected
    }
  }
  closeOutput();
}
Usage of org.apache.hadoop.io.file.tfile.TFile.Reader in the Apache Hadoop project.
From the class TestTFileByteArrays, method testFailureNegativeOffset_2.
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // A negative offset must be rejected by lowerBound().
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    // Fix: close the scanner BEFORE its reader (the original closed the
    // reader first, which pulls the underlying file out from under the
    // still-open scanner and is inconsistent with testFailureNegativeLength_2).
    scanner.close();
    reader.close();
  }
  closeOutput();
}
Usage of org.apache.hadoop.io.file.tfile.TFile.Reader in the Apache Hadoop project.
From the class TestTFileByteArrays, method testNoDataEntry.
@Test
public void testNoDataEntry() throws IOException {
  if (skip)
    return;
  closeOutput();
  // Fix: the original closed reader/scanner only on the success path, leaking
  // both if either assertion failed. try-with-resources closes them always.
  try (Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf)) {
    Assert.assertTrue(reader.isSorted());
    try (Scanner scanner = reader.createScanner()) {
      // A TFile written with no entries should scan as already at end.
      Assert.assertTrue(scanner.atEnd());
    }
  }
}
Usage of org.apache.hadoop.io.file.tfile.TFile.Reader in the Apache Hadoop project.
From the class TestTFileByteArrays, method testFailureOpenRandomFile.
@Test
public void testFailureOpenRandomFile() throws IOException {
  if (skip)
    return;
  closeOutput();
  // Create a file of random bytes that is not a valid TFile.
  path = new Path(fs.getWorkingDirectory(), outputFile);
  out = fs.create(path);
  try {
    Random rand = new Random();
    byte[] buf = new byte[K];
    // fill with > 1MB data
    for (int nx = 0; nx < K + 2; nx++) {
      rand.nextBytes(buf);
      out.write(buf);
    }
  } finally {
    // Fix: close the stream even if a write throws (the original leaked it).
    out.close();
  }
  Reader reader = null;
  try {
    reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
    Assert.fail("Error on handling random files.");
  } catch (IOException e) {
    // noop, expecting exceptions
  } finally {
    // Fix: don't leak the reader in the (unexpected) case construction
    // succeeds and Assert.fail throws.
    if (reader != null) {
      reader.close();
    }
  }
}
Aggregations