use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.
the class TestTFile, method basicWithSomeCodec.
/**
 * Basic write/read round trip with the given compression codec.
 */
void basicWithSomeCodec(String codec) throws IOException {
Path ncTFile = new Path(ROOT, "basic.tfile");
FSDataOutputStream fout = createFSOutput(ncTFile);
Writer writer = new Writer(fout, minBlockSize, codec, "memcmp", conf);
writeRecords(writer);
fout.close();
FSDataInputStream fin = fs.open(ncTFile);
Reader reader = new Reader(fin, fs.getFileStatus(ncTFile).getLen(), conf);
Scanner scanner = reader.createScanner();
readAllRecords(scanner);
assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50)));
// read the key and see if it matches
byte[] readKey = readKey(scanner);
assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), readKey));
scanner.seekTo(new byte[0]);
byte[] val1 = readValue(scanner);
scanner.seekTo(new byte[0]);
byte[] val2 = readValue(scanner);
assertTrue(Arrays.equals(val1, val2));
// check for lowerBound
scanner.lowerBound(getSomeKey(50));
assertTrue("locaton lookup failed", scanner.currentLocation.compareTo(reader.end()) < 0);
readKey = readKey(scanner);
assertTrue("seeked key does not match", Arrays.equals(readKey, getSomeKey(50)));
// check for upper bound
scanner.upperBound(getSomeKey(50));
assertTrue("location lookup failed", scanner.currentLocation.compareTo(reader.end()) < 0);
readKey = readKey(scanner);
assertTrue("seeked key does not match", Arrays.equals(readKey, getSomeKey(51)));
scanner.close();
// test a scanner restricted to a key range
scanner = reader.createScannerByKey(getSomeKey(10), getSomeKey(60));
readAndCheckbytes(scanner, 10, 50);
assertFalse(scanner.advance());
scanner.close();
reader.close();
fin.close();
fs.delete(ncTFile, true);
}
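For context, a helper like this is normally invoked once per codec under test. A minimal sketch of such a driver, assuming the JUnit fixture above (the method name testTFileFeatures is illustrative; "none" and "gz" are the compression names TFile accepts for uncompressed and gzip output):

@Test
public void testTFileFeatures() throws IOException {
  // same round trip, with and without compression
  basicWithSomeCodec("none");
  basicWithSomeCodec("gz");
}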
use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.
the class TestTFileByteArrays, method testFailureGetNonExistentMetaBlock.
@Test
public void testFailureGetNonExistentMetaBlock() throws IOException {
if (skip)
return;
writer.append("keyX".getBytes(), "valueX".getBytes());
// create a new metablock
DataOutputStream outMeta = writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
outMeta.write(123);
outMeta.write("foo".getBytes());
outMeta.close();
closeOutput();
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
DataInputStream mb = reader.getMetaBlock("testX");
Assert.assertNotNull(mb);
mb.close();
try {
reader.getMetaBlock("testY");
Assert.fail("getMetaBlock should fail for a non-existent meta block.");
} catch (MetaBlockDoesNotExist e) {
// expected
}
reader.close();
}
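The mirror-image failure exists on the write side: preparing a meta block under a name that is already taken raises MetaBlockAlreadyExists. A minimal sketch of that check, assuming a still-open writer that has already created a meta block named "testX" (unlike the test above, where the writer is closed before reading):

try {
  writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName());
  Assert.fail("prepareMetaBlock should fail when the name is already used.");
} catch (MetaBlockAlreadyExists e) {
  // expected
}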
use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.
the class TestTFileByteArrays, method checkBlockIndex.
private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOException {
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Scanner scanner = reader.createScanner();
scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes());
Assert.assertEquals(blockIndexExpected, scanner.currentLocation.getBlockIndex());
scanner.close();
reader.close();
}
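Callers pair a record index with the compressed block it is expected to occupy. The concrete numbers depend on the record size and the block size the fixture was written with, so the values below are purely illustrative:

// hypothetical expectations for a file whose first data block
// holds roughly the first few thousand records
checkBlockIndex(0, 0);      // first record lands in block 0
checkBlockIndex(4000, 1);   // a later record has spilled into block 1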
use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.
the class TestTFileByteArrays, method readRecords.
static void readRecords(FileSystem fs, Path path, int count, Configuration conf) throws IOException {
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Scanner scanner = reader.createScanner();
try {
for (int nx = 0; nx < count; nx++, scanner.advance()) {
Assert.assertFalse(scanner.atEnd());
byte[] kbuf = new byte[BUF_SIZE];
int klen = scanner.entry().getKeyLength();
scanner.entry().getKey(kbuf);
Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, nx));
byte[] vbuf = new byte[BUF_SIZE];
int vlen = scanner.entry().getValueLength();
scanner.entry().getValue(vbuf);
Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + nx);
}
Assert.assertTrue(scanner.atEnd());
Assert.assertFalse(scanner.advance());
} finally {
scanner.close();
reader.close();
}
}
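A matching write-side helper makes the assertions above concrete. A minimal sketch, assuming the same composeSortedKey helper and the KEY/VALUE constants from this class; the block size and codec are arbitrary choices, not the fixture's actual settings:

static void writeRecords(FileSystem fs, Path path, int count, Configuration conf) throws IOException {
  FSDataOutputStream out = fs.create(path);
  Writer writer = new Writer(out, 64 * 1024, "gz", "memcmp", conf);
  try {
    for (int nx = 0; nx < count; nx++) {
      // a "memcmp" TFile requires keys in ascending byte order,
      // which composeSortedKey is assumed to guarantee
      writer.append(composeSortedKey(KEY, nx).getBytes(), (VALUE + nx).getBytes());
    }
  } finally {
    writer.close();
    out.close();
  }
}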
use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache.
the class TestTFileByteArrays, method testFailureOpenEmptyFile.
@Test
public void testFailureOpenEmptyFile() throws IOException {
if (skip)
return;
closeOutput();
// create an absolutely empty file
path = new Path(fs.getWorkingDirectory(), outputFile);
out = fs.create(path);
out.close();
try {
new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Assert.fail("Error on handling empty files.");
} catch (EOFException e) {
// noop, expecting exceptions
}
}
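The EOFException arises because the Reader begins by reading the version and magic bytes stored at the tail of the file, which a zero-length file cannot supply. A caller that wants a clearer error can guard on the length first; a minimal sketch (the message text is illustrative):

long length = fs.getFileStatus(path).getLen();
if (length == 0) {
  throw new IOException("cannot open empty TFile: " + path);
}
Reader reader = new Reader(fs.open(path), length, conf);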