Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache: class TestTFileByteArrays, method readValueWithoutKey.
private void readValueWithoutKey(int recordIndex) throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScannerByKey(
      composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    byte[] vbuf1 = new byte[BUF_SIZE];
    int vlen1 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf1);
    Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
    if (scanner.advance() && !scanner.atEnd()) {
      byte[] vbuf2 = new byte[BUF_SIZE];
      int vlen2 = scanner.entry().getValueLength();
      scanner.entry().getValue(vbuf2);
      Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache: class TestTFileByteArrays, method testFailureNegativeLength_3.
@Test
public void testFailureNegativeLength_3() throws IOException {
  if (skip)
    return;
  writeRecords(3);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // test negative array offset
    try {
      scanner.seekTo("keyY".getBytes(), -1, 4);
      Assert.fail("Failed to handle negative offset.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
    // test negative array length
    try {
      scanner.seekTo("keyY".getBytes(), 0, -2);
      Assert.fail("Failed to handle negative key length.");
    } catch (Exception e) {
      // noop, expecting exceptions
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
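For contrast with the failure cases, a well-formed seekTo passes an offset and length that lie within the key buffer. A short sketch under the same fixtures; the boolean return signals whether the exact key was found.

// Sketch: a well-formed seekTo call, for contrast with the negative
// offset/length cases above. Assumes fs, path, and conf as in the tests.
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Scanner scanner = reader.createScanner();
try {
  byte[] key = "keyY".getBytes();
  // offset 0 and length key.length are in bounds, unlike the cases above;
  // seekTo returns true only if the exact key exists in the file
  boolean exactMatch = scanner.seekTo(key, 0, key.length);
} finally {
  scanner.close();
  reader.close();
}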
Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache: class TestTFileByteArrays, method readValueBeforeKey.
private void readValueBeforeKey(int recordIndex) throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScannerByKey(
      composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
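composeSortedKey is a test helper whose body is not shown in these excerpts. A plausible sketch (an assumption, not necessarily the project's implementation): zero-pad the record index so that raw-byte ordering of keys matches numeric record order, which is what createScannerByKey relies on.

// Hypothetical composeSortedKey-style helper: zero-padding makes
// lexicographic byte order agree with numeric record order.
private static String composeSortedKey(String prefix, int recordIndex) {
  return String.format("%s%010d", prefix, recordIndex);
}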
Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache: class TestTFileByteArrays, method readKeyWithoutValue.
private void readKeyWithoutValue(int recordIndex) throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScannerByKey(
      composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex));
    if (scanner.advance() && !scanner.atEnd()) {
      // read the next key following the indexed
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
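The scanners above pass null as the end key, so they run to the end of the file. createScannerByKey also accepts an end key to bound the range; a minimal sketch under the same fixtures (the end key is treated as exclusive here, an assumption worth verifying against the TFile javadoc).

// Sketch: bound a scan to a key range instead of scanning to EOF.
// Assumes the same fixtures (fs, path, conf, KEY) as the tests above.
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Scanner scanner = reader.createScannerByKey(
    composeSortedKey(KEY, 2).getBytes(),   // first key in range
    composeSortedKey(KEY, 5).getBytes());  // scan stops at this key
try {
  while (!scanner.atEnd()) {
    // visits entries for record indices 2, 3, 4 (assuming exclusive end)
    scanner.advance();
  }
} finally {
  scanner.close();
  reader.close();
}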
Use of org.apache.hadoop.io.file.tfile.TFile.Reader in project hadoop by apache: class TestTFileByteArrays, method testFailureReadValueManyTimes.
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] vbuf = new byte[BUF_SIZE];
  int vlen = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf);
  Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
  try {
    scanner.entry().getValue(vbuf);
    Assert.fail("Cannot get the value multiple times.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
}
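These tests rely on a writeRecords helper that is not shown. A minimal sketch of the writer side, assuming records are appended in sorted key order with the built-in memcmp comparator and no compression; count is a hypothetical parameter standing in for the number of records.

// Sketch of a writeRecords-style helper (an assumption, not the
// project's actual code). TFile requires keys to be appended in
// sorted order when a comparator such as "memcmp" is specified.
FSDataOutputStream out = fs.create(path);
TFile.Writer writer = new TFile.Writer(out, 64 * 1024, "none", "memcmp", conf);
try {
  for (int i = 0; i < count; i++) {
    writer.append(composeSortedKey(KEY, i).getBytes(), (VALUE + i).getBytes());
  }
} finally {
  writer.close();
  out.close();
}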