Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
In class TestTFileByteArrays, method readValueBeforeKey:
private void readValueBeforeKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex);
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, recordIndex));
  } finally {
    scanner.close();
    reader.close();
  }
}
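composeSortedKey is a test helper that is not part of this snippet. A minimal sketch of one plausible implementation, assuming the record index is zero-padded behind the prefix so that byte-wise key order matches numeric record order (the real TestTFileByteArrays may differ):

// Hypothetical sketch of the composeSortedKey helper referenced above.
// Zero-padding keeps lexicographic (memcmp) key order consistent with
// the numeric record order used by the tests.
static String composeSortedKey(String prefix, int recordIndex) {
  return String.format("%s%010d", prefix, recordIndex);
}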
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
In class TestTFileByteArrays, method readKeyWithoutValue:
private void readKeyWithoutValue(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
  try {
    // read the indexed key
    byte[] kbuf1 = new byte[BUF_SIZE];
    int klen1 = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf1);
    Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex));
    if (scanner.advance() && !scanner.atEnd()) {
      // read the key following the indexed one
      byte[] kbuf2 = new byte[BUF_SIZE];
      int klen2 = scanner.entry().getKeyLength();
      scanner.entry().getKey(kbuf2);
      Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, recordIndex + 1));
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
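The same advance()/atEnd() pair also supports a full sequential scan. A minimal sketch under the same setup (reader opened over path; per-entry buffers sized from the entry lengths rather than a fixed BUF_SIZE):

// Sketch: iterate over every key in the TFile with the Scanner API.
Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
Scanner scanner = reader.createScanner();
try {
  while (!scanner.atEnd()) {
    byte[] key = new byte[scanner.entry().getKeyLength()];
    scanner.entry().getKey(key);
    // ... process key ...
    scanner.advance();
  }
} finally {
  scanner.close();
  reader.close();
}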
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
In class TestTFileByteArrays, method testFailureReadValueManyTimes:
@Test
public void testFailureReadValueManyTimes() throws IOException {
  if (skip)
    return;
  writeRecords(5);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  byte[] vbuf = new byte[BUF_SIZE];
  int vlen = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf);
  Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0);
  try {
    scanner.entry().getValue(vbuf);
    Assert.fail("Cannot get the value multiple times.");
  } catch (Exception e) {
    // noop, expecting exceptions
  }
  scanner.close();
  reader.close();
}
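writeRecords is a fixture helper that is not shown here. A hypothetical sketch of what it could look like, assuming an uncompressed TFile with memcmp key ordering (the block size, compression, and comparator below are assumptions, not the test's actual settings):

// Hypothetical sketch of a writeRecords helper consistent with the reads above.
private void writeRecords(int count) throws IOException {
  FSDataOutputStream out = fs.create(path);
  TFile.Writer writer =
      new TFile.Writer(out, 64 * 1024, TFile.COMPRESSION_NONE, TFile.COMPARATOR_MEMCMP, conf);
  try {
    for (int i = 0; i < count; i++) {
      writer.append(composeSortedKey(KEY, i).getBytes(), (VALUE + i).getBytes());
    }
  } finally {
    writer.close();
    out.close();
  }
}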
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
In class TestTFileByteArrays, method testFailureNegativeLength_2:
@Test
public void testFailureNegativeLength_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), 0, -1);
    Assert.fail("Error on handling negative length.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
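For contrast with the negative-length failure above, a valid lowerBound call with a non-negative offset and length positions the scanner at the first entry whose key is greater than or equal to the probe key; a minimal sketch using the same scanner:

// Sketch: the non-failing form of lowerBound with a valid (offset, length) range.
byte[] probe = "keyX".getBytes();
scanner.lowerBound(probe, 0, probe.length);
if (!scanner.atEnd()) {
  byte[] kbuf = new byte[scanner.entry().getKeyLength()];
  scanner.entry().getKey(kbuf);
}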
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
In class TestTFileByteArrays, method testFailureNegativeOffset_2:
@Test
public void testFailureNegativeOffset_2() throws IOException {
  if (skip)
    return;
  closeOutput();
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    scanner.lowerBound("keyX".getBytes(), -1, 4);
    Assert.fail("Error on handling negative offset.");
  } catch (Exception e) {
    // noop, expecting exceptions
  } finally {
    scanner.close();
    reader.close();
  }
  closeOutput();
}
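Since Reader and Scanner both expose close(), the explicit finally blocks in these tests could also be written with try-with-resources, which closes resources in reverse declaration order (scanner first, then reader); a sketch, assuming both types implement java.io.Closeable:

// Sketch: try-with-resources variant of the open/scan/close pattern used in these tests.
try (Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
     Scanner scanner = reader.createScanner()) {
  // ... assertions against scanner.entry() ...
}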