Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileUnsortedByteArrays, method testScanRange.
// we still can scan records in an unsorted TFile
@Test
public void testScanRange() throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  Scanner scanner = reader.createScanner();
  try {
    // read key and value
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), "keyZ");
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), "valueZ");
    scanner.advance();
    // now try getting the value first
    vbuf = new byte[BUF_SIZE];
    vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), "valueM");
    kbuf = new byte[BUF_SIZE];
    klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), "keyM");
  } finally {
    scanner.close();
    reader.close();
  }
}
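For context, a minimal sketch of the writer side that could produce such a file; fs, path, conf, and minBlockSize are assumed to come from the test fixture, and the records shown are only the two the test actually reads. Passing null as the comparator creates an unsorted TFile, which is why "keyZ" can legitimately precede "keyM".

// Hypothetical fixture sketch, not the test's actual setup code.
FSDataOutputStream fout = fs.create(path);
Writer writer = new Writer(fout, minBlockSize, TFile.COMPRESSION_NONE, null, conf);
// comparator == null => unsorted TFile; keys may be appended in any order
writer.append("keyZ".getBytes(), "valueZ".getBytes());
writer.append("keyM".getBytes(), "valueM".getBytes());
// ... the real fixture appends two more records, matching getEntryCount() == 4
writer.close();
fout.close();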
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileSeek, method seekTFile.
public void seekTFile() throws IOException {
  int miss = 0;
  long totalBytes = 0;
  FSDataInputStream fsdis = fs.open(path);
  Reader reader = new Reader(fsdis, fs.getFileStatus(path).getLen(), conf);
  KeySampler kSampler =
      new KeySampler(rng, reader.getFirstKey(), reader.getLastKey(), keyLenGen);
  Scanner scanner = reader.createScanner();
  BytesWritable key = new BytesWritable();
  BytesWritable val = new BytesWritable();
  timer.reset();
  timer.start();
  for (int i = 0; i < options.seekCount; ++i) {
    kSampler.next(key);
    scanner.lowerBound(key.getBytes(), 0, key.getLength());
    if (!scanner.atEnd()) {
      scanner.entry().get(key, val);
      totalBytes += key.getLength();
      totalBytes += val.getLength();
    } else {
      ++miss;
    }
  }
  timer.stop();
  // elapsed time in microseconds
  double duration = (double) timer.read() / 1000;
  System.out.printf(
      "time: %s...avg seek: %s...%d hit...%d miss...avg I/O size: %.2fKB\n",
      timer.toString(),
      NanoTimer.nanoTimeToString(timer.read() / options.seekCount),
      options.seekCount - miss, miss,
      (double) totalBytes / 1024 / (options.seekCount - miss));
  scanner.close();
  reader.close();
  fsdis.close();
}
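The benchmark leans on Scanner.lowerBound, which positions the cursor at the first entry whose key is greater than or equal to the probe; a miss is therefore only counted when the sampled key sorts past the last key in the file. A short sketch of the contrast with exact-match seeking (probeKey is a hypothetical byte array, not part of the benchmark):

// lowerBound: lands on the smallest key >= probeKey, or atEnd() if none exists
scanner.lowerBound(probeKey, 0, probeKey.length);
boolean missed = scanner.atEnd();
// seekTo: returns true only when an entry with exactly this key is present
boolean exactHit = scanner.seekTo(probeKey, 0, probeKey.length);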
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFile, method unsortedWithSomeCodec.
// unsorted with some codec
void unsortedWithSomeCodec(String codec) throws IOException {
  Path uTfile = new Path(ROOT, "unsorted.tfile");
  FSDataOutputStream fout = createFSOutput(uTfile);
  Writer writer = new Writer(fout, minBlockSize, codec, null, conf);
  writeRecords(writer);
  writer.close();
  fout.close();
  // reuse the already-opened stream instead of opening the file a second time
  FSDataInputStream fin = fs.open(uTfile);
  Reader reader = new Reader(fin, fs.getFileStatus(uTfile).getLen(), conf);
  Scanner scanner = reader.createScanner();
  readAllRecords(scanner);
  scanner.close();
  reader.close();
  fin.close();
  fs.delete(uTfile, true);
}
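readAllRecords is a helper defined elsewhere in the test; a plausible sketch under the Scanner API shown in these snippets (the verification step is an assumption) simply walks the cursor to the end:

// Hypothetical sketch of the readAllRecords helper used above.
void readAllRecords(Scanner scanner) throws IOException {
  while (!scanner.atEnd()) {
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    // ... compare new String(kbuf, 0, klen) against the expected record here
    scanner.advance();
  }
}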
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileByteArrays, method readValueWithoutKey.
private void readValueWithoutKey(int recordIndex) throws IOException {
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner =
      reader.createScannerByKey(composeSortedKey(KEY, recordIndex).getBytes(), null);
  byte[] vbuf1 = new byte[BUF_SIZE];
  int vlen1 = scanner.entry().getValueLength();
  scanner.entry().getValue(vbuf1);
  Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex);
  if (scanner.advance() && !scanner.atEnd()) {
    byte[] vbuf2 = new byte[BUF_SIZE];
    int vlen2 = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf2);
    Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1));
  }
  scanner.close();
  reader.close();
}
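composeSortedKey is another fixture helper; the essential property is that the numeric suffix must be zero-padded so that lexicographic byte order agrees with numeric order (otherwise "key10" would sort before "key2"). A plausible sketch, with the pad width an assumption:

// Hypothetical sketch: zero-pad the index so byte-wise ordering matches numeric order.
static String composeSortedKey(String prefix, int recordIndex) {
  return String.format("%s%010d", prefix, recordIndex);
}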
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileByteArrays, method testFailureNegativeLength_3.
@Test
public void testFailureNegativeLength_3() throws IOException {
  if (skip) {
    return;
  }
  writeRecords(3);
  Reader reader =
      new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // test negative array offset
    try {
      scanner.seekTo("keyY".getBytes(), -1, 4);
      Assert.fail("Failed to handle negative offset.");
    } catch (Exception e) {
      // no-op, expecting an exception
    }
    // test negative array length
    try {
      scanner.seekTo("keyY".getBytes(), 0, -2);
      Assert.fail("Failed to handle negative key length.");
    } catch (Exception e) {
      // no-op, expecting an exception
    }
  } finally {
    // close the scanner before the reader that created it
    scanner.close();
    reader.close();
  }
}
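The catch blocks above accept any Exception, which keeps the test independent of the exact type TFile throws for invalid bounds. A hedged helper (names illustrative, not part of the test) makes the pattern reusable:

// Hypothetical assertion helper for "this call must throw".
interface ThrowingRunnable {
  void run() throws Exception;
}

static void assertThrowsAny(String failureMessage, ThrowingRunnable action) {
  try {
    action.run();
    Assert.fail(failureMessage);
  } catch (Exception e) {
    // expected: any exception means the bounds check fired
  }
}

For example: assertThrowsAny("Failed to handle negative offset.", () -> scanner.seekTo("keyY".getBytes(), -1, 4));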