Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileByteArrays, method testLocate.
@Test
public void testLocate() throws IOException {
  if (skip)
    return;
  writeRecords(3 * records1stBlock);
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  // locate keys that are present in the file
  locate(scanner, composeSortedKey(KEY, 2).getBytes());
  locate(scanner, composeSortedKey(KEY, records1stBlock - 1).getBytes());
  locate(scanner, composeSortedKey(KEY, records1stBlock).getBytes());
  // a key not present in the file resolves to the end location
  Location locX = locate(scanner, "keyX".getBytes());
  Assert.assertEquals(scanner.endLocation, locX);
  scanner.close();
  reader.close();
}
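The locate(...) and composeSortedKey(...) helpers are defined elsewhere in TestTFileByteArrays and are not part of this excerpt. A minimal sketch of what they could look like, inferred only from how the test calls them, follows; the key format and the use of the scanner's currentLocation/endLocation fields are assumptions, not the project's actual code.

  // Hypothetical helpers, sketched from the usage above; the real versions may differ.
  static String composeSortedKey(String prefix, int order) {
    // zero-pad the numeric part so lexicographic order matches numeric order (assumed format)
    return String.format("%s%010d", prefix, order);
  }

  static Location locate(Scanner scanner, byte[] key) throws IOException {
    // seekTo() returns true when the key is found; otherwise the test compares
    // the result against the scanner's end location
    if (scanner.seekTo(key)) {
      return scanner.currentLocation;
    }
    return scanner.endLocation;
  }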
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileUnsortedByteArrays, method testScan.
// we can still scan records in an unsorted TFile
@Test
public void testScan() throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  Scanner scanner = reader.createScanner();
  try {
    // read the key and then the value
    byte[] kbuf = new byte[BUF_SIZE];
    int klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), "keyZ");
    byte[] vbuf = new byte[BUF_SIZE];
    int vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), "valueZ");
    scanner.advance();
    // now read the value before the key
    vbuf = new byte[BUF_SIZE];
    vlen = scanner.entry().getValueLength();
    scanner.entry().getValue(vbuf);
    Assert.assertEquals(new String(vbuf, 0, vlen), "valueM");
    kbuf = new byte[BUF_SIZE];
    klen = scanner.entry().getKeyLength();
    scanner.entry().getKey(kbuf);
    Assert.assertEquals(new String(kbuf, 0, klen), "keyM");
  } finally {
    scanner.close();
    reader.close();
  }
}
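The unsorted file that testScan reads is produced by the test's setup code, which is not shown here. A rough sketch of how such a fixture could be written follows; the block size, compression setting, and the extra keyN/keyA entries are assumptions chosen only to be consistent with the entry count of 4 asserted above. Passing a null comparator to TFile.Writer is what marks the file as unsorted.

  // Sketch of writing an unsorted TFile fixture (assumed values, not the project's setup code).
  FSDataOutputStream out = fs.create(path);
  TFile.Writer writer = new TFile.Writer(out, 64 * 1024, TFile.COMPRESSION_NONE, null, conf);
  writer.append("keyZ".getBytes(), "valueZ".getBytes()); // keys appended out of sorted order
  writer.append("keyM".getBytes(), "valueM".getBytes());
  writer.append("keyN".getBytes(), "valueN".getBytes()); // assumed entries to reach a count of 4
  writer.append("keyA".getBytes(), "valueA".getBytes());
  writer.close();
  out.close();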
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileUnsortedByteArrays, method testFailureSeek.
@Test
public void testFailureSeek() throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Scanner scanner = reader.createScanner();
  try {
    // can't find the ceiling of a key
    try {
      scanner.lowerBound("keyN".getBytes());
      Assert.fail("Cannot search in an unsorted TFile!");
    } catch (Exception e) {
      // no-op, expecting exceptions
    }
    // can't find a higher key
    try {
      scanner.upperBound("keyA".getBytes());
      Assert.fail("Cannot search higher in an unsorted TFile!");
    } catch (Exception e) {
      // no-op, expecting exceptions
    }
    // can't seek
    try {
      scanner.seekTo("keyM".getBytes());
      Assert.fail("Cannot seek in an unsorted TFile!");
    } catch (Exception e) {
      // no-op, expecting exceptions
    }
  } finally {
    scanner.close();
    reader.close();
  }
}
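The three failure checks above share the same try/fail/catch shape. One way to tighten this is a small helper like the hypothetical sketch below; ThrowingRunnable and assertFailsOnUnsorted are names invented here, not part of the Hadoop test.

  // Hypothetical helper to collapse the repeated try/fail/catch blocks.
  interface ThrowingRunnable {
    void run() throws Exception;
  }

  static void assertFailsOnUnsorted(String message, ThrowingRunnable op) {
    try {
      op.run();
      Assert.fail(message); // throws AssertionError, which is not caught below
    } catch (Exception expected) {
      // expected: search operations are unsupported on an unsorted TFile
    }
  }

  // Example usage inside the test body:
  // assertFailsOnUnsorted("Cannot seek in an unsorted TFile!",
  //     () -> scanner.seekTo("keyM".getBytes()));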
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileUnsortedByteArrays, method testFailureScannerWithKeys.
// creating a scanner with key bounds must fail on an unsorted TFile
@Test
public void testFailureScannerWithKeys() throws IOException {
  Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf);
  Assert.assertFalse(reader.isSorted());
  Assert.assertEquals((int) reader.getEntryCount(), 4);
  try {
    Scanner scanner = reader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
    Assert.fail("Failed to catch creating scanner with keys on unsorted file.");
  } catch (RuntimeException e) {
    // expected: key-bounded scanners require a sorted TFile
  } finally {
    reader.close();
  }
}
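For contrast, a key-bounded scanner is the intended tool on a sorted TFile. A minimal sketch follows, assuming a Reader (here called sortedReader, a hypothetical name) opened over a sorted file:

  // Sketch only: on a sorted TFile, createScannerByKey restricts the scan to the requested key range.
  Scanner ranged = sortedReader.createScannerByKey("aaa".getBytes(), "zzz".getBytes());
  try {
    while (!ranged.atEnd()) {
      // entries visited here are limited to the requested key range
      ranged.advance();
    }
  } finally {
    ranged.close();
  }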
Use of org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner in project hadoop by apache.
The class TestTFileSplit, method readFile.
void readFile() throws IOException {
  long fileLength = fs.getFileStatus(path).getLen();
  int numSplit = 10;
  long splitSize = fileLength / numSplit + 1;
  Reader reader = new Reader(fs.open(path), fileLength, conf);
  long offset = 0;
  long rowCount = 0;
  BytesWritable key, value;
  for (int i = 0; i < numSplit; ++i, offset += splitSize) {
    // scan one byte-range split at a time
    Scanner scanner = reader.createScannerByByteRange(offset, splitSize);
    int count = 0;
    key = new BytesWritable();
    value = new BytesWritable();
    while (!scanner.atEnd()) {
      scanner.entry().get(key, value);
      ++count;
      scanner.advance();
    }
    scanner.close();
    assertTrue(count > 0);
    rowCount += count;
  }
  // every record is visited exactly once across all splits
  assertEquals(rowCount, reader.getEntryCount());
  reader.close();
}
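The final assertion checks that the byte-range splits collectively visit every record exactly once. A similar traversal can be driven by record numbers instead of byte offsets; the sketch below assumes TFile.Reader's createScannerByRecordNum method and uses an illustrative split computation that is not taken from TestTFileSplit.

  // Sketch: split the scan by record number rather than byte range (assumed API usage).
  long totalRecords = reader.getEntryCount();
  int numSplit = 10;
  long recordsPerSplit = totalRecords / numSplit + 1;
  long rows = 0;
  for (long start = 0; start < totalRecords; start += recordsPerSplit) {
    long end = Math.min(start + recordsPerSplit, totalRecords);
    Scanner scanner = reader.createScannerByRecordNum(start, end);
    while (!scanner.atEnd()) {
      scanner.entry().get(new BytesWritable(), new BytesWritable());
      ++rows;
      scanner.advance();
    }
    scanner.close();
  }
  assertEquals(rows, reader.getEntryCount());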