Usage example of org.apache.hadoop.hbase.io.hfile.ReaderContext in the Apache HBase project: class HStoreFile, method createStreamReader.
/**
 * Opens a STREAM-type reader for this store file, e.g. for compactions.
 * @param canUseDropBehind whether the caller permits drop-behind on the read
 * @return a fully initialized stream {@link StoreFileReader}
 * @throws IOException if the underlying file cannot be opened
 */
private StoreFileReader createStreamReader(boolean canUseDropBehind) throws IOException {
  initReader();
  // Drop-behind is applied only when both the caller allows it and the cache
  // configuration enables it for compactions.
  final boolean dropBehind = canUseDropBehind && cacheConf.shouldDropBehindCompaction();
  ReaderContext streamContext = fileInfo.createReaderContext(dropBehind, -1, ReaderType.STREAM);
  StoreFileReader streamReader = fileInfo.preStoreFileReaderOpen(streamContext, cacheConf);
  if (streamReader == null) {
    streamReader = fileInfo.createReader(streamContext, cacheConf);
    // The stream reader must copy state over from the pread reader.
    streamReader.copyFields(initialReader);
  }
  return fileInfo.postStoreFileReaderOpen(streamContext, cacheConf, streamReader);
}
Usage example of org.apache.hadoop.hbase.io.hfile.ReaderContext in the Apache HBase project: class TestHalfStoreFileReader, method doTestOfSeekBefore.
/**
 * Opens a half-store-file reader over the given reference file, seeks the
 * scanner to just before {@code seekBefore}, and returns the cell found there.
 */
private Cell doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, Cell seekBefore, CacheConfig cacheConfig) throws IOException {
  ReaderContext readerContext = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
  HFileInfo info = new HFileInfo(readerContext, TEST_UTIL.getConfiguration());
  HalfStoreFileReader reader = new HalfStoreFileReader(readerContext, info, cacheConfig, bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
  // Meta and index must be initialized before the file info can be loaded.
  info.initMetaAndIndex(reader.getHFileReader());
  reader.loadFileInfo();
  HFileScanner scanner = reader.getScanner(false, false);
  scanner.seekBefore(seekBefore);
  return scanner.getCell();
}
Usage example of org.apache.hadoop.hbase.io.hfile.ReaderContext in the Apache HBase project: class TestStoreFileInfo, method testOpenErrorMessageHFileLink.
@Test
public void testOpenErrorMessageHFileLink() throws IOException, IllegalStateException {
  // Try to open a nonsense hfilelink. Make sure the resulting exception is
  // raised by HFileLink so the error message points at the link itself.
  Path p = new Path("/hbase/test/0123/cf/testtb=4567-abcd");
  try (FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration())) {
    StoreFileInfo sfi = new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, p, true);
    try {
      ReaderContext context = sfi.createReaderContext(false, 1000, ReaderType.PREAD);
      sfi.createReader(context, null);
      // Reaching here means no FileNotFoundException was thrown; fail loudly
      // with a message that explains what was expected.
      throw new IllegalStateException("Expected a FileNotFoundException for bogus HFileLink " + p);
    } catch (FileNotFoundException fnfe) {
      // The message should identify HFileLink as the source of the failure.
      assertTrue(fnfe.getMessage().contains(HFileLink.class.getSimpleName()));
    }
  }
}
Usage example of org.apache.hadoop.hbase.io.hfile.ReaderContext in the Apache HBase project: class TestStoreFileInfo, method testOpenErrorMessageReference.
@Test
public void testOpenErrorMessageReference() throws IOException {
  // Test the Reference-file error path (not hfilelink): try to open a
  // reference whose referenced store file does not exist, and verify the
  // exception message contains the "->" dereference marker.
  Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "4567.abcd");
  FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
  fs.mkdirs(p.getParent());
  Reference r = Reference.createBottomReference(HConstants.EMPTY_START_ROW);
  r.write(fs, p);
  StoreFileInfo sfi = new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, p, true);
  try {
    ReaderContext context = sfi.createReaderContext(false, 1000, ReaderType.PREAD);
    sfi.createReader(context, null);
    // Reaching here means no FileNotFoundException was thrown; fail loudly
    // with a message that explains what was expected.
    throw new IllegalStateException("Expected a FileNotFoundException for dangling Reference " + p);
  } catch (FileNotFoundException fnfe) {
    // The "->" marker shows the reference was dereferenced to its target.
    assertTrue(fnfe.getMessage().contains("->"));
  }
}
Usage example of org.apache.hadoop.hbase.io.hfile.ReaderContext in the Apache HBase project: class TestRowPrefixBloomFilter, method testRowPrefixBloomFilterWithGet.
@Test
public void testRowPrefixBloomFilterWithGet() throws Exception {
  FileSystem localFs = FileSystem.getLocal(conf);
  int expectedKeys = fixedLengthExpKeys;
  // Write a store file carrying a row-prefix bloom filter.
  Path filePath = new Path(testDir, name.getMethodName());
  writeStoreFile(filePath, bt, expectedKeys);
  // Open a reader over it and load the bloom filter.
  ReaderContext readerContext = new ReaderContextBuilder().withFileSystemAndPath(localFs, filePath).build();
  HFileInfo info = new HFileInfo(readerContext, conf);
  StoreFileReader sfReader = new StoreFileReader(readerContext, info, cacheConf, new AtomicInteger(0), conf);
  info.initMetaAndIndex(sfReader.getHFileReader());
  sfReader.loadFileInfo();
  sfReader.loadBloomfilter();
  StoreFileScanner sfScanner = getStoreFileScanner(sfReader);
  HStore mockStore = mock(HStore.class);
  when(mockStore.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
  // Gets with valid row style (row is at least prefixLength long).
  // A prefix that exists in the bloom filter should pass the scanner check.
  String presentPrefix = String.format(prefixFormatter, prefixRowCount - 2);
  Scan scan = new Scan(new Get(Bytes.toBytes(generateRowWithSuffix(presentPrefix, 0))));
  boolean shouldUse = sfScanner.shouldUseScanner(scan, mockStore, Long.MIN_VALUE);
  assertTrue(shouldUse);
  // A prefix that was never written should be filtered out.
  String absentPrefix = String.format(prefixFormatter, prefixRowCount - 1);
  scan = new Scan(new Get(Bytes.toBytes(generateRowWithSuffix(absentPrefix, 0))));
  shouldUse = sfScanner.shouldUseScanner(scan, mockStore, Long.MIN_VALUE);
  assertFalse(shouldUse);
  // Gets with invalid row style (row shorter than prefixLength).
  // Row present in the bloom filter.
  scan = new Scan(new Get(Bytes.toBytes(String.format(invalidFormatter, prefixRowCount + 2))));
  shouldUse = sfScanner.shouldUseScanner(scan, mockStore, Long.MIN_VALUE);
  assertTrue(shouldUse);
  // Row absent from the bloom filter.
  scan = new Scan(new Get(Bytes.toBytes(String.format(invalidFormatter, prefixRowCount + 1))));
  shouldUse = sfScanner.shouldUseScanner(scan, mockStore, Long.MIN_VALUE);
  assertFalse(shouldUse);
  // Evict cached blocks because we are about to delete the file.
  sfReader.close(true);
  localFs.delete(filePath, true);
}
Aggregations