Search in sources :

Example 1 with ReaderContext

use of org.apache.hadoop.hbase.io.hfile.ReaderContext in project hbase by apache.

In the class HStoreFile, the method createStreamReader:

/**
 * Opens a STREAM-type reader for this store file, used for compactions.
 * Falls back to creating a fresh reader when no coprocessor supplies one.
 */
private StoreFileReader createStreamReader(boolean canUseDropBehind) throws IOException {
    initReader();
    final boolean dropBehind = canUseDropBehind && cacheConf.shouldDropBehindCompaction();
    final ReaderContext streamContext = fileInfo.createReaderContext(dropBehind, -1, ReaderType.STREAM);
    StoreFileReader streamReader = fileInfo.preStoreFileReaderOpen(streamContext, cacheConf);
    if (streamReader == null) {
        streamReader = fileInfo.createReader(streamContext, cacheConf);
        // The stream reader needs to copy fields from the pread reader.
        streamReader.copyFields(initialReader);
    }
    return fileInfo.postStoreFileReaderOpen(streamContext, cacheConf, streamReader);
}
Also used : ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext)

Example 2 with ReaderContext

use of org.apache.hadoop.hbase.io.hfile.ReaderContext in project hbase by apache.

In the class TestHalfStoreFileReader, the method doTestOfSeekBefore:

/**
 * Opens the half store file at {@code p}, seeks the scanner to just before
 * {@code seekBefore}, and returns the cell the scanner lands on.
 */
private Cell doTestOfSeekBefore(Path p, FileSystem fs, Reference bottom, Cell seekBefore, CacheConfig cacheConfig) throws IOException {
    final ReaderContext readerContext = new ReaderContextBuilder().withFileSystemAndPath(fs, p).build();
    final HFileInfo info = new HFileInfo(readerContext, TEST_UTIL.getConfiguration());
    final HalfStoreFileReader reader = new HalfStoreFileReader(readerContext, info, cacheConfig, bottom, new AtomicInteger(0), TEST_UTIL.getConfiguration());
    info.initMetaAndIndex(reader.getHFileReader());
    reader.loadFileInfo();
    // Scanner without block caching and without positional read.
    final HFileScanner fileScanner = reader.getScanner(false, false);
    fileScanner.seekBefore(seekBefore);
    return fileScanner.getCell();
}
Also used : AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext) ReaderContextBuilder(org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileInfo(org.apache.hadoop.hbase.io.hfile.HFileInfo)

Example 3 with ReaderContext

use of org.apache.hadoop.hbase.io.hfile.ReaderContext in project hbase by apache.

In the class TestStoreFileInfo, the method testOpenErrorMessageHFileLink:

@Test
public void testOpenErrorMessageHFileLink() throws IOException, IllegalStateException {
    // Opening a nonsense hfile link must fail with a FileNotFoundException
    // whose message names HFileLink, so the failing layer is identifiable.
    Path linkPath = new Path("/hbase/test/0123/cf/testtb=4567-abcd");
    try (FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration())) {
        StoreFileInfo storeFileInfo = new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, linkPath, true);
        try {
            ReaderContext readerContext = storeFileInfo.createReaderContext(false, 1000, ReaderType.PREAD);
            storeFileInfo.createReader(readerContext, null);
            // Reaching here means no exception was thrown: fail loudly.
            throw new IllegalStateException();
        } catch (FileNotFoundException expected) {
            assertTrue(expected.getMessage().contains(HFileLink.class.getSimpleName()));
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HFileLink(org.apache.hadoop.hbase.io.HFileLink) FileSystem(org.apache.hadoop.fs.FileSystem) ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext) FileNotFoundException(java.io.FileNotFoundException) Test(org.junit.Test)

Example 4 with ReaderContext

use of org.apache.hadoop.hbase.io.hfile.ReaderContext in project hbase by apache.

In the class TestStoreFileInfo, the method testOpenErrorMessageReference:

@Test
public void testOpenErrorMessageReference() throws IOException {
    // Test Reference-file open failure (the old comment wrongly said HFileLink).
    // Opening a reference whose parent hfile is missing must throw a
    // FileNotFoundException whose message contains the "->" reference arrow.
    Path p = new Path(TEST_UTIL.getDataTestDirOnTestFS(), "4567.abcd");
    // Close the FileSystem when done, matching testOpenErrorMessageHFileLink.
    try (FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration())) {
        fs.mkdirs(p.getParent());
        // Write a bottom reference pointing at a parent file that does not exist.
        Reference r = Reference.createBottomReference(HConstants.EMPTY_START_ROW);
        r.write(fs, p);
        StoreFileInfo sfi = new StoreFileInfo(TEST_UTIL.getConfiguration(), fs, p, true);
        try {
            ReaderContext context = sfi.createReaderContext(false, 1000, ReaderType.PREAD);
            sfi.createReader(context, null);
            // No exception means the missing parent went undetected: fail loudly.
            throw new IllegalStateException();
        } catch (FileNotFoundException fnfe) {
            assertTrue(fnfe.getMessage().contains("->"));
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) Reference(org.apache.hadoop.hbase.io.Reference) FileSystem(org.apache.hadoop.fs.FileSystem) ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext) FileNotFoundException(java.io.FileNotFoundException) Test(org.junit.Test)

Example 5 with ReaderContext

use of org.apache.hadoop.hbase.io.hfile.ReaderContext in project hbase by apache.

In the class TestRowPrefixBloomFilter, the method testRowPrefixBloomFilterWithGet:

@Test
public void testRowPrefixBloomFilterWithGet() throws Exception {
    FileSystem fs = FileSystem.getLocal(conf);
    int expKeys = fixedLengthExpKeys;
    // Write a store file whose row-prefix bloom filter we probe with Get-style scans.
    Path f = new Path(testDir, name.getMethodName());
    writeStoreFile(f, bt, expKeys);
    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader = new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    reader.loadFileInfo();
    reader.loadBloomfilter();
    StoreFileScanner scanner = getStoreFileScanner(reader);
    HStore store = mock(HStore.class);
    when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
    try {
        // Get with valid row style.
        // Prefix row in bloom: scanner should be used.
        checkShouldUseScanner(scanner, store, generateRowWithSuffix(String.format(prefixFormatter, prefixRowCount - 2), 0), true);
        // Prefix row not in bloom: scanner should be skipped.
        checkShouldUseScanner(scanner, store, generateRowWithSuffix(String.format(prefixFormatter, prefixRowCount - 1), 0), false);
        // Get with invalid row style.
        // ROWPREFIX: the length of row is less than prefixLength.
        // Row in bloom.
        checkShouldUseScanner(scanner, store, String.format(invalidFormatter, prefixRowCount + 2), true);
        // Row not in bloom.
        checkShouldUseScanner(scanner, store, String.format(invalidFormatter, prefixRowCount + 1), false);
    } finally {
        // Cleanup runs even if an assertion fails: evict cached blocks because
        // we are about to delete the file, then delete it.
        reader.close(true);
        fs.delete(f, true);
    }
}

/**
 * Builds a Get-backed Scan for {@code row} and asserts whether the bloom
 * filter says the scanner should be used ({@code expected}).
 */
private void checkShouldUseScanner(StoreFileScanner scanner, HStore store, String row, boolean expected) throws Exception {
    Scan scan = new Scan(new Get(Bytes.toBytes(row)));
    boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
    if (expected) {
        assertTrue(exists);
    } else {
        assertFalse(exists);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) HFileInfo(org.apache.hadoop.hbase.io.hfile.HFileInfo) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) FileSystem(org.apache.hadoop.fs.FileSystem) ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext) Get(org.apache.hadoop.hbase.client.Get) ReaderContextBuilder(org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)

Aggregations

ReaderContext (org.apache.hadoop.hbase.io.hfile.ReaderContext)15 AtomicInteger (java.util.concurrent.atomic.AtomicInteger)11 HFileInfo (org.apache.hadoop.hbase.io.hfile.HFileInfo)11 ReaderContextBuilder (org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder)11 Path (org.apache.hadoop.fs.Path)10 Test (org.junit.Test)9 FileSystem (org.apache.hadoop.fs.FileSystem)7 KeyValue (org.apache.hadoop.hbase.KeyValue)6 Scan (org.apache.hadoop.hbase.client.Scan)5 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)5 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)5 HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)3 FileNotFoundException (java.io.FileNotFoundException)2 IOException (java.io.IOException)2 TreeSet (java.util.TreeSet)2 Cell (org.apache.hadoop.hbase.Cell)2 Get (org.apache.hadoop.hbase.client.Get)2 InterruptedIOException (java.io.InterruptedIOException)1 HashMap (java.util.HashMap)1 Map (java.util.Map)1