
Example 11 with HFileScanner

Use of org.apache.hadoop.hbase.io.hfile.HFileScanner in project hbase by apache.

From the class TestHalfStoreFileReader, method doTestOfScanAndReseek:

private void doTestOfScanAndReseek(Path p, FileSystem fs, Reference bottom, CacheConfig cacheConf) throws IOException {
    final HalfStoreFileReader halfreader = new HalfStoreFileReader(fs, p, cacheConf, bottom, TEST_UTIL.getConfiguration());
    halfreader.loadFileInfo();
    final HFileScanner scanner = halfreader.getScanner(false, false);
    scanner.seekTo();
    Cell curr;
    do {
        curr = scanner.getCell();
        // Build a fake key that sorts after every real cell with curr's row, family,
        // and qualifier, then check that the reseek is inexact (reseekTo returns > 0
        // when it lands before the sought key rather than on an exact match).
        KeyValue reseekKv = getLastOnCol(curr);
        int ret = scanner.reseekTo(reseekKv);
        assertTrue("reseek to returned: " + ret, ret > 0);
    } while (scanner.next());
    // One final reseek from the last cell in the half file.
    int ret = scanner.reseekTo(getLastOnCol(curr));
    assertTrue(ret > 0);
    halfreader.close(true);
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), Cell (org.apache.hadoop.hbase.Cell)
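
The getLastOnCol helper is not shown on this page. In the test it manufactures a "fake" key that sorts after any real cell sharing the current cell's row, family, and qualifier, which is why every reseekTo above must be inexact. A plausible sketch using KeyValueUtil's fake-key factory (an assumption; verify against the actual test source):

private KeyValue getLastOnCol(Cell curr) {
    // Sketch, not the verbatim helper: create a synthetic key that sorts after
    // every real cell in this row/column, so reseekTo() can never match exactly.
    return KeyValueUtil.createLastOnRow(
        curr.getRowArray(), curr.getRowOffset(), curr.getRowLength(),
        curr.getFamilyArray(), curr.getFamilyOffset(), curr.getFamilyLength(),
        curr.getQualifierArray(), curr.getQualifierOffset(), curr.getQualifierLength());
}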

Example 12 with HFileScanner

Use of org.apache.hadoop.hbase.io.hfile.HFileScanner in project hbase by apache.

From the class TestHFileOutputFormat2, method test_WritingTagData:

/**
   * Test that {@link HFileOutputFormat2} RecordWriter writes tags such as ttl into
   * hfile.
   */
@Test
public void test_WritingTagData() throws Exception {
    Configuration conf = new Configuration(this.util.getConfiguration());
    final String HFILE_FORMAT_VERSION_CONF_KEY = "hfile.format.version";
    conf.setInt(HFILE_FORMAT_VERSION_CONF_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS);
    RecordWriter<ImmutableBytesWritable, Cell> writer = null;
    TaskAttemptContext context = null;
    Path dir = util.getDataTestDir("WritingTagData");
    try {
        Job job = new Job(conf);
        FileOutputFormat.setOutputPath(job, dir);
        context = createTestTaskAttemptContext(job);
        HFileOutputFormat2 hof = new HFileOutputFormat2();
        writer = hof.getRecordWriter(context);
        final byte[] b = Bytes.toBytes("b");
        List<Tag> tags = new ArrayList<>();
        // Attach a TTL tag; the numeric value is just an arbitrary test payload.
        tags.add(new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(978670)));
        KeyValue kv = new KeyValue(b, b, b, HConstants.LATEST_TIMESTAMP, b, tags);
        writer.write(new ImmutableBytesWritable(), kv);
        writer.close(context);
        writer = null;
        FileSystem fs = dir.getFileSystem(conf);
        RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(dir, true);
        while (iterator.hasNext()) {
            LocatedFileStatus keyFileStatus = iterator.next();
            HFile.Reader reader = HFile.createReader(fs, keyFileStatus.getPath(), new CacheConfig(conf), conf);
            HFileScanner scanner = reader.getScanner(false, false, false);
            scanner.seekTo();
            Cell cell = scanner.getCell();
            List<Tag> tagsFromCell = TagUtil.asList(cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
            assertTrue(tagsFromCell.size() > 0);
            for (Tag tag : tagsFromCell) {
                assertTrue(tag.getType() == TagType.TTL_TAG_TYPE);
            }
        }
    } finally {
        if (writer != null && context != null)
            writer.close(context);
        dir.getFileSystem(conf).delete(dir, true);
    }
}
Also used: Path (org.apache.hadoop.fs.Path), ImmutableBytesWritable (org.apache.hadoop.hbase.io.ImmutableBytesWritable), KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), ArrayList (java.util.ArrayList), LocatedFileStatus (org.apache.hadoop.fs.LocatedFileStatus), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), TaskAttemptContext (org.apache.hadoop.mapreduce.TaskAttemptContext), ArrayBackedTag (org.apache.hadoop.hbase.ArrayBackedTag), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), FileSystem (org.apache.hadoop.fs.FileSystem), DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem), Tag (org.apache.hadoop.hbase.Tag), Job (org.apache.hadoop.mapreduce.Job), HFile (org.apache.hadoop.hbase.io.hfile.HFile), Cell (org.apache.hadoop.hbase.Cell), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Test (org.junit.Test)
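
Note the hfile.format.version override at the top of the test: cell tags (TTL, ACL, visibility labels) are only persisted to disk for HFile format v3 and later, and HFile.MIN_FORMAT_VERSION_WITH_TAGS is 3. Without this setting the read-back assertions would fail. The prerequisite in isolation, as a minimal sketch:

Configuration conf = HBaseConfiguration.create();
// Tags are written into the hfile only when the format version is >= 3.
conf.setInt("hfile.format.version", HFile.MIN_FORMAT_VERSION_WITH_TAGS);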

Example 13 with HFileScanner

Use of org.apache.hadoop.hbase.io.hfile.HFileScanner in project hbase by apache.

From the class TestCacheOnWriteInSchema, method readStoreFile:

private void readStoreFile(Path path) throws IOException {
    CacheConfig cacheConf = store.getCacheConfig();
    BlockCache cache = cacheConf.getBlockCache();
    StoreFile sf = new StoreFile(fs, path, conf, cacheConf, BloomType.ROWCOL);
    HFile.Reader reader = sf.createReader().getHFileReader();
    try {
        // Open a scanner with (on read) caching disabled
        HFileScanner scanner = reader.getScanner(false, false);
        assertTrue(testDescription, scanner.seekTo());
        // Cribbed from io.hfile.TestCacheOnWrite
        long offset = 0;
        while (offset < reader.getTrailer().getLoadOnOpenDataOffset()) {
            // Flags: don't cache the block, use pread, this is not a compaction.
            // Also, pass null for expected block type to avoid checking it.
            HFileBlock block = reader.readBlock(offset, -1, false, true, false, true, null, DataBlockEncoding.NONE);
            BlockCacheKey blockCacheKey = new BlockCacheKey(reader.getName(), offset);
            boolean isCached = cache.getBlock(blockCacheKey, true, false, true) != null;
            boolean shouldBeCached = cowType.shouldBeCached(block.getBlockType());
            if (shouldBeCached != isCached) {
                throw new AssertionError("shouldBeCached: " + shouldBeCached + "\n" + "isCached: " + isCached + "\n" + "Test description: " + testDescription + "\n" + "block: " + block + "\n" + "blockCacheKey: " + blockCacheKey);
            }
            offset += block.getOnDiskSizeWithHeader();
        }
    } finally {
        reader.close();
    }
}
Also used: HFileBlock (org.apache.hadoop.hbase.io.hfile.HFileBlock), BlockCache (org.apache.hadoop.hbase.io.hfile.BlockCache), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), BlockCacheKey (org.apache.hadoop.hbase.io.hfile.BlockCacheKey)
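
The eight positional arguments to readBlock above are easier to follow with each flag labeled. A sketch of the same call annotated with the parameter meanings (the comments are descriptive annotations based on the reader interface of this era, not part of the API):

HFileBlock block = reader.readBlock(
    offset,                  // file offset of the block to read
    -1,                      // on-disk block size unknown; let the reader determine it
    false,                   // cacheBlock: do not add this block to the cache
    true,                    // pread: use positional read instead of seek+read
    false,                   // isCompaction: this is not a compaction read
    true,                    // updateCacheMetrics: count this access in cache stats
    null,                    // expectedBlockType: null skips the block-type check
    DataBlockEncoding.NONE); // expectedDataBlockEncoding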

Example 14 with HFileScanner

Use of org.apache.hadoop.hbase.io.hfile.HFileScanner in project hbase by apache.

From the class TestFSErrorsExposed, method testHFileScannerThrowsErrors:

/**
   * Injects errors into the pread calls of an on-disk file, and makes
   * sure those bubble up to the HFile scanner
   */
@Test
public void testHFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem) util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(util.getConfiguration(), cacheConf, hfs).withOutputDir(hfilePath).withFileContext(meta).build();
    TestStoreFile.writeStoreFile(writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf, BloomType.NONE);
    StoreFileReader reader = sf.createReader();
    HFileScanner scanner = reader.getScanner(false, true);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seekTo();
    // Do at least one successful read
    assertTrue(scanner.next());
    faultyfs.startFaults();
    try {
        int scanned = 0;
        while (scanner.next()) {
            scanned++;
        }
        fail("Scanner didn't throw after faults injected");
    } catch (IOException ioe) {
        LOG.info("Got expected exception", ioe);
        assertTrue(ioe.getMessage().contains("Fault"));
    }
    // end of test so evictOnClose
    reader.close(true);
}
Also used: Path (org.apache.hadoop.fs.Path), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), IOException (java.io.IOException), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), FileSystem (org.apache.hadoop.fs.FileSystem), HFileSystem (org.apache.hadoop.hbase.fs.HFileSystem), FilterFileSystem (org.apache.hadoop.fs.FilterFileSystem), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Test (org.junit.Test)
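
FaultyFileSystem and FaultyInputStream are private helpers inside TestFSErrorsExposed and are not shown here. The general pattern is a FilterFileSystem that interposes on stream creation so faults can be switched on mid-test. A simplified, hypothetical sketch of that pattern (this version faults on open() for brevity, whereas the real helper injects faults into the pread calls of already-open streams):

class ThrowingFileSystem extends FilterFileSystem {
    // Hypothetical stand-in for FaultyFileSystem, illustrating the wrapper idea only.
    private volatile boolean faultsEnabled = false;

    ThrowingFileSystem(FileSystem backing) {
        super(backing);
    }

    void startFaults() {
        faultsEnabled = true;
    }

    @Override
    public FSDataInputStream open(Path path, int bufferSize) throws IOException {
        if (faultsEnabled) {
            throw new IOException("Fault injected opening " + path);
        }
        return super.open(path, bufferSize);
    }
}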

Example 15 with HFileScanner

Use of org.apache.hadoop.hbase.io.hfile.HFileScanner in project hbase by apache.

From the class TestMajorCompaction, method verifyCounts:

private void verifyCounts(int countRow1, int countRow2) throws Exception {
    int count1 = 0;
    int count2 = 0;
    for (StoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
        HFileScanner scanner = f.getReader().getScanner(false, false);
        scanner.seekTo();
        do {
            byte[] row = CellUtil.cloneRow(scanner.getCell());
            if (Bytes.equals(row, STARTROW)) {
                count1++;
            } else if (Bytes.equals(row, secondRowBytes)) {
                count2++;
            }
        } while (scanner.next());
    }
    assertEquals(countRow1, count1);
    assertEquals(countRow2, count2);
}
Also used: HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)
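
One caveat on the pattern above: seekTo() returns false for an empty file, and the do/while would then read an invalid cell. A slightly more defensive variant of the same loop, checking the return value first:

HFileScanner scanner = f.getReader().getScanner(false, false);
if (scanner.seekTo()) { // false when the store file contains no cells
    do {
        byte[] row = CellUtil.cloneRow(scanner.getCell());
        // ... classify and count the row ...
    } while (scanner.next());
}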

Aggregations

Usage counts for each class across the indexed examples:

HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 17
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 8
Configuration (org.apache.hadoop.conf.Configuration): 7
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 7
Path (org.apache.hadoop.fs.Path): 6
Cell (org.apache.hadoop.hbase.Cell): 6
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 6
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 6
Test (org.junit.Test): 6
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 4
FileSystem (org.apache.hadoop.fs.FileSystem): 3
KeyValue (org.apache.hadoop.hbase.KeyValue): 3
IOException (java.io.IOException): 2
ByteBuffer (java.nio.ByteBuffer): 2
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 2
ArrayList (java.util.ArrayList): 1
HashMap (java.util.HashMap): 1
Map (java.util.Map): 1
TreeMap (java.util.TreeMap): 1
FilterFileSystem (org.apache.hadoop.fs.FilterFileSystem): 1