Example 41 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by Apache.

From the class TestCompoundBloomFilter, method writeStoreFile.

private Path writeStoreFile(int t, BloomType bt, List<KeyValue> kvs) throws IOException {
    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE, BLOOM_BLOCK_SIZES[t]);
    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
    cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();
    StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(TEST_UTIL.getDataTestDir())
        .withBloomType(bt)
        .withFileContext(meta)
        .build();
    assertTrue(w.hasGeneralBloom());
    assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter);
    CompoundBloomFilterWriter cbbf = (CompoundBloomFilterWriter) w.getGeneralBloomWriter();
    int keyCount = 0;
    KeyValue prev = null;
    LOG.debug("Total keys/values to insert: " + kvs.size());
    for (KeyValue kv : kvs) {
        w.append(kv);
        // Validate the key count in the Bloom filter.
        boolean newKey = true;
        if (prev != null) {
            newKey = !(bt == BloomType.ROW ? CellUtil.matchingRows(kv, prev) : CellUtil.matchingRowColumn(kv, prev));
        }
        if (newKey)
            ++keyCount;
        assertEquals(keyCount, cbbf.getKeyCount());
        prev = kv;
    }
    w.close();
    return w.getPath();
}
Also used: CompoundBloomFilterWriter(org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter) KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
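
The assertion inside the loop depends on how consecutive cells collapse into a single Bloom key: BloomType.ROW compares rows only, while other types such as ROWCOL compare row plus column. A minimal standalone sketch of that counting logic, assuming a sorted list of KeyValues (the class and method names here are illustrative, not part of the test):

import java.util.List;

import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.BloomType;

public final class BloomKeyCountSketch {

    // Counts distinct Bloom keys in a sorted list of KeyValues, mirroring the
    // newKey check in writeStoreFile: ROW compares rows only, any other type
    // (e.g. ROWCOL) compares row plus column.
    static int countBloomKeys(List<KeyValue> sortedKvs, BloomType bt) {
        int keyCount = 0;
        KeyValue prev = null;
        for (KeyValue kv : sortedKvs) {
            boolean newKey = prev == null
                || !(bt == BloomType.ROW
                    ? CellUtil.matchingRows(kv, prev)
                    : CellUtil.matchingRowColumn(kv, prev));
            if (newKey) {
                keyCount++;
            }
            prev = kv;
        }
        return keyCount;
    }
}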

Example 42 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by Apache.

From the class TestFSErrorsExposed, method testHFileScannerThrowsErrors.

/**
   * Injects errors into the pread calls of an on-disk file, and makes
   * sure those bubble up to the HFile scanner
   */
@Test
public void testHFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem) util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(util.getConfiguration(), cacheConf, hfs)
        .withOutputDir(hfilePath)
        .withFileContext(meta)
        .build();
    TestStoreFile.writeStoreFile(writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    StoreFile sf = new StoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf, BloomType.NONE);
    StoreFileReader reader = sf.createReader();
    HFileScanner scanner = reader.getScanner(false, true);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seekTo();
    // Do at least one successful read
    assertTrue(scanner.next());
    faultyfs.startFaults();
    try {
        int scanned = 0;
        while (scanner.next()) {
            scanned++;
        }
        fail("Scanner didn't throw after faults injected");
    } catch (IOException ioe) {
        LOG.info("Got expected exception", ioe);
        assertTrue(ioe.getMessage().contains("Fault"));
    }
    // end of test so evictOnClose
    reader.close(true);
}
Also used: Path(org.apache.hadoop.fs.Path) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) IOException(java.io.IOException) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) FileSystem(org.apache.hadoop.fs.FileSystem) HFileSystem(org.apache.hadoop.hbase.fs.HFileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Test(org.junit.Test)
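
The FaultyFileSystem used above is a private helper inside TestFSErrorsExposed. The core idea is a FilterFileSystem whose opened streams start throwing on positional reads once faults are switched on; a minimal sketch of that pattern, with all class and message names illustrative:

import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;

class FaultInjectingFileSystem extends FilterFileSystem {

    private final AtomicBoolean faultsStarted = new AtomicBoolean(false);

    FaultInjectingFileSystem(FileSystem base) {
        super(base);
    }

    void startFaults() {
        faultsStarted.set(true);
    }

    @Override
    public FSDataInputStream open(Path p, int bufferSize) throws IOException {
        final FSDataInputStream real = fs.open(p, bufferSize);
        // Wrap the real stream so preads fail once faults are enabled; a
        // "Fault" substring in the message is what the test asserts on.
        return new FSDataInputStream(real) {
            @Override
            public int read(long position, byte[] buffer, int offset, int length)
                    throws IOException {
                if (faultsStarted.get()) {
                    throw new IOException("Fault injected on pread");
                }
                return real.read(position, buffer, offset, length);
            }
        };
    }
}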

Example 43 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by Apache.

From the class TestHRegionServerBulkLoad, method createHFile.

/**
   * Create an HFile with the given number of rows, each carrying the specified value.
   */
public static void createHFile(FileSystem fs, Path path, byte[] family, byte[] qualifier, byte[] value, int numRows) throws IOException {
    HFileContext context = new HFileContextBuilder()
        .withBlockSize(BLOCKSIZE)
        .withCompression(COMPRESSION)
        .build();
    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    long now = System.currentTimeMillis();
    try {
        // Write numRows cells, all sharing the same timestamp and value.
        for (int i = 0; i < numRows; i++) {
            KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
            writer.append(kv);
        }
        writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(now));
    } finally {
        writer.close();
    }
}
Also used: KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
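
A read-back check for the file that createHFile produces can use the same HFile API. A sketch assuming the test's fs and conf fields, with verifyHFile and expectedRows as illustrative names:

public static void verifyHFile(FileSystem fs, Path path, int expectedRows) throws IOException {
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    try {
        // Load file info, including the BULKLOAD_TIME_KEY written above.
        reader.loadFileInfo();
        // No block caching and no positional reads for a plain sequential scan.
        HFileScanner scanner = reader.getScanner(false, false);
        int rows = 0;
        if (scanner.seekTo()) {
            do {
                rows++;
            } while (scanner.next());
        }
        assertEquals(expectedRows, rows);
    } finally {
        reader.close();
    }
}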

Example 44 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by Apache.

From the class TestStore, method testEmptyStoreFile.

/**
   * Test for HBASE-1686.
   * @throws IOException
   */
@Test
public void testEmptyStoreFile() throws IOException {
    init(this.name.getMethodName());
    // Write a store file.
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null);
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[]) null), null);
    flush(1);
    // Now put in place an empty store file. It's a little tricky: we have to
    // do it manually, with a hacked-in sequence id.
    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    StoreFileWriter w = new StoreFileWriter.Builder(c, new CacheConfig(c), fs)
        .withOutputDir(storedir)
        .withFileContext(meta)
        .build();
    w.appendMetadata(seqid + 1, false);
    w.close();
    this.store.close();
    // Reopen it... should pick up two files
    this.store = new HStore(this.store.getHRegion(), this.store.getFamily(), c);
    Assert.assertEquals(2, this.store.getStorefilesCount());
    result = HBaseTestingUtility.getFromStoreFile(store, get.getRow(), qualifiers);
    Assert.assertEquals(1, result.size());
}
Also used: Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) CompactionConfiguration(org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) LocalFileSystem(org.apache.hadoop.fs.LocalFileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)
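
The call w.appendMetadata(seqid + 1, false) is what makes the otherwise empty file usable: it stamps a max sequence id into the file info (the false flag marks the file as not the product of a major compaction). A short sketch of verifying that metadata, reusing the test's fs, c, w, and seqid:

    StoreFile sf = new StoreFile(fs, w.getPath(), c, new CacheConfig(c), BloomType.NONE);
    // createReader() parses the file info block, including MAX_SEQ_ID_KEY.
    sf.createReader();
    Assert.assertEquals(seqid + 1, sf.getMaxSequenceId());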

Example 45 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by Apache.

From the class TestStoreFile, method testHFileLink.

@Test
public void testHFileLink() throws IOException {
    final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testHFileLinkTb"));
    // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
    Configuration testConf = new Configuration(this.conf);
    FSUtils.setRootDir(testConf, testDir);
    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs, FSUtils.getTableDir(testDir, hri.getTable()), hri);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
        .withFilePath(regionFs.createTempName())
        .withFileContext(meta)
        .build();
    writeStoreFile(writer);
    Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
    Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY));
    HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
    Path linkFilePath = new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
    // Try to open store file from link
    StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath);
    StoreFile hsf = new StoreFile(this.fs, storeFileInfo, testConf, cacheConf, BloomType.NONE);
    assertTrue(storeFileInfo.isLink());
    // Now confirm that I can read from the link
    int count = 1;
    HFileScanner s = hsf.createReader().getScanner(false, false);
    s.seekTo();
    while (s.next()) {
        count++;
    }
    assertEquals((LAST_CHAR - FIRST_CHAR + 1) * (LAST_CHAR - FIRST_CHAR + 1), count);
}
Also used: HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)
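
How the link file ties back to the original: a short sketch using the hri, storeFilePath, and dstPath values from the test. The generated name follows roughly a "<table>=<encoded-region>-<hfile>" pattern, which is what lets StoreFileInfo detect and resolve the link:

    String linkName = HFileLink.createHFileLinkName(hri, storeFilePath.getName());
    // isHFileLink recognizes the link-name pattern, so StoreFileInfo can
    // redirect reads to the referenced file in the source region.
    assertTrue(HFileLink.isHFileLink(new Path(dstPath, linkName)));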

Aggregations

HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 55
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 51
Path (org.apache.hadoop.fs.Path): 34
Test (org.junit.Test): 31
KeyValue (org.apache.hadoop.hbase.KeyValue): 24
FileSystem (org.apache.hadoop.fs.FileSystem): 21
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 21
Cell (org.apache.hadoop.hbase.Cell): 12
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 11
ByteBuffer (java.nio.ByteBuffer): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 7
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
DataOutputStream (java.io.DataOutputStream): 5
SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff): 5
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 4
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 4
PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec): 4
EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker): 4