
Example 56 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

From the class TestEncryptionKeyRotation, the method extractHFileKey:

private static byte[] extractHFileKey(Path path) throws Exception {
    // Open the HFile through a default CacheConfig built from the shared test Configuration.
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), conf);
    try {
        reader.loadFileInfo();
        Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
        assertNotNull("Reader has a null crypto context", cryptoContext);
        Key key = cryptoContext.getKey();
        assertNotNull("Crypto context has no key", key);
        return key.getEncoded();
    } finally {
        reader.close();
    }
}
Also used: Encryption (org.apache.hadoop.hbase.io.crypto.Encryption), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Key (java.security.Key)
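The try/finally block above can also be written with try-with-resources. This is a minimal sketch, not the original test code; it assumes HFile.Reader implements java.io.Closeable, as the explicit reader.close() call above suggests.

private static byte[] extractHFileKey(Path path) throws Exception {
    // Same reader setup as above, but the reader is closed automatically on exit.
    try (HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), conf)) {
        reader.loadFileInfo();
        Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
        assertNotNull("Reader has a null crypto context", cryptoContext);
        Key key = cryptoContext.getKey();
        assertNotNull("Crypto context has no key", key);
        return key.getEncoded();
    }
}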

Example 57 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

From the class TestEncryptionRandomKeying, the method extractHFileKey:

private static byte[] extractHFileKey(Path path) throws Exception {
    HFile.Reader reader = HFile.createReader(TEST_UTIL.getTestFileSystem(), path, new CacheConfig(conf), conf);
    try {
        reader.loadFileInfo();
        Encryption.Context cryptoContext = reader.getFileContext().getEncryptionContext();
        assertNotNull("Reader has a null crypto context", cryptoContext);
        Key key = cryptoContext.getKey();
        if (key == null) {
            return null;
        }
        return key.getEncoded();
    } finally {
        reader.close();
    }
}
Also used: Encryption (org.apache.hadoop.hbase.io.crypto.Encryption), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Key (java.security.Key)
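This variant differs from Example 56 only in how it handles a missing key: instead of asserting that the crypto context holds a key, it returns null, letting the caller distinguish files that carry no encryption key.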

Example 58 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

From the class TestHRegionServerBulkLoad, the method createHFile:

/**
   * Create an HFile with the given number of rows, each row carrying the specified value.
   */
public static void createHFile(FileSystem fs, Path path, byte[] family, byte[] qualifier, byte[] value, int numRows) throws IOException {
    HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE).withCompression(COMPRESSION).build();
    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path).withFileContext(context).create();
    long now = System.currentTimeMillis();
    try {
        // Write numRows cells, all with the same value and the same timestamp.
        for (int i = 0; i < numRows; i++) {
            KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
            writer.append(kv);
        }
        writer.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY, Bytes.toBytes(now));
    } finally {
        writer.close();
    }
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
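A file produced by createHFile can be sanity-checked with the same CacheConfig-based reader pattern shown in Examples 56 and 57. The following is a minimal sketch, not part of the original test: verifyHFile is a hypothetical name, it reuses the surrounding test class's conf and JUnit asserts, it assumes reader.loadFileInfo() returns the file-info map (mirroring the MOB_CELLS_COUNT lookup in Example 59), and it assumes reader.getEntries() reports the number of appended cells.

public static void verifyHFile(FileSystem fs, Path path, int expectedRows) throws IOException {
    // Open the freshly written file with a default CacheConfig, as in Examples 56 and 57.
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    try {
        // The file-info map should contain the BULKLOAD_TIME_KEY entry written by appendFileInfo() above.
        Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
        assertTrue(fileInfo.get(StoreFile.BULKLOAD_TIME_KEY) != null);
        // getEntries() is assumed to count the cells appended by createHFile.
        assertEquals(expectedRows, reader.getEntries());
    } finally {
        reader.close();
    }
}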

Example 59 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

From the class TestMobStoreCompaction, the method countMobCellsInMetadata:

private long countMobCellsInMetadata() throws IOException {
    long mobCellsCount = 0;
    Path mobDirPath = MobUtils.getMobFamilyPath(conf, htd.getTableName(), hcd.getNameAsString());
    Configuration copyOfConf = new Configuration(conf);
    // Disable the block cache for this one-off metadata scan.
    copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
    CacheConfig cacheConfig = new CacheConfig(copyOfConf);
    if (fs.exists(mobDirPath)) {
        FileStatus[] files = UTIL.getTestFileSystem().listStatus(mobDirPath);
        for (FileStatus file : files) {
            StoreFile sf = new StoreFile(fs, file.getPath(), conf, cacheConfig, BloomType.NONE);
            Map<byte[], byte[]> fileInfo = sf.createReader().loadFileInfo();
            byte[] count = fileInfo.get(StoreFile.MOB_CELLS_COUNT);
            assertTrue(count != null);
            mobCellsCount += Bytes.toLong(count);
        }
    }
    return mobCellsCount;
}
Also used: Path (org.apache.hadoop.fs.Path), FileStatus (org.apache.hadoop.fs.FileStatus), Configuration (org.apache.hadoop.conf.Configuration), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)
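The CacheConfig detail worth noting here is that the block cache is switched off before the metadata scan by copying the Configuration and setting HFILE_BLOCK_CACHE_SIZE_KEY to 0. A minimal sketch of that pattern in isolation, using a hypothetical helper name:

// Hypothetical helper, not in the original test: build a CacheConfig whose block
// cache is disabled so a one-off metadata scan allocates and populates no cache.
private static CacheConfig noBlockCacheConfig(Configuration conf) {
    Configuration copyOfConf = new Configuration(conf);
    copyOfConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0f);
    return new CacheConfig(copyOfConf);
}

Copying the Configuration keeps the cache-size override from leaking into the shared test configuration.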

Example 60 with CacheConfig

Use of org.apache.hadoop.hbase.io.hfile.CacheConfig in project hbase by apache.

From the class TestStore, the method testEmptyStoreFile:

/**
   * Test for hbase-1686.
   * @throws IOException
   */
@Test
public void testEmptyStoreFile() throws IOException {
    init(this.name.getMethodName());
    // Write a store file.
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null);
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[]) null), null);
    flush(1);
    // Now put in place an empty store file. It's a little tricky; we have to
    // do it manually with a hacked-in sequence id.
    StoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    StoreFileWriter w = new StoreFileWriter.Builder(c, new CacheConfig(c), fs).withOutputDir(storedir).withFileContext(meta).build();
    w.appendMetadata(seqid + 1, false);
    w.close();
    this.store.close();
    // Reopen it... should pick up two files
    this.store = new HStore(this.store.getHRegion(), this.store.getFamily(), c);
    Assert.assertEquals(2, this.store.getStorefilesCount());
    result = HBaseTestingUtility.getFromStoreFile(store, get.getRow(), qualifiers);
    Assert.assertEquals(1, result.size());
}
Also used: Path (org.apache.hadoop.fs.Path), KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), CompactionConfiguration (org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration), FileSystem (org.apache.hadoop.fs.FileSystem), FilterFileSystem (org.apache.hadoop.fs.FilterFileSystem), LocalFileSystem (org.apache.hadoop.fs.LocalFileSystem), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), Test (org.junit.Test)
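For readability, the writer setup buried in the middle of the test can be read as the following standalone pattern. This is only a commented restatement of the lines above, under the same assumptions (the local Configuration c, FileSystem fs, Path storedir, HFileContext meta, and long seqid), not additional test code.

// A StoreFileWriter is built from the Configuration, a CacheConfig derived from it,
// and the FileSystem, then pointed at the existing store directory.
StoreFileWriter w = new StoreFileWriter.Builder(c, new CacheConfig(c), fs)
    .withOutputDir(storedir)
    .withFileContext(meta)
    .build();
// appendMetadata records the maximum sequence id (one past the flushed file's)
// with the major-compaction flag set to false; closing then leaves an empty
// but structurally valid store file for the reopened HStore to pick up.
w.appendMetadata(seqid + 1, false);
w.close();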

Aggregations

CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 63
Path (org.apache.hadoop.fs.Path): 28
Test (org.junit.Test): 26
Configuration (org.apache.hadoop.conf.Configuration): 21
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 21
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 21
FileSystem (org.apache.hadoop.fs.FileSystem): 20
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 20
BlockCache (org.apache.hadoop.hbase.io.hfile.BlockCache): 15
KeyValue (org.apache.hadoop.hbase.KeyValue): 14
TableName (org.apache.hadoop.hbase.TableName): 14
Region (org.apache.hadoop.hbase.regionserver.Region): 13
Store (org.apache.hadoop.hbase.regionserver.Store): 13
Cell (org.apache.hadoop.hbase.Cell): 10
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 10
CombinedBlockCache (org.apache.hadoop.hbase.io.hfile.CombinedBlockCache): 10
IOException (java.io.IOException): 9
CountDownLatch (java.util.concurrent.CountDownLatch): 8
FileStatus (org.apache.hadoop.fs.FileStatus): 8
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 8