Search in sources:

Example 1 with RedundantKVGenerator

Use of org.apache.hadoop.hbase.util.RedundantKVGenerator in project hbase by apache.

The following example is from the class TestHFileEncryption, method testHFileEncryption.

@Test
public void testHFileEncryption() throws Exception {
    // Create 1000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(1000);
    // Iterate through data block encoding and compression combinations
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        for (Compression.Algorithm compression : HBaseCommonTestingUtil.COMPRESSION_ALGORITHMS) {
            HFileContext fileContext = new HFileContextBuilder()
                // small blocks so the file has many of them
                .withBlockSize(4096)
                .withEncryptionContext(cryptoContext)
                .withCompression(compression)
                .withDataBlockEncoding(encoding)
                .build();
            // write a new test HFile
            LOG.info("Writing with " + fileContext);
            Path path = new Path(TEST_UTIL.getDataTestDir(),
                HBaseCommonTestingUtil.getRandomUUID().toString() + ".hfile");
            FSDataOutputStream out = fs.create(path);
            HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
                .withOutputStream(out)
                .withFileContext(fileContext)
                .create();
            try {
                for (KeyValue kv : testKvs) {
                    writer.append(kv);
                }
            } finally {
                writer.close();
                out.close();
            }
            // read it back in
            LOG.info("Reading with " + fileContext);
            int i = 0;
            HFileScanner scanner = null;
            HFile.Reader reader = HFile.createReader(fs, path, cacheConf, true, conf);
            try {
                FixedFileTrailer trailer = reader.getTrailer();
                assertNotNull(trailer.getEncryptionKey());
                scanner = reader.getScanner(conf, false, false);
                assertTrue("Initial seekTo failed", scanner.seekTo());
                do {
                    Cell kv = scanner.getCell();
                    assertTrue("Read back an unexpected or invalid KV",
                        testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
                    i++;
                } while (scanner.next());
            } finally {
                // Close the scanner before its reader, and guard against scanner
                // still being null (getTrailer/getScanner may throw before it is
                // assigned) so a cleanup NPE cannot mask the original failure.
                if (scanner != null) {
                    scanner.close();
                }
                reader.close();
            }
            assertEquals("Did not read back as many KVs as written", i, testKvs.size());
            // Test random seeks with pread
            LOG.info("Random seeking with " + fileContext);
            scanner = null;
            reader = HFile.createReader(fs, path, cacheConf, true, conf);
            try {
                scanner = reader.getScanner(conf, false, true);
                assertTrue("Initial seekTo failed", scanner.seekTo());
                for (i = 0; i < 100; i++) {
                    KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
                    assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
                }
            } finally {
                // Same ordering/null-safety as above: getScanner may throw,
                // leaving scanner null.
                if (scanner != null) {
                    scanner.close();
                }
                reader.close();
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) Compression(org.apache.hadoop.hbase.io.compress.Compression) KeyValue(org.apache.hadoop.hbase.KeyValue) RedundantKVGenerator(org.apache.hadoop.hbase.util.RedundantKVGenerator) Configuration(org.apache.hadoop.conf.Configuration) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)

Example 2 with RedundantKVGenerator

Use of org.apache.hadoop.hbase.util.RedundantKVGenerator in project hbase by apache.

The following example is from the class HFileTestBase, method doTest.

/**
 * Writes 10000 randomly generated KVs to an HFile at {@code path} using the given
 * compression algorithm, then verifies that every KV can be read back with a full
 * sequential scan and that random KVs can be located via positional reads (pread).
 *
 * @param conf        configuration used for the writer, reader, and block cache
 * @param path        destination path for the test HFile
 * @param compression compression algorithm to apply to the file's blocks
 * @throws Exception if writing or reading fails, or a verification assertion fails
 */
@SuppressWarnings("deprecation")
public void doTest(Configuration conf, Path path, Compression.Algorithm compression) throws Exception {
    // Create 10000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(10000);
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext fileContext = new HFileContextBuilder()
        // small blocks so the file has many of them
        .withBlockSize(4096)
        .withCompression(compression)
        .build();
    // write a new test HFile
    LOG.info("Writing with " + fileContext);
    FSDataOutputStream out = FS.create(path);
    HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(out)
        .withFileContext(fileContext)
        .create();
    try {
        for (KeyValue kv : testKvs) {
            writer.append(kv);
        }
    } finally {
        writer.close();
        out.close();
    }
    // read it back in
    LOG.info("Reading with " + fileContext);
    int i = 0;
    HFileScanner scanner = null;
    HFile.Reader reader = HFile.createReader(FS, path, cacheConf, true, conf);
    try {
        scanner = reader.getScanner(conf, false, false);
        assertTrue("Initial seekTo failed", scanner.seekTo());
        do {
            Cell kv = scanner.getCell();
            assertTrue("Read back an unexpected or invalid KV",
                testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
            i++;
        } while (scanner.next());
    } finally {
        // Close the scanner before its reader, and guard against scanner still
        // being null (getScanner may throw before it is assigned) so a cleanup
        // NPE cannot mask the original failure.
        if (scanner != null) {
            scanner.close();
        }
        reader.close();
    }
    assertEquals("Did not read back as many KVs as written", i, testKvs.size());
    // Test random seeks with pread
    LOG.info("Random seeking with " + fileContext);
    scanner = null;
    reader = HFile.createReader(FS, path, cacheConf, true, conf);
    try {
        scanner = reader.getScanner(conf, false, true);
        assertTrue("Initial seekTo failed", scanner.seekTo());
        for (i = 0; i < 100; i++) {
            KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
            assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
        }
    } finally {
        // Same ordering/null-safety as above.
        if (scanner != null) {
            scanner.close();
        }
        reader.close();
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) RedundantKVGenerator(org.apache.hadoop.hbase.util.RedundantKVGenerator) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Cell(org.apache.hadoop.hbase.Cell)

Aggregations

FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)2 Cell (org.apache.hadoop.hbase.Cell)2 KeyValue (org.apache.hadoop.hbase.KeyValue)2 RedundantKVGenerator (org.apache.hadoop.hbase.util.RedundantKVGenerator)2 Configuration (org.apache.hadoop.conf.Configuration)1 Path (org.apache.hadoop.fs.Path)1 Compression (org.apache.hadoop.hbase.io.compress.Compression)1 DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)1 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)1 HFile (org.apache.hadoop.hbase.io.hfile.HFile)1 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)1 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)1 HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)1 Test (org.junit.Test)1