
Example 86 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

the class HFileTestBase method doTest.

@SuppressWarnings("deprecation")
public void doTest(Configuration conf, Path path, Compression.Algorithm compression) throws Exception {
    // Create 10000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(10000);
    // Iterate through data block encoding and compression combinations
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext fileContext = new HFileContextBuilder()
        .withBlockSize(4096) // small block size
        .withCompression(compression)
        .build();
    // write a new test HFile
    LOG.info("Writing with " + fileContext);
    FSDataOutputStream out = FS.create(path);
    HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf).withOutputStream(out).withFileContext(fileContext).create();
    try {
        for (KeyValue kv : testKvs) {
            writer.append(kv);
        }
    } finally {
        writer.close();
        out.close();
    }
    // read it back in
    LOG.info("Reading with " + fileContext);
    int i = 0;
    HFileScanner scanner = null;
    HFile.Reader reader = HFile.createReader(FS, path, cacheConf, true, conf);
    try {
        scanner = reader.getScanner(conf, false, false);
        assertTrue("Initial seekTo failed", scanner.seekTo());
        do {
            Cell kv = scanner.getCell();
            assertTrue("Read back an unexpected or invalid KV", testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
            i++;
        } while (scanner.next());
    } finally {
        // close the scanner before the reader it depends on; guard against
        // a null scanner in case getScanner threw
        if (scanner != null) {
            scanner.close();
        }
        reader.close();
    }
    assertEquals("Did not read back as many KVs as written", i, testKvs.size());
    // Test random seeks with pread
    LOG.info("Random seeking with " + fileContext);
    reader = HFile.createReader(FS, path, cacheConf, true, conf);
    try {
        scanner = reader.getScanner(conf, false, true);
        assertTrue("Initial seekTo failed", scanner.seekTo());
        for (i = 0; i < 100; i++) {
            KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
            assertEquals("Unable to find KV as expected: " + kv, 0, scanner.seekTo(kv));
        }
    } finally {
        scanner.close();
        reader.close();
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) RedundantKVGenerator(org.apache.hadoop.hbase.util.RedundantKVGenerator) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Cell(org.apache.hadoop.hbase.Cell)
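
The try/finally in doTest closes the writer and stream by hand; since HFile.Writer and FSDataOutputStream are both Closeable, the write path can also be expressed with try-with-resources, which closes them in reverse declaration order (writer first, then the stream). A minimal sketch of that variant, reusing the names from Example 86:

try (FSDataOutputStream out = FS.create(path);
     HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
         .withOutputStream(out)
         .withFileContext(fileContext)
         .create()) {
    // writer.close() runs before out.close(), matching the manual finally block
    for (KeyValue kv : testKvs) {
        writer.append(kv);
    }
}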

Example 87 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

the class TestSeekToBlockWithEncoders method seekToTheKey.

private void seekToTheKey(KeyValue expected, List<KeyValue> kvs, Cell toSeek) throws IOException {
    // create all seekers
    List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        if (encoding.getEncoder() == null) {
            continue;
        }
        DataBlockEncoder encoder = encoding.getEncoder();
        HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false).withIncludesMvcc(false).withIncludesTags(false).withCompression(Compression.Algorithm.NONE).build();
        HFileBlockEncodingContext encodingContext = encoder.newDataBlockEncodingContext(conf, encoding, HFILEBLOCK_DUMMY_HEADER, meta);
        ByteBuffer encodedBuffer = TestDataBlockEncoders.encodeKeyValues(encoding, kvs, encodingContext, this.useOffheapData);
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        encodedSeekers.add(seeker);
    }
    // verify that every encoding's seeker lands on the same cell for this key
    checkSeekingConsistency(encodedSeekers, toSeek, expected);
}
Also used : ArrayList(java.util.ArrayList) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
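
checkSeekingConsistency is a private helper of TestSeekToBlockWithEncoders and is not shown above. A minimal sketch of what such a helper could look like, assuming the EncodedSeeker seekToKeyInBlock/getCell/rewind API; the helper name matches the call site, but the body and assertion message here are a reconstruction, not the actual hbase source:

private void checkSeekingConsistency(List<DataBlockEncoder.EncodedSeeker> encodedSeekers,
        Cell toSeek, KeyValue expected) {
    for (DataBlockEncoder.EncodedSeeker seeker : encodedSeekers) {
        // position this encoding's seeker at (or just before) the requested key
        seeker.seekToKeyInBlock(toSeek, false);
        Cell actual = seeker.getCell();
        assertEquals("Seeker did not land on the expected KV", expected,
            KeyValueUtil.ensureKeyValue(actual));
        seeker.rewind();
    }
}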

Example 88 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

the class TestFSErrorsExposed method testHFileScannerThrowsErrors.

/**
 * Injects errors into the pread calls of an on-disk file, and makes
 * sure those bubble up to the HFile scanner
 */
@Test
public void testHFileScannerThrowsErrors() throws IOException {
    Path hfilePath = new Path(new Path(util.getDataTestDir("internalScannerExposesErrors"), "regionname"), "familyname");
    HFileSystem hfs = (HFileSystem) util.getTestFileSystem();
    FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
    FileSystem fs = new HFileSystem(faultyfs);
    CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
    HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(util.getConfiguration(), cacheConf, hfs).withOutputDir(hfilePath).withFileContext(meta).build();
    TestHStoreFile.writeStoreFile(writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
    HStoreFile sf = new HStoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf, BloomType.NONE, true);
    sf.initReader();
    StoreFileReader reader = sf.getReader();
    HFileScanner scanner = reader.getScanner(false, true);
    FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
    assertNotNull(inStream);
    scanner.seekTo();
    // Do at least one successful read
    assertTrue(scanner.next());
    faultyfs.startFaults();
    try {
        int scanned = 0;
        while (scanner.next()) {
            scanned++;
        }
        fail("Scanner didn't throw after faults injected");
    } catch (IOException ioe) {
        LOG.info("Got expected exception", ioe);
        assertTrue(ioe.getMessage().contains("Fault"));
    }
    // end of test so evictOnClose
    reader.close(true);
}
Also used : Path(org.apache.hadoop.fs.Path) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) IOException(java.io.IOException) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) FileSystem(org.apache.hadoop.fs.FileSystem) HFileSystem(org.apache.hadoop.hbase.fs.HFileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) HFileSystem(org.apache.hadoop.hbase.fs.HFileSystem) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Test(org.junit.Test)
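
FaultyFileSystem and FaultyInputStream are test helpers defined inside TestFSErrorsExposed and not shown above. A minimal sketch of the stream side, assuming the standard Hadoop FSDataInputStream/PositionedReadable contract (it needs java.io.InputStream, java.io.IOException, org.apache.hadoop.fs.FSDataInputStream, and org.apache.hadoop.fs.PositionedReadable); the body is a reconstruction, though the thrown message deliberately contains "Fault" so the test's assertion would hold:

static class FaultyInputStream extends FSDataInputStream {
    private volatile boolean faultsStarted = false;

    FaultyInputStream(InputStream in) {
        // the wrapped stream must also implement Seekable and PositionedReadable
        super(in);
    }

    void startFaults() {
        faultsStarted = true;
    }

    // intercept preads, which is what HFile block reads use
    @Override
    public int read(long position, byte[] buffer, int offset, int length) throws IOException {
        injectFault();
        return ((PositionedReadable) in).read(position, buffer, offset, length);
    }

    private void injectFault() throws IOException {
        if (faultsStarted) {
            throw new IOException("Fault injected");
        }
    }
}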

Example 89 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

the class TestHRegionServerBulkLoad method createHFile.

/**
 * Create an HFile with the given number of rows with a specified value.
 */
public static void createHFile(FileSystem fs, Path path, byte[] family, byte[] qualifier, byte[] value, int numRows) throws IOException {
    HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE).withCompression(COMPRESSION).build();
    HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path).withFileContext(context).create();
    long now = EnvironmentEdgeManager.currentTime();
    try {
        // write numRows cells, all with the same timestamp and value
        for (int i = 0; i < numRows; i++) {
            KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
            writer.append(kv);
        }
        writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(now));
    } finally {
        writer.close();
    }
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
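
A hedged usage sketch for createHFile: bulk load expects HFiles laid out under a column-family directory, so a caller might do something like the following (the paths and row count are invented for illustration; fs comes from the test harness):

Path familyDir = new Path("/staging/mytable/cf");
createHFile(fs, new Path(familyDir, "hfile_0"), Bytes.toBytes("cf"),
    Bytes.toBytes("q"), Bytes.toBytes("v"), 1000);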

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) 89
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext) 82
Path (org.apache.hadoop.fs.Path) 52
Test (org.junit.Test) 48
KeyValue (org.apache.hadoop.hbase.KeyValue) 39
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig) 27
FileSystem (org.apache.hadoop.fs.FileSystem) 26
Cell (org.apache.hadoop.hbase.Cell) 17
HFile (org.apache.hadoop.hbase.io.hfile.HFile) 16
ByteBuffer (java.nio.ByteBuffer) 15
Configuration (org.apache.hadoop.conf.Configuration) 14
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner) 12
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter) 12
DataOutputStream (java.io.DataOutputStream) 6
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream) 6
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration) 6
DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 5
IOException (java.io.IOException) 5
ArrayList (java.util.ArrayList) 5
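
The recurring pattern across these examples: build an immutable HFileContext with HFileContextBuilder, hand it to HFile.getWriterFactory (or a StoreFileWriter.Builder), and reuse the same CacheConfig for reads. A minimal end-to-end write sketch assembled from calls that appear in the examples above; the path, block size, and cell contents are illustrative:

Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
Path path = new Path("/tmp/example.hfile");
CacheConfig cacheConf = new CacheConfig(conf);
HFileContext context = new HFileContextBuilder()
    .withBlockSize(64 * 1024)                    // 64 KB data blocks
    .withCompression(Compression.Algorithm.NONE) // or GZ, SNAPPY, LZ4, ...
    .build();
try (HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, path).withFileContext(context).create()) {
    writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
        Bytes.toBytes("q"), Bytes.toBytes("v")));
}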