
Example 1 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class CompressionTest, method doSmokeTest.

public static void doSmokeTest(FileSystem fs, Path path, String codec) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HFileContext context = new HFileContextBuilder()
        .withCompression(HFileWriterImpl.compressionByName(codec))
        .build();
    HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
        .withPath(fs, path)
        .withFileContext(context)
        .create();
    // Write any-old Cell...
    final byte[] rowKey = Bytes.toBytes("compressiontestkey");
    Cell c = CellUtil.createCell(rowKey, Bytes.toBytes("compressiontestval"));
    writer.append(c);
    writer.appendFileInfo(Bytes.toBytes("compressioninfokey"), Bytes.toBytes("compressioninfoval"));
    writer.close();
    Cell cc = null;
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), conf);
    try {
        reader.loadFileInfo();
        HFileScanner scanner = reader.getScanner(false, true);
        // position to the start of file
        scanner.seekTo();
        // Read the first cell back and compare it to what was written.
        cc = scanner.getCell();
        if (CellComparator.COMPARATOR.compareRows(c, cc) != 0) {
            throw new Exception("Read back incorrect result: " + c.toString() + " vs " + cc.toString());
        }
    } finally {
        reader.close();
    }
}
Also used: HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration), Configuration (org.apache.hadoop.conf.Configuration), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFile (org.apache.hadoop.hbase.io.hfile.HFile), Cell (org.apache.hadoop.hbase.Cell), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), IOException (java.io.IOException), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)
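
As a usage note, a minimal driver for this smoke test could look like the following sketch. The class name and scratch path are hypothetical, it assumes an HBase client classpath, and it picks the "gz" codec because gzip ships with Hadoop and needs no native libraries.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.CompressionTest;

public class CompressionSmokeDriver {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Run against the local filesystem for a quick check; a real
        // deployment test would point at the cluster's HDFS instead.
        FileSystem fs = FileSystem.getLocal(conf);
        // Hypothetical scratch path; doSmokeTest writes and re-reads one HFile here.
        Path path = new Path("/tmp/compression-smoke.hfile");
        CompressionTest.doSmokeTest(fs, path, "gz");
        System.out.println("gz codec round-trip OK");
    }
}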

Example 2 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class LoadIncrementalHFiles, method copyHFileHalf.

/**
   * Copy half of an HFile into a new HFile.
   */
private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
        Reference reference, HColumnDescriptor familyDescriptor) throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFileWriter halfWriter = null;
    try {
        halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf);
        Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
        int blocksize = familyDescriptor.getBlocksize();
        Algorithm compression = familyDescriptor.getCompressionType();
        BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
        HFileContext hFileContext = new HFileContextBuilder()
            .withCompression(compression)
            .withChecksumType(HStore.getChecksumType(conf))
            .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
            .withBlockSize(blocksize)
            .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
            .withIncludesTags(true)
            .build();
        halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs)
            .withFilePath(outFile)
            .withBloomType(bloomFilterType)
            .withFileContext(hFileContext)
            .build();
        HFileScanner scanner = halfReader.getScanner(false, false, false);
        scanner.seekTo();
        do {
            halfWriter.append(scanner.getCell());
        } while (scanner.next());
        for (Map.Entry<byte[], byte[]> entry : fileInfo.entrySet()) {
            if (shouldCopyHFileMetaKey(entry.getKey())) {
                halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
            }
        }
    } finally {
        if (halfWriter != null) {
            halfWriter.close();
        }
        if (halfReader != null) {
            halfReader.close(cacheConf.shouldEvictOnClose());
        }
    }
}
Also used: StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), HalfStoreFileReader (org.apache.hadoop.hbase.io.HalfStoreFileReader), HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), Algorithm (org.apache.hadoop.hbase.io.compress.Compression.Algorithm), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), BloomType (org.apache.hadoop.hbase.regionserver.BloomType), FileSystem (org.apache.hadoop.fs.FileSystem), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Map (java.util.Map), HashMap (java.util.HashMap), TreeMap (java.util.TreeMap)
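
The Reference argument tells copyHFileHalf which half of the parent file to read. A hedged sketch of preparing both references around a split row, using the same public Reference constructor that appears in Example 4 below (the split row value here is hypothetical):

// Hypothetical split point; in the bulk-load path this comes from the
// boundary between the two regions the parent HFile straddles.
byte[] splitRow = Bytes.toBytes("row-5000");
Reference bottomRef = new Reference(splitRow, Reference.Range.bottom); // keys before splitRow
Reference topRef = new Reference(splitRow, Reference.Range.top);       // splitRow and after
// One copyHFileHalf call per reference then yields the two daughter HFiles.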

Example 3 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestPrefixTreeEncoding, method testSeekBeforeWithFixedData.

@Test
public void testSeekBeforeWithFixedData() throws Exception {
    formatRowNum = true;
    PrefixTreeCodec encoder = new PrefixTreeCodec();
    int batchId = numBatchesWritten++;
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(false)
        .withIncludesTags(includesTag)
        .withCompression(Algorithm.NONE)
        .build();
    HFileBlockEncodingContext blkEncodingCtx = new HFileBlockDefaultEncodingContext(
        DataBlockEncoding.PREFIX_TREE, new byte[0], meta);
    ByteArrayOutputStream baosInMemory = new ByteArrayOutputStream();
    DataOutputStream userDataStream = new DataOutputStream(baosInMemory);
    generateFixedTestData(kvset, batchId, false, includesTag, encoder, blkEncodingCtx, userDataStream);
    EncodedSeeker seeker = encoder.createSeeker(CellComparator.COMPARATOR,
        encoder.newDataBlockDecodingContext(meta));
    byte[] onDiskBytes = baosInMemory.toByteArray();
    ByteBuffer readBuffer = ByteBuffer.wrap(onDiskBytes, DataBlockEncoding.ID_SIZE,
        onDiskBytes.length - DataBlockEncoding.ID_SIZE);
    seeker.setCurrentBuffer(new SingleByteBuff(readBuffer));
    // Seek before the first keyvalue.
    Cell seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(getRowKey(batchId, 0), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertNull(seeker.getCell());
    // Seek before the middle keyvalue.
    seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(
        getRowKey(batchId, NUM_ROWS_PER_BATCH / 3), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertNotNull(seeker.getCell());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH / 3 - 1),
        CellUtil.cloneRow(seeker.getCell()));
    // Seek before the last keyvalue.
    seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(Bytes.toBytes("zzzz"), CF_BYTES);
    seeker.seekToKeyInBlock(seekKey, true);
    assertNotNull(seeker.getCell());
    assertArrayEquals(getRowKey(batchId, NUM_ROWS_PER_BATCH - 1),
        CellUtil.cloneRow(seeker.getCell()));
}
Also used: EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker), DataOutputStream (java.io.DataOutputStream), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), ByteBuffer (java.nio.ByteBuffer), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec), SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
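
The three seeks above share one pattern, so a small helper makes the intent explicit. This is our own hedged refactoring sketch built only from the calls already shown; the helper name is not part of the test:

// Position a seeker strictly before the given row and return the cell it
// lands on; returns null when the row precedes everything in the block.
private static Cell seekBeforeRow(EncodedSeeker seeker, byte[] row, byte[] family) {
    Cell seekKey = CellUtil.createFirstDeleteFamilyCellOnRow(row, family);
    seeker.seekToKeyInBlock(seekKey, true); // true => stop before the key, never on it
    return seeker.getCell();
}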

Example 4 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestHalfStoreFileReader, method testHalfScanner.

// Tests the scanner on an HFile that is backed by HalfStoreFiles
@Test
public void testHalfScanner() throws IOException {
    String rootDir = TEST_UTIL.getDataTestDir().toString();
    Path p = new Path(rootDir, "test");
    Configuration conf = TEST_UTIL.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
    HFile.Writer w = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, p)
        .withFileContext(meta)
        .create();
    // write some things.
    List<KeyValue> items = genSomeKeys();
    for (KeyValue kv : items) {
        w.append(kv);
    }
    w.close();
    HFile.Reader r = HFile.createReader(fs, p, cacheConf, conf);
    r.loadFileInfo();
    Cell midKV = r.midkey();
    byte[] midkey = CellUtil.cloneRow(midKV);
    Reference bottom = new Reference(midkey, Reference.Range.bottom);
    Reference top = new Reference(midkey, Reference.Range.top);
    // Ugly code to get the item before the midkey
    KeyValue beforeMidKey = null;
    for (KeyValue item : items) {
        if (CellComparator.COMPARATOR.compare(item, midKV) >= 0) {
            break;
        }
        beforeMidKey = item;
    }
    System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
    System.out.println("beforeMidKey: " + beforeMidKey);
    // The splitKey itself lives in the top half, so seeking before it in the bottom half should land on the last item before the midkey.
    Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
    assertEquals(beforeMidKey, foundKeyValue);
    // Seeking before the last item: on the top half this yields the penultimate item; on the bottom half, the item just before the midkey.
    foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(items.size() - 1), cacheConf);
    assertEquals(items.get(items.size() - 2), foundKeyValue);
    foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(items.size() - 1), cacheConf);
    assertEquals(beforeMidKey, foundKeyValue);
    // Try and seek before something that is in the bottom.
    foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(0), cacheConf);
    assertNull(foundKeyValue);
    // Try and seek before the first thing.
    foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(0), cacheConf);
    assertNull(foundKeyValue);
    // Try and seek before the second thing in the top and bottom.
    foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(1), cacheConf);
    assertNull(foundKeyValue);
    foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(1), cacheConf);
    assertEquals(items.get(0), foundKeyValue);
    // Try to seek before the splitKey in the top file
    foundKeyValue = doTestOfSeekBefore(p, fs, top, midKV, cacheConf);
    assertNull(foundKeyValue);
}
Also used: Path (org.apache.hadoop.fs.Path), KeyValue (org.apache.hadoop.hbase.KeyValue), Configuration (org.apache.hadoop.conf.Configuration), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), FileSystem (org.apache.hadoop.fs.FileSystem), HFile (org.apache.hadoop.hbase.io.hfile.HFile), CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
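
doTestOfSeekBefore is not shown on this page; a hedged sketch of what such a probe plausibly looks like, assembled from the HalfStoreFileReader calls in Example 2 and the HFileScanner seekBefore API, is:

// Open one half of the file, seek strictly before seekKey, and report the
// cell found there (null when nothing in this half precedes seekKey).
// The method body is an assumption, not the test's actual implementation.
private Cell seekBeforeInHalf(Path p, FileSystem fs, Reference ref, Cell seekKey,
        CacheConfig cacheConf) throws IOException {
    HalfStoreFileReader halfReader =
        new HalfStoreFileReader(fs, p, cacheConf, ref, TEST_UTIL.getConfiguration());
    halfReader.loadFileInfo();
    HFileScanner scanner = halfReader.getScanner(false, false, false);
    boolean positioned = scanner.seekBefore(seekKey);
    Cell found = positioned ? scanner.getCell() : null;
    halfReader.close(cacheConf.shouldEvictOnClose());
    return found;
}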

Example 5 with HFileContext

Use of org.apache.hadoop.hbase.io.hfile.HFileContext in project hbase by apache.

From the class TestDataBlockEncoders, method testSeekingOnSample.

/**
   * Test seeking while file is encoded.
   */
@Test
public void testSeekingOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    // create all seekers
    List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        LOG.info("Encoding: " + encoding);
        // TODO remove this once support is added. HBASE-12298
        if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) {
            continue;
        }
        DataBlockEncoder encoder = encoding.getEncoder();
        if (encoder == null) {
            continue;
        }
        LOG.info("Encoder: " + encoder);
        ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
            getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData);
        HFileContext meta = new HFileContextBuilder()
            .withHBaseCheckSum(false)
            .withIncludesMvcc(includesMemstoreTS)
            .withIncludesTags(includesTags)
            .withCompression(Compression.Algorithm.NONE)
            .build();
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(
            CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        encodedSeekers.add(seeker);
    }
    LOG.info("Testing it!");
    // try a few random seeks
    for (boolean seekBefore : new boolean[] { false, true }) {
        for (int i = 0; i < NUM_RANDOM_SEEKS; ++i) {
            int keyValueId;
            if (!seekBefore) {
                keyValueId = randomizer.nextInt(sampleKv.size());
            } else {
                keyValueId = randomizer.nextInt(sampleKv.size() - 1) + 1;
            }
            KeyValue keyValue = sampleKv.get(keyValueId);
            checkSeekingConsistency(encodedSeekers, seekBefore, keyValue);
        }
    }
    // check edge cases
    LOG.info("Checking edge cases");
    checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
    for (boolean seekBefore : new boolean[] { false, true }) {
        checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
        KeyValue midKv = sampleKv.get(sampleKv.size() / 2);
        Cell lastMidKv = CellUtil.createLastOnRowCol(midKv);
        checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
    }
    LOG.info("Done");
}
Also used: KeyValue (org.apache.hadoop.hbase.KeyValue), ArrayList (java.util.ArrayList), HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder), ByteBuffer (java.nio.ByteBuffer), HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext), SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff), Cell (org.apache.hadoop.hbase.Cell), Test (org.junit.Test)
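
checkSeekingConsistency is likewise not shown here; the idea is to run the same seek against every encoding's seeker and require identical answers. A hedged sketch under that assumption:

// Run one seek on every seeker and fail if any two disagree. This is our
// reconstruction of the intent, not the test's actual helper.
private static void assertSeekersAgree(List<DataBlockEncoder.EncodedSeeker> seekers,
        boolean seekBefore, Cell seekKey) {
    Cell expected = null;
    boolean haveExpected = false;
    for (DataBlockEncoder.EncodedSeeker seeker : seekers) {
        seeker.seekToKeyInBlock(seekKey, seekBefore);
        Cell actual = seeker.getCell();
        if (!haveExpected) {
            expected = actual;
            haveExpected = true;
        } else if ((expected == null) != (actual == null)
                || (expected != null
                    && CellComparator.COMPARATOR.compare(expected, actual) != 0)) {
            throw new AssertionError("Seekers disagree after seek to " + seekKey);
        }
    }
}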

Aggregations

HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 55
HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 51
Path (org.apache.hadoop.fs.Path): 34
Test (org.junit.Test): 31
KeyValue (org.apache.hadoop.hbase.KeyValue): 24
FileSystem (org.apache.hadoop.fs.FileSystem): 21
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 21
Cell (org.apache.hadoop.hbase.Cell): 12
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 11
ByteBuffer (java.nio.ByteBuffer): 10
Configuration (org.apache.hadoop.conf.Configuration): 9
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 7
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
DataOutputStream (java.io.DataOutputStream): 5
SingleByteBuff (org.apache.hadoop.hbase.nio.SingleByteBuff): 5
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 4
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 4
PrefixTreeCodec (org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec): 4
EncodedSeeker (org.apache.hadoop.hbase.io.encoding.DataBlockEncoder.EncodedSeeker): 4