Example 11 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class LoadIncrementalHFiles, the method copyHFileHalf:

/**
   * Copy half of an HFile into a new HFile.
   */
private static void copyHFileHalf(Configuration conf, Path inFile, Path outFile,
        Reference reference, HColumnDescriptor familyDescriptor) throws IOException {
    FileSystem fs = inFile.getFileSystem(conf);
    CacheConfig cacheConf = new CacheConfig(conf);
    HalfStoreFileReader halfReader = null;
    StoreFileWriter halfWriter = null;
    try {
        halfReader = new HalfStoreFileReader(fs, inFile, cacheConf, reference, conf);
        Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
        int blocksize = familyDescriptor.getBlocksize();
        Algorithm compression = familyDescriptor.getCompressionType();
        BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
        HFileContext hFileContext = new HFileContextBuilder()
            .withCompression(compression)
            .withChecksumType(HStore.getChecksumType(conf))
            .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
            .withBlockSize(blocksize)
            .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding())
            .withIncludesTags(true)
            .build();
        halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs)
            .withFilePath(outFile)
            .withBloomType(bloomFilterType)
            .withFileContext(hFileContext)
            .build();
        HFileScanner scanner = halfReader.getScanner(false, false, false);
        scanner.seekTo();
        do {
            halfWriter.append(scanner.getCell());
        } while (scanner.next());
        for (Map.Entry<byte[], byte[]> entry : fileInfo.entrySet()) {
            if (shouldCopyHFileMetaKey(entry.getKey())) {
                halfWriter.appendFileInfo(entry.getKey(), entry.getValue());
            }
        }
    } finally {
        if (halfWriter != null) {
            halfWriter.close();
        }
        if (halfReader != null) {
            halfReader.close(cacheConf.shouldEvictOnClose());
        }
    }
}
Also used : StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) HalfStoreFileReader(org.apache.hadoop.hbase.io.HalfStoreFileReader) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) Algorithm(org.apache.hadoop.hbase.io.compress.Compression.Algorithm) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) BloomType(org.apache.hadoop.hbase.regionserver.BloomType) FileSystem(org.apache.hadoop.fs.FileSystem) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Map(java.util.Map) HashMap(java.util.HashMap) TreeMap(java.util.TreeMap)
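
One detail worth noting in copyHFileHalf above: the do/while loop assumes the half-file contains at least one cell, since HFileScanner.seekTo() returns false when there is nothing to seek to. A more defensive variant of the copy loop (a sketch only, not the shipped HBase code) would check that return value first:

if (scanner.seekTo()) {
    do {
        halfWriter.append(scanner.getCell());
    } while (scanner.next());
}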

Example 12 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class BoundedRecoveredHFilesOutputSink, the method createRecoveredHFileWriter:

/**
 * @return a writer for a base HFile with no compression or encoding; good enough for
 *   recovery, given that the hfile carries metadata on how it was written.
 */
private StoreFileWriter createRecoveredHFileWriter(TableName tableName, String regionName,
        long seqId, String familyName, boolean isMetaTable) throws IOException {
    Path outputDir = WALSplitUtil.tryCreateRecoveredHFilesDir(walSplitter.rootFS, walSplitter.conf, tableName, regionName, familyName);
    StoreFileWriter.Builder writerBuilder = new StoreFileWriter.Builder(walSplitter.conf, CacheConfig.DISABLED, walSplitter.rootFS).withOutputDir(outputDir);
    HFileContext hFileContext = new HFileContextBuilder()
        .withChecksumType(StoreUtils.getChecksumType(walSplitter.conf))
        .withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(walSplitter.conf))
        .withCellComparator(isMetaTable ? MetaCellComparator.META_COMPARATOR : CellComparatorImpl.COMPARATOR)
        .build();
    return writerBuilder.withFileContext(hFileContext).build();
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
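
The writer returned by createRecoveredHFileWriter is what the sink uses to write recovered edits for one region and column family back out as an HFile. A rough usage sketch (hypothetical variable names; cells stands for recovered cells already in comparator order):

StoreFileWriter writer = createRecoveredHFileWriter(tableName, regionName, seqId, familyName, false);
try {
    for (Cell cell : cells) {
        // StoreFileWriter is a CellSink, so recovered cells are appended one by one.
        writer.append(cell);
    }
} finally {
    writer.close();
}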

Example 13 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestDataBlockEncoders, the method testSeekingOnSample:

/**
 * Test seeking while the file is encoded.
 */
@Test
public void testSeekingOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    // create all seekers
    List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        LOG.info("Encoding: " + encoding);
        DataBlockEncoder encoder = encoding.getEncoder();
        if (encoder == null) {
            continue;
        }
        LOG.info("Encoder: " + encoder);
        ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv, getEncodingContext(conf, Compression.Algorithm.NONE, encoding), this.useOffheapData);
        HFileContext meta = new HFileContextBuilder()
            .withHBaseCheckSum(false)
            .withIncludesMvcc(includesMemstoreTS)
            .withIncludesTags(includesTags)
            .withCompression(Compression.Algorithm.NONE)
            .build();
        DataBlockEncoder.EncodedSeeker seeker = encoder.createSeeker(encoder.newDataBlockDecodingContext(conf, meta));
        seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
        encodedSeekers.add(seeker);
    }
    LOG.info("Testing it!");
    // try a few random seeks
    for (boolean seekBefore : new boolean[] { false, true }) {
        for (int i = 0; i < NUM_RANDOM_SEEKS; ++i) {
            int keyValueId;
            if (!seekBefore) {
                keyValueId = randomizer.nextInt(sampleKv.size());
            } else {
                keyValueId = randomizer.nextInt(sampleKv.size() - 1) + 1;
            }
            KeyValue keyValue = sampleKv.get(keyValueId);
            checkSeekingConsistency(encodedSeekers, seekBefore, keyValue);
        }
    }
    // check edge cases
    LOG.info("Checking edge cases");
    checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
    for (boolean seekBefore : new boolean[] { false, true }) {
        checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
        KeyValue midKv = sampleKv.get(sampleKv.size() / 2);
        Cell lastMidKv = PrivateCellUtil.createLastOnRowCol(midKv);
        checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
    }
    LOG.info("Done");
}
Also used : KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) ByteBuffer(java.nio.ByteBuffer) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) SingleByteBuff(org.apache.hadoop.hbase.nio.SingleByteBuff) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
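
checkSeekingConsistency is a helper defined in the same test class; conceptually it seeks every encoded seeker to the given key and verifies that they all land on the same cell. A rough sketch of that idea (hypothetical code, not the test's exact implementation):

Cell previous = null;
for (DataBlockEncoder.EncodedSeeker seeker : encodedSeekers) {
    // Position each seeker at (or just before) the requested key.
    seeker.seekToKeyInBlock(keyValue, seekBefore);
    Cell current = seeker.getCell();
    if (previous != null) {
        // Every encoding should resolve the seek to the same cell.
        assertEquals(0, CellComparatorImpl.COMPARATOR.compare(previous, current));
    }
    previous = current;
}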

Example 14 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestDataBlockEncoders, the method getEncodingContext:

private HFileBlockEncodingContext getEncodingContext(Configuration conf, Compression.Algorithm algo, DataBlockEncoding encoding) {
    DataBlockEncoder encoder = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(includesTags)
        .withCompression(algo)
        .build();
    if (encoder != null) {
        return encoder.newDataBlockEncodingContext(conf, encoding, HFILEBLOCK_DUMMY_HEADER, meta);
    } else {
        return new HFileBlockDefaultEncodingContext(conf, encoding, HFILEBLOCK_DUMMY_HEADER, meta);
    }
}
Also used : HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)

Example 15 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestReversibleScanners, the method testReversibleStoreFileScanner:

@Test
public void testReversibleStoreFileScanner() throws IOException {
    FileSystem fs = TEST_UTIL.getTestFileSystem();
    Path hfilePath = new Path(new Path(TEST_UTIL.getDataTestDir("testReversibleStoreFileScanner"), "regionname"), "familyname");
    CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
        HFileContextBuilder hcBuilder = new HFileContextBuilder();
        hcBuilder.withBlockSize(2 * 1024);
        hcBuilder.withDataBlockEncoding(encoding);
        HFileContext hFileContext = hcBuilder.build();
        StoreFileWriter writer = new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConf, fs)
            .withOutputDir(hfilePath)
            .withFileContext(hFileContext)
            .build();
        writeStoreFile(writer);
        HStoreFile sf = new HStoreFile(fs, writer.getPath(), TEST_UTIL.getConfiguration(), cacheConf, BloomType.NONE, true);
        List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(Collections.singletonList(sf), false, true, false, false, Long.MAX_VALUE);
        StoreFileScanner scanner = scanners.get(0);
        seekTestOfReversibleKeyValueScanner(scanner);
        for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
            LOG.info("Setting read point to " + readPoint);
            scanners = StoreFileScanner.getScannersForStoreFiles(Collections.singletonList(sf), false, true, false, false, readPoint);
            seekTestOfReversibleKeyValueScannerWithMVCC(scanners, readPoint);
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DataBlockEncoding(org.apache.hadoop.hbase.io.encoding.DataBlockEncoding) FileSystem(org.apache.hadoop.fs.FileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 89
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 82
Path (org.apache.hadoop.fs.Path): 52
Test (org.junit.Test): 48
KeyValue (org.apache.hadoop.hbase.KeyValue): 39
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 27
FileSystem (org.apache.hadoop.fs.FileSystem): 26
Cell (org.apache.hadoop.hbase.Cell): 17
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 16
ByteBuffer (java.nio.ByteBuffer): 15
Configuration (org.apache.hadoop.conf.Configuration): 14
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 12
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 12
DataOutputStream (java.io.DataOutputStream): 6
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 6
DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
IOException (java.io.IOException): 5
ArrayList (java.util.ArrayList): 5
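
To tie the examples together, here is a minimal, self-contained sketch of the pattern they share: build an HFileContext with HFileContextBuilder and hand it to a writer. This is not taken from the HBase source; it assumes the HBase 2.x API (in particular HFile.getWriterFactory and HFile.Writer.append(Cell)) and a hypothetical output path.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class HFileContextBuilderSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical output location.
        Path path = new Path("/tmp/example.hfile");

        // Describe how the new HFile's blocks should be laid out on disk.
        HFileContext context = new HFileContextBuilder()
            .withBlockSize(64 * 1024)                      // 64 KB data blocks
            .withCompression(Compression.Algorithm.NONE)   // no on-disk compression
            .withDataBlockEncoding(DataBlockEncoding.NONE) // no data block encoding
            .build();

        // Hand the context to a writer; cells must be appended in sorted order.
        HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
            .withPath(fs, path)
            .withFileContext(context)
            .create();
        try {
            writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
                Bytes.toBytes("q"), Bytes.toBytes("value1")));
        } finally {
            writer.close();
        }
    }
}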