
Example 71 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From class TestCompoundBloomFilter, method writeStoreFile:

private Path writeStoreFile(int t, BloomType bt, List<KeyValue> kvs) throws IOException {
    conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE, BLOOM_BLOCK_SIZES[t]);
    conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
    cacheConf = new CacheConfig(conf, blockCache);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();
    StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(TEST_UTIL.getDataTestDir())
        .withBloomType(bt)
        .withFileContext(meta)
        .build();
    assertTrue(w.hasGeneralBloom());
    assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter);
    CompoundBloomFilterWriter cbbf = (CompoundBloomFilterWriter) w.getGeneralBloomWriter();
    int keyCount = 0;
    KeyValue prev = null;
    LOG.debug("Total keys/values to insert: " + kvs.size());
    for (KeyValue kv : kvs) {
        w.append(kv);
        // Validate the key count in the Bloom filter.
        boolean newKey = true;
        if (prev != null) {
            newKey = !(bt == BloomType.ROW ? CellUtil.matchingRows(kv, prev) : CellUtil.matchingRowColumn(kv, prev));
        }
        if (newKey)
            ++keyCount;
        assertEquals(keyCount, cbbf.getKeyCount());
        prev = kv;
    }
    w.close();
    return w.getPath();
}
Also used : CompoundBloomFilterWriter(org.apache.hadoop.hbase.io.hfile.CompoundBloomFilterWriter) KeyValue(org.apache.hadoop.hbase.KeyValue) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
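
Note: BLOOM_BLOCK_SIZES and BLOCK_SIZES are parallel arrays of candidate sizes defined as constants on the test class, so the index t selects one block-size pairing per parameterized run. A hypothetical call site for this helper might look as follows (the fixture method and argument values are illustrative assumptions, not code from the test):

// Hypothetical: write a ROW-bloom store file using the first block-size pairing.
List<KeyValue> kvs = createSortedKeyValues(); // assumed fixture, not shown here
Path sfPath = writeStoreFile(0, BloomType.ROW, kvs);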

Example 72 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From class TestScannerWithBulkload, method writeToHFile:

// If nativeHFile is true, we set the cell seq id and MAX_SEQ_ID_KEY in the file;
// otherwise we set BULKLOAD_TIME_KEY.
private Path writeToHFile(long l, String hFilePath, String pathStr, boolean nativeHFile) throws IOException {
    FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
    final Path hfilePath = new Path(hFilePath);
    fs.mkdirs(hfilePath);
    Path path = new Path(pathStr);
    HFile.WriterFactory wf = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
    Assert.assertNotNull(wf);
    HFileContext context = new HFileContextBuilder().build();
    HFile.Writer writer = wf.withPath(fs, path).withFileContext(context).create();
    KeyValue kv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("col"), Bytes.toBytes("q"), l, Bytes.toBytes("version2"));
    // Set cell seq id to test bulk load native hfiles.
    if (nativeHFile) {
        // Set a big seq id. Scan should not look at this seq id in a bulk loaded file.
        // Scan should only look at the seq id appended at the bulk load time, and not skip
        // this kv.
        kv.setSequenceId(9999999);
    }
    writer.append(kv);
    if (nativeHFile) {
        // Set a big MAX_SEQ_ID_KEY. Scan should not look at this seq id in a bulk loaded file.
        // Scan should only look at the seq id appended at the bulk load time, and not skip its
        // kv.
        writer.appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(9999999L));
    } else {
        writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(EnvironmentEdgeManager.currentTime()));
    }
    writer.close();
    return hfilePath;
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) FileSystem(org.apache.hadoop.fs.FileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFile(org.apache.hadoop.hbase.io.hfile.HFile) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
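
Note that new HFileContextBuilder().build() yields a context with HBase's defaults (64 KB block size, no data-block encoding, no tags) in recent versions. Hypothetical call sites exercising both branches of the helper (the paths below are illustrative assumptions):

// Hypothetical: a "native" HFile carrying a cell seq id and MAX_SEQ_ID_KEY...
Path nativeDir = writeToHFile(System.currentTimeMillis(),
        "/tmp/bulkload/native", "/tmp/bulkload/native/file1", true);
// ...versus a plain bulk-load file stamped with BULKLOAD_TIME_KEY.
Path bulkDir = writeToHFile(System.currentTimeMillis(),
        "/tmp/bulkload/plain", "/tmp/bulkload/plain/file2", false);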

Example 73 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From class TestStoreScannerClosure, method writeStoreFile:

private Path writeStoreFile() throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
    HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(CONF, fs).withOutputDir(storeFileParentDir).withFileContext(meta).build();
    final int rowLen = 32;
    Random RNG = new Random();
    for (int i = 0; i < 1000; ++i) {
        byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);
        byte[] v = RandomKeyValueUtil.randomValue(RNG);
        int cfLen = RNG.nextInt(k.length - rowLen + 1);
        KeyValue kv = new KeyValue(
            // row, family, and qualifier are all slices of the random key k
            k, 0, rowLen,
            k, rowLen, cfLen,
            k, rowLen + cfLen, k.length - rowLen - cfLen,
            RNG.nextLong(), generateKeyType(RNG), v, 0, v.length);
        sfw.append(kv);
    }
    sfw.close();
    return sfw.getPath();
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) Random(java.util.Random) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)
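
generateKeyType is a helper defined elsewhere in the test class; it evidently returns a random KeyValue.Type so the file mixes Puts with deletes. A plausible sketch of such a helper (an assumption for illustration; the real implementation is not shown in this snippet):

// Sketch: emit Put half the time and Delete otherwise, so scanners see both
// key types. Not the actual HBase test implementation.
private static KeyValue.Type generateKeyType(Random rand) {
    return rand.nextBoolean() ? KeyValue.Type.Put : KeyValue.Type.Delete;
}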

Example 74 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From class TestSecureBulkloadListener, method createHFileForFamilies:

private String createHFileForFamilies(byte[] family) throws IOException {
    HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(conf);
    Path testDir = new Path(dfs.getWorkingDirectory(), new Path(name.getMethodName(), Bytes.toString(family)));
    if (!dfs.exists(testDir)) {
        dfs.mkdirs(testDir);
    }
    Path hfilePath = new Path(testDir, generateUniqueName(null));
    FSDataOutputStream out = dfs.createFile(hfilePath).build();
    try {
        hFileFactory.withOutputStream(out);
        hFileFactory.withFileContext(new HFileContextBuilder().build());
        HFile.Writer writer = hFileFactory.create();
        try {
            writer.append(new KeyValue(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
                .setRow(randomBytes)
                .setFamily(family)
                .setQualifier(randomBytes)
                .setTimestamp(0L)
                .setType(KeyValue.Type.Put.getCode())
                .setValue(randomBytes)
                .build()));
        } finally {
            writer.close();
        }
    } finally {
        out.close();
    }
    return hfilePath.toString();
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) HFile(org.apache.hadoop.hbase.io.hfile.HFile)
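
The nested try/finally blocks ensure the writer is closed before the underlying stream. The same shape can be written with try-with-resources, which closes resources in reverse declaration order (a sketch, relying on the factory's with* methods being fluent, as the chained calls elsewhere in these examples suggest):

// Sketch: the writer closes first, then the stream, mirroring the nested finally blocks.
try (FSDataOutputStream out = dfs.createFile(hfilePath).build();
     HFile.Writer writer = hFileFactory.withOutputStream(out)
             .withFileContext(new HFileContextBuilder().build())
             .create()) {
    writer.append(kv); // kv: the cell built as in the snippet above
}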

Example 75 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From class TestStoreFileScannerWithTagCompression, method testReseek:

@Test
public void testReseek() throws Exception {
    // write the file
    Path f = new Path(ROOT_DIR, "testReseek");
    HFileContext meta = new HFileContextBuilder()
        .withBlockSize(8 * 1024)
        .withIncludesTags(true)
        .withCompressTags(true)
        .withDataBlockEncoding(DataBlockEncoding.PREFIX)
        .build();
    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f).withFileContext(meta).build();
    writeStoreFile(writer);
    writer.close();
    ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
    HFileInfo fileInfo = new HFileInfo(context, conf);
    StoreFileReader reader = new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
    fileInfo.initMetaAndIndex(reader.getHFileReader());
    StoreFileScanner s = reader.getStoreFileScanner(false, false, false, 0, 0, false);
    try {
        // Reseek with a first-on-row KV (empty family/qualifier) to position at row "k2"
        KeyValue k = KeyValueUtil.createFirstOnRow(Bytes.toBytes("k2"));
        s.reseek(k);
        Cell kv = s.next();
        kv = s.next();
        kv = s.next();
        byte[] key5 = Bytes.toBytes("k5");
        assertTrue(Bytes.equals(key5, 0, key5.length, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
        List<Tag> tags = PrivateCellUtil.getTags(kv);
        assertEquals(1, tags.size());
        assertEquals("tag3", Bytes.toString(Tag.cloneValue(tags.get(0))));
    } finally {
        s.close();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) HFileInfo(org.apache.hadoop.hbase.io.hfile.HFileInfo) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) ReaderContext(org.apache.hadoop.hbase.io.hfile.ReaderContext) ReaderContextBuilder(org.apache.hadoop.hbase.io.hfile.ReaderContextBuilder) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) Tag(org.apache.hadoop.hbase.Tag) Cell(org.apache.hadoop.hbase.Cell) Test(org.junit.Test)
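
Here the context enables tags, tag compression, and PREFIX data-block encoding together, so the reseek has to decode prefix-encoded keys and decompress tags; the final assertions confirm that the "tag3" value survives the round trip intact.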

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder): 89
HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext): 82
Path (org.apache.hadoop.fs.Path): 52
Test (org.junit.Test): 48
KeyValue (org.apache.hadoop.hbase.KeyValue): 39
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 27
FileSystem (org.apache.hadoop.fs.FileSystem): 26
Cell (org.apache.hadoop.hbase.Cell): 17
HFile (org.apache.hadoop.hbase.io.hfile.HFile): 16
ByteBuffer (java.nio.ByteBuffer): 15
Configuration (org.apache.hadoop.conf.Configuration): 14
HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner): 12
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 12
DataOutputStream (java.io.DataOutputStream): 6
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 6
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 6
DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding): 6
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 5
IOException (java.io.IOException): 5
ArrayList (java.util.ArrayList): 5
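
The dominant pattern across these aggregations is an HFileContextBuilder feeding a writer. A minimal, self-contained sketch of that pattern (the class name, output path, and cell contents are illustrative assumptions, not taken from any test above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class HFileContextBuilderSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        FileSystem fs = FileSystem.get(conf);
        // The context carries per-file settings (block size, tags, encoding)
        // that the writer consults; build() freezes them into an HFileContext.
        HFileContext context = new HFileContextBuilder()
            .withBlockSize(64 * 1024)
            .build();
        Path path = new Path("/tmp/hfilecontext-sketch.hfile"); // illustrative path
        HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
            .withPath(fs, path)
            .withFileContext(context)
            .create();
        try {
            // Cells must be appended in ascending key order.
            writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
                Bytes.toBytes("q"), Bytes.toBytes("v1")));
        } finally {
            writer.close();
        }
    }
}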