Search in sources:

Example 26 with StoreFileWriter

Use of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.

From the class TestMobFile, method testGetScanner.

@Test
public void testGetScanner() throws Exception {
    // Writes a small MOB store file, then verifies that MobFile hands back a
    // usable StoreFileScanner for it.
    Path testDir = TEST_UTIL.getDataTestDir();
    FileSystem fs = testDir.getFileSystem(conf);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(testDir).withFileContext(meta).build();
    MobTestUtil.writeStoreFile(writer, testName.getMethodName());
    MobFile mobFile =
        new MobFile(new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
    // Open the scanner once and assert on that single instance. The previous
    // version called getScanner() twice, opening two scanners and closing neither.
    Object scanner = mobFile.getScanner();
    assertNotNull(scanner);
    assertTrue(scanner instanceof StoreFileScanner);
    // Release the scanner's resources now that the assertions have passed.
    ((StoreFileScanner) scanner).close();
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) FileSystem(org.apache.hadoop.fs.FileSystem) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner) Test(org.junit.Test)

Example 27 with StoreFileWriter

Use of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.

From the class TestIgnoreUnknownFamily, method addStoreFileToKnownFamily.

private void addStoreFileToKnownFamily(RegionInfo region) throws IOException {
    // Drops an empty store file into a column-family directory of the given
    // region. NOTE(review): despite the method name, the directory is named after
    // UNKNOWN_FAMILY — confirm against the callers whether that is intentional.
    MasterFileSystem mfs = UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Path rootDir = CommonFSUtils.getRootDir(mfs.getConfiguration());
    Path familyDir =
        new Path(FSUtils.getRegionDirFromRootDir(rootDir, region), Bytes.toString(UNKNOWN_FAMILY));
    StoreFileWriter.Builder builder =
        new StoreFileWriter.Builder(mfs.getConfiguration(), mfs.getFileSystem());
    StoreFileWriter writer = builder.withOutputDir(familyDir)
        .withFileContext(new HFileContextBuilder().build()).build();
    // No cells are appended on purpose: an empty file is all the test needs.
    writer.close();
}
Also used : MasterFileSystem(org.apache.hadoop.hbase.master.MasterFileSystem) Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)

Example 28 with StoreFileWriter

Use of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.

From the class TestHFile, method writeStoreFile.

/**
 * Writes 1000 randomly generated KeyValues into a fresh store file under the
 * test data directory and returns the path of the written file.
 */
private Path writeStoreFile() throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
    HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, fs)
        .withOutputDir(storeFileParentDir).withFileContext(meta).build();
    final int rowLen = 32;
    // Locals use lowerCamelCase ("RNG" read as a constant). The generator is
    // deliberately unseeded, matching the original behavior.
    Random rng = new Random();
    try {
        for (int i = 0; i < 1000; ++i) {
            byte[] k = RandomKeyValueUtil.randomOrderedKey(rng, i);
            byte[] v = RandomKeyValueUtil.randomValue(rng);
            // Split the bytes after the 32-byte row into family and qualifier at a
            // random boundary.
            int cfLen = rng.nextInt(k.length - rowLen + 1);
            KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,
                k.length - rowLen - cfLen, rng.nextLong(), generateKeyType(rng), v, 0, v.length);
            sfw.append(kv);
        }
    } finally {
        // Close in a finally block so the writer does not leak if append() throws.
        sfw.close();
    }
    return sfw.getPath();
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) ByteBufferKeyValue(org.apache.hadoop.hbase.ByteBufferKeyValue) KeyValue(org.apache.hadoop.hbase.KeyValue) Random(java.util.Random)

Example 29 with StoreFileWriter

Use of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.

From the class TestCacheOnWrite, method writeStoreFile.

/**
 * Writes NUM_KV random KeyValues (optionally carrying a visibility tag) into a
 * new store file and records its path in {@code storeFilePath}.
 *
 * @param useTags whether each cell should carry a single ArrayBackedTag
 */
private void writeStoreFile(boolean useTags) throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "test_cache_on_write");
    HFileContext meta = new HFileContextBuilder().withCompression(compress)
        .withBytesPerCheckSum(CKBYTES).withChecksumType(ChecksumType.NULL)
        .withBlockSize(DATA_BLOCK_SIZE)
        .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
        .withIncludesTags(useTags).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(storeFileParentDir).withFileContext(meta)
        .withBloomType(BLOOM_TYPE).withMaxKeyCount(NUM_KV).build();
    byte[] cf = Bytes.toBytes("fam");
    try {
        for (int i = 0; i < NUM_KV; ++i) {
            byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);
            byte[] qualifier = RandomKeyValueUtil.randomRowOrQualifier(rand);
            byte[] value = RandomKeyValueUtil.randomValue(rand);
            KeyValue kv;
            // NOTE(review): Math.abs(Long.MIN_VALUE) is still negative; harmless here
            // since timestamps are random test data, but worth knowing.
            if (useTags) {
                // Removed an unused Tag[] array that the previous version built
                // alongside the list; only the list is passed to KeyValue.
                List<Tag> tagList = new ArrayList<>();
                tagList.add(new ArrayBackedTag((byte) 1, "visibility"));
                kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0,
                    qualifier.length, Math.abs(rand.nextLong()), generateKeyType(rand), value, 0,
                    value.length, tagList);
            } else {
                kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0,
                    qualifier.length, Math.abs(rand.nextLong()), generateKeyType(rand), value, 0,
                    value.length);
            }
            sfw.append(kv);
        }
    } finally {
        // Close in a finally block so the writer does not leak if append() throws.
        sfw.close();
    }
    storeFilePath = sfw.getPath();
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) KeyValue(org.apache.hadoop.hbase.KeyValue) ArrayList(java.util.ArrayList) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) ArrayBackedTag(org.apache.hadoop.hbase.ArrayBackedTag) Tag(org.apache.hadoop.hbase.Tag)

Example 30 with StoreFileWriter

Use of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.

From the class TestPrefetch, method writeStoreFile.

/**
 * Writes NUM_KV random KeyValues into a new store file under the directory
 * {@code fname} and returns the path of the written file.
 */
private Path writeStoreFile(String fname) throws IOException {
    Path parentDir = new Path(TEST_UTIL.getDataTestDir(), fname);
    HFileContext fileContext = new HFileContextBuilder()
        .withBlockSize(DATA_BLOCK_SIZE)
        .build();
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withOutputDir(parentDir)
        .withFileContext(fileContext)
        .build();
    final int rowLen = 32;
    for (int i = 0; i < NUM_KV; ++i) {
        byte[] key = RandomKeyValueUtil.randomOrderedKey(RNG, i);
        byte[] value = RandomKeyValueUtil.randomValue(RNG);
        // Everything past the 32-byte row is split between family and qualifier
        // at a random boundary.
        int familyLen = RNG.nextInt(key.length - rowLen + 1);
        int qualifierLen = key.length - rowLen - familyLen;
        KeyValue cell = new KeyValue(key, 0, rowLen, key, rowLen, familyLen, key,
            rowLen + familyLen, qualifierLen, RNG.nextLong(), generateKeyType(RNG),
            value, 0, value.length);
        writer.append(cell);
    }
    writer.close();
    return writer.getPath();
}
Also used : Path(org.apache.hadoop.fs.Path) StoreFileWriter(org.apache.hadoop.hbase.regionserver.StoreFileWriter) KeyValue(org.apache.hadoop.hbase.KeyValue)

Aggregations

StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter)30 Path (org.apache.hadoop.fs.Path)23 HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)14 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)13 IOException (java.io.IOException)11 Cell (org.apache.hadoop.hbase.Cell)11 InterruptedIOException (java.io.InterruptedIOException)9 ArrayList (java.util.ArrayList)9 FileSystem (org.apache.hadoop.fs.FileSystem)9 KeyValue (org.apache.hadoop.hbase.KeyValue)9 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)6 Test (org.junit.Test)6 Date (java.util.Date)5 TableName (org.apache.hadoop.hbase.TableName)5 ScannerContext (org.apache.hadoop.hbase.regionserver.ScannerContext)5 Map (java.util.Map)4 HashMap (java.util.HashMap)3 TreeMap (java.util.TreeMap)3 DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException)3 Algorithm (org.apache.hadoop.hbase.io.compress.Compression.Algorithm)3