Example usage of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.
From class TestMobFile, method testGetScanner.
@Test
public void testGetScanner() throws Exception {
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(testDir)
      .withFileContext(meta)
      .build();
  // Populates (and closes) the writer with test cells keyed by the method name.
  MobTestUtil.writeStoreFile(writer, testName.getMethodName());

  MobFile mobFile =
      new MobFile(new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
  // Open the scanner once and assert on that single instance; the original called
  // getScanner() twice, creating a second scanner that was never used or released.
  Object scanner = mobFile.getScanner();
  assertNotNull(scanner);
  assertTrue(scanner instanceof StoreFileScanner);
}
Example usage of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.
From class TestIgnoreUnknownFamily, method addStoreFileToKnownFamily.
/**
 * Places an empty store file under the directory of {@code UNKNOWN_FAMILY} inside the
 * given region's directory on the master filesystem.
 */
private void addStoreFileToKnownFamily(RegionInfo region) throws IOException {
  MasterFileSystem masterFs = UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
  Path rootDir = CommonFSUtils.getRootDir(masterFs.getConfiguration());
  Path regionDir = FSUtils.getRegionDirFromRootDir(rootDir, region);
  Path familyDir = new Path(regionDir, Bytes.toString(UNKNOWN_FAMILY));
  // Build a writer targeting the family directory and close it immediately:
  // writing no cells still leaves a (header-only) store file behind.
  StoreFileWriter writer =
      new StoreFileWriter.Builder(masterFs.getConfiguration(), masterFs.getFileSystem())
          .withOutputDir(familyDir)
          .withFileContext(new HFileContextBuilder().build())
          .build();
  writer.close();
}
Example usage of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.
From class TestHFile, method writeStoreFile.
/**
 * Writes a store file of 1000 randomly generated KeyValues under the test data
 * directory and returns its path.
 *
 * @return path of the newly written store file
 * @throws IOException if the writer cannot be created or a cell cannot be appended
 */
private Path writeStoreFile() throws IOException {
  Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
  HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
  StoreFileWriter sfw = new StoreFileWriter.Builder(conf, fs)
      .withOutputDir(storeFileParentDir)
      .withFileContext(meta)
      .build();
  final int rowLen = 32;
  // Renamed from "RNG": locals use lowerCamelCase, UPPER_SNAKE is for constants.
  // NOTE(review): unseeded Random makes the generated file differ run-to-run —
  // pass a fixed seed here if reproducibility is ever needed.
  Random rng = new Random();
  for (int i = 0; i < 1000; ++i) {
    byte[] k = RandomKeyValueUtil.randomOrderedKey(rng, i);
    byte[] v = RandomKeyValueUtil.randomValue(rng);
    // The key bytes are sliced into row / family / qualifier segments:
    // [0, rowLen) = row, [rowLen, rowLen+cfLen) = family, remainder = qualifier.
    int cfLen = rng.nextInt(k.length - rowLen + 1);
    KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,
        k.length - rowLen - cfLen, rng.nextLong(), generateKeyType(rng), v, 0, v.length);
    sfw.append(kv);
  }
  sfw.close();
  return sfw.getPath();
}
Example usage of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.
From class TestCacheOnWrite, method writeStoreFile.
/**
 * Writes a store file of {@code NUM_KV} random KeyValues (optionally tagged) and
 * records its path in {@code storeFilePath}.
 *
 * @param useTags whether each cell carries a single visibility tag
 * @throws IOException if the writer cannot be created or a cell cannot be appended
 */
private void writeStoreFile(boolean useTags) throws IOException {
  Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "test_cache_on_write");
  HFileContext meta = new HFileContextBuilder()
      .withCompression(compress)
      .withBytesPerCheckSum(CKBYTES)
      .withChecksumType(ChecksumType.NULL)
      .withBlockSize(DATA_BLOCK_SIZE)
      .withDataBlockEncoding(NoOpDataBlockEncoder.INSTANCE.getDataBlockEncoding())
      .withIncludesTags(useTags)
      .build();
  StoreFileWriter sfw = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(storeFileParentDir)
      .withFileContext(meta)
      .withBloomType(BLOOM_TYPE)
      .withMaxKeyCount(NUM_KV)
      .build();
  byte[] cf = Bytes.toBytes("fam");
  for (int i = 0; i < NUM_KV; ++i) {
    byte[] row = RandomKeyValueUtil.randomOrderedKey(rand, i);
    byte[] qualifier = RandomKeyValueUtil.randomRowOrQualifier(rand);
    byte[] value = RandomKeyValueUtil.randomValue(rand);
    KeyValue kv;
    if (useTags) {
      // Only the List<Tag> is consumed by the KeyValue constructor; the original
      // also built a Tag[] array that was never used — removed as dead code.
      Tag t = new ArrayBackedTag((byte) 1, "visibility");
      List<Tag> tagList = new ArrayList<>();
      tagList.add(t);
      kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
          Math.abs(rand.nextLong()), generateKeyType(rand), value, 0, value.length, tagList);
    } else {
      kv = new KeyValue(row, 0, row.length, cf, 0, cf.length, qualifier, 0, qualifier.length,
          Math.abs(rand.nextLong()), generateKeyType(rand), value, 0, value.length);
    }
    sfw.append(kv);
  }
  sfw.close();
  storeFilePath = sfw.getPath();
}
Example usage of org.apache.hadoop.hbase.regionserver.StoreFileWriter in the Apache HBase project.
From class TestPrefetch, method writeStoreFile.
/**
 * Writes a store file of {@code NUM_KV} randomly generated KeyValues under the
 * given subdirectory name and returns its path.
 *
 * @param fname subdirectory name under the test data dir to write into
 * @return path of the newly written store file
 * @throws IOException if the writer cannot be created or a cell cannot be appended
 */
private Path writeStoreFile(String fname) throws IOException {
  Path outputDir = new Path(TEST_UTIL.getDataTestDir(), fname);
  HFileContext fileContext = new HFileContextBuilder().withBlockSize(DATA_BLOCK_SIZE).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(outputDir)
      .withFileContext(fileContext)
      .build();
  final int rowLen = 32;
  for (int i = 0; i < NUM_KV; ++i) {
    byte[] keyBytes = RandomKeyValueUtil.randomOrderedKey(RNG, i);
    byte[] valueBytes = RandomKeyValueUtil.randomValue(RNG);
    // The key bytes are sliced into segments: first rowLen bytes are the row,
    // the next familyLen bytes the family, and whatever remains the qualifier.
    int familyLen = RNG.nextInt(keyBytes.length - rowLen + 1);
    int qualifierLen = keyBytes.length - rowLen - familyLen;
    KeyValue cell = new KeyValue(keyBytes, 0, rowLen, keyBytes, rowLen, familyLen, keyBytes,
        rowLen + familyLen, qualifierLen, RNG.nextLong(), generateKeyType(RNG), valueBytes, 0,
        valueBytes.length);
    writer.append(cell);
  }
  writer.close();
  return writer.getPath();
}
Aggregations