Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestHStoreFile, method testHFileLink.
@Test
public void testHFileLink() throws IOException {
  final RegionInfo hri =
    RegionInfoBuilder.newBuilder(TableName.valueOf("testHFileLinkTb")).build();
  // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
  Configuration testConf = new Configuration(this.conf);
  CommonFSUtils.setRootDir(testConf, testDir);
  HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
    CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
    .withFilePath(regionFs.createTempName()).withFileContext(meta).build();
  writeStoreFile(writer);
  Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
  Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY));
  HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
  Path linkFilePath =
    new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
  // Try to open store file from link
  StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true);
  HStoreFile hsf = new HStoreFile(storeFileInfo, BloomType.NONE, cacheConf);
  assertTrue(storeFileInfo.isLink());
  hsf.initReader();
  // Now confirm that I can read from the link
  int count = 1;
  HFileScanner s = hsf.getReader().getScanner(false, false);
  s.seekTo();
  while (s.next()) {
    count++;
  }
  assertEquals((LAST_CHAR - FIRST_CHAR + 1) * (LAST_CHAR - FIRST_CHAR + 1), count);
}
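In this test the builder sets only an 8 KB block size, so every other HFile property (compression, data block encoding, checksums) falls back to its default. A minimal sketch of a more fully specified context, assuming the same HBase client libraries; the compression and encoding choices below are illustrative, not taken from the test:

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class HFileContextSketch {
  // Illustrative only: spell out properties the test leaves at their defaults.
  static HFileContext exampleContext() {
    return new HFileContextBuilder()
      .withBlockSize(8 * 1024)                       // same 8 KB blocks as the test
      .withCompression(Compression.Algorithm.NONE)   // no on-disk compression
      .withDataBlockEncoding(DataBlockEncoding.NONE) // no key prefix encoding
      .build();
  }
}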
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestHStoreFile, method testBloomFilter.
@Test
public void testBloomFilter() throws Exception {
  conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
  conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
  // write the file
  Path f = new Path(ROOT_DIR, name.getMethodName());
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
    .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
    .withBloomType(BloomType.ROW).withMaxKeyCount(2000).withFileContext(meta).build();
  bloomWriteRead(writer, fs);
}
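The two configuration keys enable store-file bloom filters and set the target false-positive rate to 1%, while withMaxKeyCount(2000) tells the writer how many keys to size the filter for. CKTYPE and CKBYTES are constants defined elsewhere in the test class; a minimal sketch with concrete stand-in values, assuming CRC32C checksums over 16 KB chunks (illustrative choices, not the test's actual constants):

import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.ChecksumType;

public class ChecksumContextSketch {
  // Illustrative stand-ins for the test's CKTYPE / CKBYTES constants.
  static HFileContext checksummedContext() {
    return new HFileContextBuilder()
      .withBlockSize(8 * 1024)               // small blocks, as in the test
      .withChecksumType(ChecksumType.CRC32C) // checksum algorithm per chunk
      .withBytesPerCheckSum(16 * 1024)       // bytes covered by each checksum
      .build();
  }
}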
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestHRegionReplayEvents, method createHFileForFamilies.
private String createHFileForFamilies(Path testPath, byte[] family, byte[] valueBytes)
    throws IOException {
  HFile.WriterFactory hFileFactory = HFile.getWriterFactoryNoCache(TEST_UTIL.getConfiguration());
  // TODO We need a way to do this without creating files
  Path testFile = new Path(testPath, TEST_UTIL.getRandomUUID().toString());
  FSDataOutputStream out = TEST_UTIL.getTestFileSystem().create(testFile);
  try {
    hFileFactory.withOutputStream(out);
    hFileFactory.withFileContext(new HFileContextBuilder().build());
    HFile.Writer writer = hFileFactory.create();
    try {
      writer.append(new KeyValue(ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
        .setRow(valueBytes).setFamily(family).setQualifier(valueBytes).setTimestamp(0L)
        .setType(KeyValue.Type.Put.getCode()).setValue(valueBytes).build()));
    } finally {
      writer.close();
    }
  } finally {
    out.close();
  }
  return testFile.toString();
}
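A hypothetical caller of this helper, shown as a fragment from the same test class; the directory name and byte values are illustrative:

// Hypothetical usage of the helper above; names are illustrative.
Path familyDir = TEST_UTIL.getDataTestDir("bulkload"); // assumed target directory
String hfilePath = createHFileForFamilies(familyDir,
  Bytes.toBytes("cf1"),  // column family
  Bytes.toBytes("val")); // reused as row, qualifier, and value by the helper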
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestHStoreFile, method testReseek.
/**
 * Test for HBASE-8012
 */
@Test
public void testReseek() throws Exception {
  // write the file
  Path f = new Path(ROOT_DIR, name.getMethodName());
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
    .withFilePath(f).withFileContext(meta).build();
  writeStoreFile(writer);
  writer.close();
  ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
  HFileInfo fileInfo = new HFileInfo(context, conf);
  StoreFileReader reader =
    new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
  fileInfo.initMetaAndIndex(reader.getHFileReader());
  // Now do reseek with empty KV to position to the beginning of the file
  KeyValue k = KeyValueUtil.createFirstOnRow(HConstants.EMPTY_BYTE_ARRAY);
  StoreFileScanner s = getStoreFileScanner(reader, false, false);
  s.reseek(k);
  assertNotNull("Initial reseek should position at the beginning of the file", s.peek());
}
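Because reseek takes a first-on-row key, the scanner lands on the first cell at or after that row. A short follow-up sketch in the same test method, using a hypothetical row key:

// Hypothetical continuation: reseek forward to a specific row.
KeyValue mid = KeyValueUtil.createFirstOnRow(Bytes.toBytes("row-0042")); // illustrative key
s.reseek(mid);
Cell found = s.peek(); // null if no cell sorts at or after the requested row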
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestRegionObserverInterface, method createHFile.
private static void createHFile(Configuration conf, FileSystem fs, Path path, byte[] family,
    byte[] qualifier) throws IOException {
  HFileContext context = new HFileContextBuilder().build();
  HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path)
    .withFileContext(context).create();
  long now = EnvironmentEdgeManager.currentTime();
  try {
    for (int i = 1; i <= 9; i++) {
      KeyValue kv =
        new KeyValue(Bytes.toBytes(i + ""), family, qualifier, now, Bytes.toBytes(i + ""));
      writer.append(kv);
    }
  } finally {
    writer.close();
  }
}
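A hypothetical invocation of this helper; the configuration, path, family, and qualifier below are illustrative:

// Hypothetical usage: write rows "1".."9" into a standalone HFile.
Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
Path hfile = new Path("/tmp/example-hfile"); // illustrative location
createHFile(conf, fs, hfile, Bytes.toBytes("f"), Bytes.toBytes("q"));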