Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestCachedMobFile, the method testOpenClose.
@Test
public void testOpenClose() throws Exception {
  String caseName = testName.getMethodName();
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
    .withOutputDir(testDir).withFileContext(meta).build();
  MobTestUtil.writeStoreFile(writer, caseName);
  CachedMobFile cachedMobFile = CachedMobFile.create(fs, writer.getPath(), conf, cacheConf);
  assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
  // Each open() increments the reference count and each close() decrements it.
  cachedMobFile.open();
  assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
  cachedMobFile.open();
  assertEquals(EXPECTED_REFERENCE_TWO, cachedMobFile.getReferenceCount());
  cachedMobFile.close();
  assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
  cachedMobFile.close();
  assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
}
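All of the examples on this page share the same core pattern: build an HFileContext with HFileContextBuilder, then pass it to a writer builder. The self-contained sketch below distills that pattern, assuming only that a local HBase configuration and filesystem are available; the class name HFileContextSketch and the output path are invented for illustration and do not come from the tests.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;

public class HFileContextSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    Path outputDir = new Path("/tmp/hfilecontext-sketch"); // illustrative path only

    // The HFileContext carries the low-level HFile settings, here just the block size.
    HFileContext context = new HFileContextBuilder()
      .withBlockSize(8 * 1024)
      .build();

    // The context is handed to the writer builder, exactly as in the tests on this page.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, new CacheConfig(conf), fs)
      .withOutputDir(outputDir)
      .withFileContext(context)
      .build();
    writer.close(); // finalizes the (empty) store file under outputDir
  }
}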
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestCachedMobFile, the method testCompare.
@SuppressWarnings("SelfComparison")
@Test
public void testCompare() throws Exception {
  String caseName = testName.getMethodName();
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  Path outputDir1 = new Path(testDir, FAMILY1);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  StoreFileWriter writer1 = new StoreFileWriter.Builder(conf, cacheConf, fs)
    .withOutputDir(outputDir1).withFileContext(meta).build();
  MobTestUtil.writeStoreFile(writer1, caseName);
  CachedMobFile cachedMobFile1 = CachedMobFile.create(fs, writer1.getPath(), conf, cacheConf);
  Path outputDir2 = new Path(testDir, FAMILY2);
  StoreFileWriter writer2 = new StoreFileWriter.Builder(conf, cacheConf, fs)
    .withOutputDir(outputDir2).withFileContext(meta).build();
  MobTestUtil.writeStoreFile(writer2, caseName);
  CachedMobFile cachedMobFile2 = CachedMobFile.create(fs, writer2.getPath(), conf, cacheConf);
  // cachedMobFile2 is given the higher access count, so it compares as smaller
  // and sorts ahead of cachedMobFile1.
  cachedMobFile1.access(1);
  cachedMobFile2.access(2);
  assertEquals(1, cachedMobFile1.compareTo(cachedMobFile2));
  assertEquals(-1, cachedMobFile2.compareTo(cachedMobFile1));
  assertEquals(0, cachedMobFile1.compareTo(cachedMobFile1));
}
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestFSErrorsExposed, the method testStoreFileScannerThrowsErrors.
/**
 * Injects errors into the pread calls of an on-disk file and makes sure
 * those bubble up to the StoreFileScanner.
 */
@Test
public void testStoreFileScannerThrowsErrors() throws IOException {
  Path hfilePath = new Path(new Path(util.getDataTestDir("internalScannerExposesErrors"),
    "regionname"), "familyname");
  HFileSystem hfs = (HFileSystem) util.getTestFileSystem();
  FaultyFileSystem faultyfs = new FaultyFileSystem(hfs.getBackingFs());
  HFileSystem fs = new HFileSystem(faultyfs);
  CacheConfig cacheConf = new CacheConfig(util.getConfiguration());
  HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
  // Write the store file through the regular filesystem, then read it back
  // through the fault-injecting wrapper.
  StoreFileWriter writer = new StoreFileWriter.Builder(util.getConfiguration(), cacheConf, hfs)
    .withOutputDir(hfilePath).withFileContext(meta).build();
  TestHStoreFile.writeStoreFile(writer, Bytes.toBytes("cf"), Bytes.toBytes("qual"));
  HStoreFile sf = new HStoreFile(fs, writer.getPath(), util.getConfiguration(), cacheConf,
    BloomType.NONE, true);
  // 0 is passed as readpoint because this test operates on HStoreFile directly.
  List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
    Collections.singletonList(sf), false, true, false, false, 0);
  KeyValueScanner scanner = scanners.get(0);
  FaultyInputStream inStream = faultyfs.inStreams.get(0).get();
  assertNotNull(inStream);
  scanner.seek(KeyValue.LOWESTKEY);
  // Do at least one successful read.
  assertNotNull(scanner.next());
  faultyfs.startFaults();
  try {
    int scanned = 0;
    while (scanner.next() != null) {
      scanned++;
    }
    fail("Scanner didn't throw after faults injected");
  } catch (IOException ioe) {
    LOG.info("Got expected exception", ioe);
    assertTrue(ioe.getMessage().contains("Could not iterate"));
  }
  scanner.close();
}
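The FaultyFileSystem and FaultyInputStream used above are helper classes local to the HBase test suite, so their implementations are not shown here. As a rough illustration of the wrapping technique the test relies on, the sketch below shows one way such fault injection can be wired with Hadoop's FilterFileSystem; the class and member names (SketchFaultyFileSystem, faultsEnabled) are invented for this example, and the real test helpers may differ.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Illustrative only: a FilterFileSystem that starts failing positional reads
 * on the streams it has opened once startFaults() is called.
 */
public class SketchFaultyFileSystem extends FilterFileSystem {

  private volatile boolean faultsEnabled = false;

  public SketchFaultyFileSystem(FileSystem backing) {
    super(backing);
  }

  /** After this call, every pread issued through our streams throws. */
  public void startFaults() {
    faultsEnabled = true;
  }

  @Override
  public FSDataInputStream open(Path f, int bufferSize) throws IOException {
    final FSDataInputStream real = fs.open(f, bufferSize);
    // FSDataInputStream can wrap another FSDataInputStream, which lets us
    // intercept positional reads while delegating everything else.
    return new FSDataInputStream(real) {
      @Override
      public int read(long position, byte[] buffer, int offset, int length) throws IOException {
        if (faultsEnabled) {
          throw new IOException("Fault injected on pread");
        }
        return real.read(position, buffer, offset, length);
      }
    };
  }
}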
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestDataBlockEncodingTool, the method createHFileWithTags.
private void createHFileWithTags(Path path, boolean useTags, boolean allTags) throws IOException {
  HFileContext meta =
    new HFileContextBuilder().withBlockSize(64 * 1024).withIncludesTags(useTags).build();
  sfw = new StoreFileWriter.Builder(conf, fs).withFilePath(path).withFileContext(meta).build();
  long now = EnvironmentEdgeManager.currentTime();
  byte[] FAMILY = Bytes.toBytes("cf");
  byte[] QUALIFIER = Bytes.toBytes("q");
  try {
    for (char d = 'a'; d <= 'z'; d++) {
      for (char e = 'a'; e <= 'z'; e++) {
        byte[] b = new byte[] { (byte) d, (byte) e };
        KeyValue kv;
        if (useTags) {
          if (allTags) {
            // Write cells with tags to the HFile.
            Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 0, Bytes.toString(b)),
              new ArrayBackedTag((byte) 0, Bytes.toString(b)) };
            kv = new KeyValue(b, FAMILY, QUALIFIER, now, b, tags);
          } else {
            // Write half of the cells with tags and half without tags to the HFile.
            if ((e - 'a') % 2 == 0) {
              kv = new KeyValue(b, FAMILY, QUALIFIER, now, b);
            } else {
              Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 0, Bytes.toString(b)),
                new ArrayBackedTag((byte) 0, Bytes.toString(b)) };
              kv = new KeyValue(b, FAMILY, QUALIFIER, now, b, tags);
            }
          }
        } else {
          // Write cells without tags to the HFile.
          kv = new KeyValue(b, FAMILY, QUALIFIER, now, b);
        }
        sfw.append(kv);
      }
    }
    sfw.appendMetadata(0, false);
  } finally {
    sfw.close();
  }
}
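A side note on the tag handling above: the test only attaches tags when useTags is set, and the same flag is passed to withIncludesTags on the HFileContextBuilder. The short sketch below pulls those two tag-related pieces into a self-contained example; the tag type bytes and values are arbitrary, and the class name TaggedCellSketch is invented for illustration.

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TaggedCellSketch {
  public static void main(String[] args) {
    // A context that opts in to tags, mirroring withIncludesTags(useTags) above.
    HFileContext taggedContext = new HFileContextBuilder()
      .withBlockSize(64 * 1024)
      .withIncludesTags(true)
      .build();
    // A cell carrying two ArrayBackedTags; the tag type bytes and values are arbitrary.
    Tag[] tags = new Tag[] { new ArrayBackedTag((byte) 1, "first-tag"),
      new ArrayBackedTag((byte) 2, "second-tag") };
    KeyValue taggedKv = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
      Bytes.toBytes("q"), System.currentTimeMillis(), Bytes.toBytes("value"), tags);
    System.out.println("includesTags=" + taggedContext.isIncludesTags() + " cell=" + taggedKv);
  }
}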
Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.
From the class TestHalfStoreFileReader, the method testHalfScanner.
// Tests the scanner on an HFile that is backed by HalfStoreFiles.
@Test
public void testHalfScanner() throws IOException {
  String root_dir = TEST_UTIL.getDataTestDir().toString();
  Path p = new Path(root_dir, "test");
  Configuration conf = TEST_UTIL.getConfiguration();
  FileSystem fs = FileSystem.get(conf);
  CacheConfig cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
  HFile.Writer w =
    HFile.getWriterFactory(conf, cacheConf).withPath(fs, p).withFileContext(meta).create();
  // Write some things.
  List<KeyValue> items = genSomeKeys();
  for (KeyValue kv : items) {
    w.append(kv);
  }
  w.close();
  HFile.Reader r = HFile.createReader(fs, p, cacheConf, true, conf);
  Cell midKV = r.midKey().get();
  byte[] midkey = CellUtil.cloneRow(midKV);
  Reference bottom = new Reference(midkey, Reference.Range.bottom);
  Reference top = new Reference(midkey, Reference.Range.top);
  // Ugly code to get the item before the midkey.
  KeyValue beforeMidKey = null;
  for (KeyValue item : items) {
    if (CellComparatorImpl.COMPARATOR.compare(item, midKV) >= 0) {
      break;
    }
    beforeMidKey = item;
  }
  System.out.println("midkey: " + midKV + " or: " + Bytes.toStringBinary(midkey));
  System.out.println("beforeMidKey: " + beforeMidKey);
  // Seek on the splitKey; it should be in the top half, not in the bottom.
  Cell foundKeyValue = doTestOfSeekBefore(p, fs, bottom, midKV, cacheConf);
  assertEquals(beforeMidKey, foundKeyValue);
  // Seek to the last thing; it should be the penultimate item on the top, and the one
  // before the midkey on the bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(items.size() - 1), cacheConf);
  assertEquals(items.get(items.size() - 2), foundKeyValue);
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(items.size() - 1), cacheConf);
  assertEquals(beforeMidKey, foundKeyValue);
  // Try to seek before something that is in the bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(0), cacheConf);
  assertNull(foundKeyValue);
  // Try to seek before the first thing.
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(0), cacheConf);
  assertNull(foundKeyValue);
  // Try to seek before the second thing in the top and the bottom.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, items.get(1), cacheConf);
  assertNull(foundKeyValue);
  foundKeyValue = doTestOfSeekBefore(p, fs, bottom, items.get(1), cacheConf);
  assertEquals(items.get(0), foundKeyValue);
  // Try to seek before the splitKey in the top file.
  foundKeyValue = doTestOfSeekBefore(p, fs, top, midKV, cacheConf);
  assertNull(foundKeyValue);
}