use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.
the class TestHFile method metablocks.
private void metablocks(final String compress) throws Exception {
  if (cacheConf == null)
    cacheConf = new CacheConfig(conf);
  Path mFile = new Path(ROOT_DIR, "meta.hfile");
  FSDataOutputStream fout = createFSOutput(mFile);
  HFileContext meta = new HFileContextBuilder()
      .withCompression(HFileWriterImpl.compressionByName(compress))
      .withBlockSize(minBlockSize)
      .build();
  Writer writer = HFile.getWriterFactory(conf, cacheConf)
      .withOutputStream(fout)
      .withFileContext(meta)
      .create();
  someTestingWithMetaBlock(writer);
  writer.close();
  fout.close();
  FSDataInputStream fin = fs.open(mFile);
  Reader reader = HFile.createReaderFromStream(mFile, fs.open(mFile),
      this.fs.getFileStatus(mFile).getLen(), cacheConf, conf);
  reader.loadFileInfo();
  // No data -- this should return false.
  assertFalse(reader.getScanner(false, false).seekTo());
  someReadingWithMetaBlock(reader);
  fs.delete(mFile, true);
  reader.close();
  fin.close();
}
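The helpers someTestingWithMetaBlock and someReadingWithMetaBlock are not part of this listing. A minimal sketch of what they might do, assuming HFile.Writer#appendMetaBlock(String, Writable) for writing and HFile.Reader#getMetaBlock(String, boolean) for reading; the block names, count, and payloads here are illustrative, not the actual TestHFile code:

// Hypothetical sketch only: names, counts and payloads are assumptions.
private void someTestingWithMetaBlock(Writer writer) {
  for (int i = 0; i < 10; i++) {
    // appendMetaBlock stores the Writable's serialized bytes as a named meta block.
    writer.appendMetaBlock("TestMeta" + i, new org.apache.hadoop.io.Text("meta value " + i));
  }
}

private void someReadingWithMetaBlock(Reader reader) throws IOException {
  for (int i = 0; i < 10; i++) {
    // getMetaBlock(name, cacheBlock) returns null when no block has that name.
    assertNotNull(reader.getMetaBlock("TestMeta" + i, false));
  }
}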
use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.
the class TestHFile method testNullMetaBlocks.
@Test
public void testNullMetaBlocks() throws Exception {
  if (cacheConf == null)
    cacheConf = new CacheConfig(conf);
  for (Compression.Algorithm compressAlgo : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
    Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta = new HFileContextBuilder()
        .withCompression(compressAlgo)
        .withBlockSize(minBlockSize)
        .build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .create();
    KeyValue kv = new KeyValue("foo".getBytes(), "f1".getBytes(), null, "value".getBytes());
    writer.append(kv);
    writer.close();
    fout.close();
    Reader reader = HFile.createReader(fs, mFile, cacheConf, conf);
    reader.loadFileInfo();
    assertNull(reader.getMetaBlock("non-existant", false));
  }
}
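For contrast with the missing-block case above, a sketch of writing a meta block under a known name and reading it back; the file name, block name, and payload are illustrative assumptions:

// Sketch (assumed names): a meta block written with appendMetaBlock is
// retrievable by name, while unknown names return null as asserted above.
Path mFile = new Path(ROOT_DIR, "withmeta.hfile");
FSDataOutputStream fout = createFSOutput(mFile);
HFileContext ctx = new HFileContextBuilder().withBlockSize(minBlockSize).build();
Writer writer = HFile.getWriterFactory(conf, cacheConf)
    .withOutputStream(fout)
    .withFileContext(ctx)
    .create();
writer.append(new KeyValue("foo".getBytes(), "f1".getBytes(), null, "value".getBytes()));
writer.appendMetaBlock("MyMeta", new org.apache.hadoop.io.Text("payload"));
writer.close();
fout.close();
Reader reader = HFile.createReader(fs, mFile, cacheConf, conf);
reader.loadFileInfo();
assertNotNull(reader.getMetaBlock("MyMeta", false)); // present
assertNull(reader.getMetaBlock("unknown", false));   // absent, returns null
reader.close();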
use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.
the class TestHFile method basicWithSomeCodec.
/**
 * Basic write/read round trip using the given codec.
 * @param codec compression codec name, e.g. "none" or "gz"
 * @param useTags whether to write cells with tags (requires HFile format v3)
 */
void basicWithSomeCodec(String codec, boolean useTags) throws IOException {
  if (useTags) {
    conf.setInt("hfile.format.version", 3);
  }
  if (cacheConf == null)
    cacheConf = new CacheConfig(conf);
  Path ncHFile = new Path(ROOT_DIR, "basic.hfile." + codec.toString() + useTags);
  FSDataOutputStream fout = createFSOutput(ncHFile);
  HFileContext meta = new HFileContextBuilder()
      .withBlockSize(minBlockSize)
      .withCompression(HFileWriterImpl.compressionByName(codec))
      .build();
  Writer writer = HFile.getWriterFactory(conf, cacheConf)
      .withOutputStream(fout)
      .withFileContext(meta)
      .withComparator(CellComparator.COMPARATOR)
      .create();
  LOG.info(writer);
  writeRecords(writer, useTags);
  fout.close();
  FSDataInputStream fin = fs.open(ncHFile);
  Reader reader = HFile.createReaderFromStream(ncHFile, fs.open(ncHFile),
      fs.getFileStatus(ncHFile).getLen(), cacheConf, conf);
  System.out.println(cacheConf.toString());
  // Load up the index.
  reader.loadFileInfo();
  // Get a scanner that caches and that does not use pread.
  HFileScanner scanner = reader.getScanner(true, false);
  // Align scanner at start of the file.
  scanner.seekTo();
  readAllRecords(scanner);
  int seekTo = scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50)));
  System.out.println(seekTo);
  assertTrue("location lookup failed",
      scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50))) == 0);
  // read the key and see if it matches
  ByteBuffer readKey = ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey());
  assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey)));
  scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
  ByteBuffer val1 = scanner.getValue();
  scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
  ByteBuffer val2 = scanner.getValue();
  assertTrue(Arrays.equals(Bytes.toBytes(val1), Bytes.toBytes(val2)));
  reader.close();
  fin.close();
  fs.delete(ncHFile, true);
}
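The driver that calls basicWithSomeCodec is not included in this listing; a plausible sketch, assuming it exercises the "none" and "gz" codecs both with and without tags (the method name and codec list are assumptions):

@Test
public void testBasicWithCodecs() throws IOException {
  for (boolean useTags : new boolean[] { false, true }) {
    basicWithSomeCodec("none", useTags); // no compression
    basicWithSomeCodec("gz", useTags);   // gzip compression
  }
}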
use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.
the class TestHFile method testEmptyHFile.
/**
 * Test an empty HFile: all features should work reasonably when the file
 * contains no entries.
 * @throws IOException
 */
@Test
public void testEmptyHFile() throws IOException {
  if (cacheConf == null)
    cacheConf = new CacheConfig(conf);
  Path f = new Path(ROOT_DIR, testName.getMethodName());
  HFileContext context = new HFileContextBuilder().withIncludesTags(false).build();
  Writer w = HFile.getWriterFactory(conf, cacheConf)
      .withPath(fs, f)
      .withFileContext(context)
      .create();
  w.close();
  Reader r = HFile.createReader(fs, f, cacheConf, conf);
  r.loadFileInfo();
  assertNull(r.getFirstKey());
  assertNull(r.getLastKey());
}
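The same empty file could also be checked through a scanner; a minimal sketch reusing the path f from the test above, assuming HFile.Reader#getEntries() reports the entry count recorded in the trailer:

// Sketch: an empty HFile should also yield a scanner with nothing to seek to.
Reader r2 = HFile.createReader(fs, f, cacheConf, conf);
r2.loadFileInfo();
HFileScanner s = r2.getScanner(false, false);
assertFalse(s.seekTo());           // no first entry to position on
assertEquals(0, r2.getEntries());  // entry count should be zero
r2.close();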
use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.
the class TestHFile method testCorruptTruncatedHFile.
/**
 * Create a truncated HFile and verify that an exception is thrown when reading it.
 */
@Test
public void testCorruptTruncatedHFile() throws IOException {
  if (cacheConf == null)
    cacheConf = new CacheConfig(conf);
  Path f = new Path(ROOT_DIR, testName.getMethodName());
  HFileContext context = new HFileContextBuilder().build();
  Writer w = HFile.getWriterFactory(conf, cacheConf)
      .withPath(this.fs, f)
      .withFileContext(context)
      .create();
  writeSomeRecords(w, 0, 100, false);
  w.close();
  Path trunc = new Path(f.getParent(), "trucated");
  truncateFile(fs, w.getPath(), trunc);
  try {
    Reader r = HFile.createReader(fs, trunc, cacheConf, conf);
  } catch (CorruptHFileException che) {
    // Expected failure
    return;
  }
  fail("Should have thrown exception");
}
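The truncateFile helper is not shown in this listing; a hypothetical implementation that copies only the first half of the source file, which drops the trailer and should make HFile.createReader throw CorruptHFileException:

// Hypothetical sketch of truncateFile; the "copy half the bytes" choice is an assumption.
static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException {
  long half = fs.getFileStatus(src).getLen() / 2;
  byte[] buf = new byte[(int) half];
  try (FSDataInputStream in = fs.open(src);
       FSDataOutputStream out = fs.create(dst)) {
    in.readFully(buf); // read the first half of the original file
    out.write(buf);    // write it out as the truncated copy
  }
}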