Example 1 with Writer

Use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.

The class TestHFile, method metablocks.

private void metablocks(final String compress) throws Exception {
    if (cacheConf == null) {
        cacheConf = new CacheConfig(conf);
    }
    Path mFile = new Path(ROOT_DIR, "meta.hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta = new HFileContextBuilder()
        .withCompression(HFileWriterImpl.compressionByName(compress))
        .withBlockSize(minBlockSize)
        .build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .create();
    someTestingWithMetaBlock(writer);
    writer.close();
    fout.close();
    // Reuse the stream for the reader instead of opening the file a second time.
    FSDataInputStream fin = fs.open(mFile);
    Reader reader = HFile.createReaderFromStream(mFile, fin,
        this.fs.getFileStatus(mFile).getLen(), cacheConf, conf);
    reader.loadFileInfo();
    // Only meta blocks were written -- seeking for data should return false.
    assertFalse(reader.getScanner(false, false).seekTo());
    someReadingWithMetaBlock(reader);
    fs.delete(mFile, true);
    reader.close();
    fin.close();
}
Also used: Path (org.apache.hadoop.fs.Path), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer)
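
The helpers someTestingWithMetaBlock and someReadingWithMetaBlock are defined elsewhere in TestHFile and not shown on this page. Below is a minimal sketch of what they could look like, assuming the Writer.appendMetaBlock(String, Writable) and Reader.getMetaBlock(String, boolean) APIs (the latter appears in Example 2); the block name and the BytesWritable payload are illustrative assumptions, though BytesWritable does show up in the aggregation list at the bottom of this page.

// Hypothetical stand-ins for the helpers referenced above.
private void someTestingWithMetaBlock(Writer writer) {
    // appendMetaBlock stores a named Writable payload in the file's meta section.
    writer.appendMetaBlock("TestMeta", new BytesWritable(Bytes.toBytes("meta-payload")));
}

private void someReadingWithMetaBlock(Reader reader) throws IOException {
    // A missing meta block would come back null (compare Example 2).
    assertNotNull(reader.getMetaBlock("TestMeta", false));
}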

Example 2 with Writer

Use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.

The class TestHFile, method testNullMetaBlocks.

@Test
public void testNullMetaBlocks() throws Exception {
    if (cacheConf == null) {
        cacheConf = new CacheConfig(conf);
    }
    for (Compression.Algorithm compressAlgo : HBaseTestingUtility.COMPRESSION_ALGORITHMS) {
        Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
        FSDataOutputStream fout = createFSOutput(mFile);
        HFileContext meta = new HFileContextBuilder()
            .withCompression(compressAlgo)
            .withBlockSize(minBlockSize)
            .build();
        Writer writer = HFile.getWriterFactory(conf, cacheConf)
            .withOutputStream(fout)
            .withFileContext(meta)
            .create();
        // Bytes.toBytes avoids the platform-default charset that String.getBytes() uses.
        KeyValue kv = new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null,
            Bytes.toBytes("value"));
        writer.append(kv);
        writer.close();
        fout.close();
        Reader reader = HFile.createReader(fs, mFile, cacheConf, conf);
        reader.loadFileInfo();
        // No meta blocks were written, so a lookup must return null.
        assertNull(reader.getMetaBlock("non-existent", false));
        reader.close();
    }
}
Also used: Path (org.apache.hadoop.fs.Path), Compression (org.apache.hadoop.hbase.io.compress.Compression), KeyValue (org.apache.hadoop.hbase.KeyValue), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer), Test (org.junit.Test)
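
Examples 1 through 3 all obtain their output stream from a createFSOutput helper that is not shown here. A plausible sketch, assuming it only needs to (re)create the target file on the test filesystem:

// Hypothetical sketch of the helper; the real one may differ.
private FSDataOutputStream createFSOutput(Path name) throws IOException {
    // Remove any file left behind by a previous run, then create fresh.
    if (fs.exists(name)) {
        fs.delete(name, true);
    }
    return fs.create(name);
}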

Example 3 with Writer

Use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.

The class TestHFile, method basicWithSomeCodec.

/**
   * Test a basic write/read round trip with the given codec (including "none").
   * @param codec compression codec name
   * @param useTags whether to write cell tags (requires HFile format v3)
   */
void basicWithSomeCodec(String codec, boolean useTags) throws IOException {
    if (useTags) {
        // Tags require HFile format version 3.
        conf.setInt("hfile.format.version", 3);
    }
    if (cacheConf == null) {
        cacheConf = new CacheConfig(conf);
    }
    Path ncHFile = new Path(ROOT_DIR, "basic.hfile." + codec + useTags);
    FSDataOutputStream fout = createFSOutput(ncHFile);
    HFileContext meta = new HFileContextBuilder()
        .withBlockSize(minBlockSize)
        .withCompression(HFileWriterImpl.compressionByName(codec))
        .build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .withComparator(CellComparator.COMPARATOR)
        .create();
    LOG.info(writer);
    // writeRecords closes the writer when it is done.
    writeRecords(writer, useTags);
    fout.close();
    // Reuse the stream for the reader instead of opening the file a second time.
    FSDataInputStream fin = fs.open(ncHFile);
    Reader reader = HFile.createReaderFromStream(ncHFile, fin,
        fs.getFileStatus(ncHFile).getLen(), cacheConf, conf);
    System.out.println(cacheConf.toString());
    // Load up the index.
    reader.loadFileInfo();
    // Get a scanner that caches and that does not use pread.
    HFileScanner scanner = reader.getScanner(true, false);
    // Align scanner at start of the file.
    scanner.seekTo();
    readAllRecords(scanner);
    // seekTo returns 0 on an exact match.
    int seekTo = scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(50)));
    System.out.println(seekTo);
    assertTrue("location lookup failed", seekTo == 0);
    // Read the key back and check that it matches what we seeked to.
    ByteBuffer readKey = ByteBuffer.wrap(((KeyValue) scanner.getKey()).getKey());
    assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), Bytes.toBytes(readKey)));
    // Seeking to the same key twice must yield the same value.
    scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
    ByteBuffer val1 = scanner.getValue();
    scanner.seekTo(KeyValueUtil.createKeyValueFromKey(getSomeKey(0)));
    ByteBuffer val2 = scanner.getValue();
    assertTrue(Arrays.equals(Bytes.toBytes(val1), Bytes.toBytes(val2)));
    reader.close();
    fin.close();
    fs.delete(ncHFile, true);
}
Also used: Path (org.apache.hadoop.fs.Path), FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream), ByteBuffer (java.nio.ByteBuffer), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer)
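
The seek assertions above rely on a getSomeKey helper that is also defined elsewhere in TestHFile. A sketch of its likely shape, assuming keys are fixed-width row ids so they sort lexicographically; the padding width and the family/qualifier names are assumptions, not the test's actual values:

// Hypothetical sketch: build the serialized key for a given row id.
private byte[] getSomeKey(int rowId) {
    KeyValue kv = new KeyValue(Bytes.toBytes(String.format("%010d", rowId)),
        Bytes.toBytes("family"), Bytes.toBytes("qual"),
        HConstants.LATEST_TIMESTAMP, KeyValue.Type.Put);
    // getKey() returns only the key portion, which is what seekTo compares.
    return kv.getKey();
}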

Example 4 with Writer

Use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.

The class TestHFile, method testEmptyHFile.

/**
   * Test an empty HFile: all features should behave reasonably when the file
   * contains no entries.
   * @throws IOException on write or read failure
   */
@Test
public void testEmptyHFile() throws IOException {
    if (cacheConf == null) {
        cacheConf = new CacheConfig(conf);
    }
    Path f = new Path(ROOT_DIR, testName.getMethodName());
    HFileContext context = new HFileContextBuilder().withIncludesTags(false).build();
    Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create();
    // Close immediately: the file gets a trailer but no entries.
    w.close();
    Reader r = HFile.createReader(fs, f, cacheConf, conf);
    r.loadFileInfo();
    // An empty file has neither a first nor a last key.
    assertNull(r.getFirstKey());
    assertNull(r.getLastKey());
    r.close();
}
Also used: Path (org.apache.hadoop.fs.Path), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer), Test (org.junit.Test)
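
A natural companion check, mirroring the assertion in Example 1, would be to verify that a scanner over the empty file finds nothing to seek to. This fragment is a sketch, not part of the original test; it would go just before the reader is closed:

// Sketch: seekTo on an empty file returns false because there is no data block.
HFileScanner scanner = r.getScanner(false, false);
assertFalse(scanner.seekTo());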

Example 5 with Writer

Use of org.apache.hadoop.hbase.io.hfile.HFile.Writer in project hbase by apache.

The class TestHFile, method testCorruptTruncatedHFile.

/**
   * Create a truncated hfile and verify that an exception is thrown when it is opened.
   */
@Test
public void testCorruptTruncatedHFile() throws IOException {
    if (cacheConf == null) {
        cacheConf = new CacheConfig(conf);
    }
    Path f = new Path(ROOT_DIR, testName.getMethodName());
    HFileContext context = new HFileContextBuilder().build();
    Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(this.fs, f).withFileContext(context).create();
    writeSomeRecords(w, 0, 100, false);
    w.close();
    // Copy only part of the file so the trailer at the end is lost.
    Path trunc = new Path(f.getParent(), "truncated");
    truncateFile(fs, w.getPath(), trunc);
    try {
        HFile.createReader(fs, trunc, cacheConf, conf);
    } catch (CorruptHFileException che) {
        // Expected failure: the reader cannot parse the truncated file.
        return;
    }
    fail("Should have thrown CorruptHFileException");
}
Also used: Path (org.apache.hadoop.fs.Path), Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader), StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter), Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer), Test (org.junit.Test)
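
truncateFile is another helper that is not shown on this page. A minimal sketch, assuming it copies roughly the first half of the source file so that the HFile trailer, which lives at the end of the file, is lost; the method name matches the call above, but the body is an assumption:

// Hypothetical sketch: write a prefix of src to dst, dropping the trailer.
static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException {
    long half = fs.getFileStatus(src).getLen() / 2;
    byte[] buf = new byte[(int) half];
    try (FSDataInputStream in = fs.open(src);
         FSDataOutputStream out = fs.create(dst)) {
        in.readFully(buf);
        out.write(buf);
    }
}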

Aggregations

Writer (org.apache.hadoop.hbase.io.hfile.HFile.Writer) - 6 usages
Path (org.apache.hadoop.fs.Path) - 5 usages
Reader (org.apache.hadoop.hbase.io.hfile.HFile.Reader) - 5 usages
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter) - 5 usages
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream) - 4 usages
Test (org.junit.Test) - 3 usages
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream) - 2 usages
KeyValue (org.apache.hadoop.hbase.KeyValue) - 2 usages
ByteBuffer (java.nio.ByteBuffer) - 1 usage
Compression (org.apache.hadoop.hbase.io.compress.Compression) - 1 usage
BytesWritable (org.apache.hadoop.io.BytesWritable) - 1 usage