Search in sources:

Example 26 with HFileContextBuilder

use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in project hbase by apache.

From the class TestHStore, method addStoreFile.

/**
 * Adds an extra (empty) store file to the store's directory, with a sequence id
 * one greater than the store's current maximum so it sorts after existing files.
 *
 * @throws IOException if the file system or writer fails
 */
private void addStoreFile() throws IOException {
    // Use any existing store file only to discover the store directory.
    HStoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = this.store.getMaxSequenceId().orElse(0L);
    Configuration c = TEST_UTIL.getConfiguration();
    FileSystem fs = FileSystem.get(c);
    HFileContext fileContext = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    StoreFileWriter w = new StoreFileWriter.Builder(c, new CacheConfig(c), fs)
        .withOutputDir(storedir).withFileContext(fileContext).build();
    try {
        // seqid + 1 makes the new file the "latest" one in the store.
        w.appendMetadata(seqid + 1, false);
    } finally {
        // Close in finally so the writer (and its output stream) is not leaked
        // if appendMetadata throws.
        w.close();
    }
    LOG.info("Added store file:" + w.getPath());
}
Also used : Path(org.apache.hadoop.fs.Path) Configuration(org.apache.hadoop.conf.Configuration) CompactionConfiguration(org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) LocalFileSystem(org.apache.hadoop.fs.LocalFileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext)

Example 27 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestHStore, method testEmptyStoreFile.

/**
 * Test for hbase-1686: a zero-cell store file must not break store opening.
 * Writes one real store file, then hand-crafts an empty store file next to it
 * and verifies the reopened store sees both files and reads still work.
 */
@Test
public void testEmptyStoreFile() throws IOException {
    init(this.name.getMethodName());
    // Write a store file with two cells so the store has one real file on disk.
    this.store.add(new KeyValue(row, family, qf1, 1, (byte[]) null), null);
    this.store.add(new KeyValue(row, family, qf2, 1, (byte[]) null), null);
    flush(1);
    // Now put in place an empty store file.  It's a little tricky.  Have to
    // do manually with hacked in sequence id.
    HStoreFile f = this.store.getStorefiles().iterator().next();
    Path storedir = f.getPath().getParent();
    long seqid = f.getMaxSequenceId();
    Configuration c = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(c);
    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
    // Writer gets only metadata, never a cell: the result is an empty store file.
    // seqid + 1 makes it sort after the flushed file.
    StoreFileWriter w = new StoreFileWriter.Builder(c, new CacheConfig(c), fs).withOutputDir(storedir).withFileContext(meta).build();
    w.appendMetadata(seqid + 1, false);
    w.close();
    this.store.close();
    // Reopen it... should pick up two files
    this.store = new HStore(this.store.getHRegion(), this.store.getColumnFamilyDescriptor(), c, false);
    assertEquals(2, this.store.getStorefilesCount());
    // The empty file must not hide the real data: the original cell is still readable.
    // NOTE(review): `result`, `get` and `qualifiers` are fields set up elsewhere in
    // the test class (presumably by init/flush) — not visible in this excerpt.
    result = HBaseTestingUtil.getFromStoreFile(store, get.getRow(), qualifiers);
    assertEquals(1, result.size());
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) CompactionConfiguration(org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) LocalFileSystem(org.apache.hadoop.fs.LocalFileSystem) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 28 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestHStoreFile, method testMultipleTimestamps.

/**
 * Test to ensure correctness when using StoreFile with multiple timestamps:
 * shouldUseScanner must accept scans whose time range overlaps the file's
 * timestamp range [1, 20], reject non-overlapping ranges, and honor a
 * per-column-family time-range override.
 */
@Test
public void testMultipleTimestamps() throws IOException {
    byte[] family = Bytes.toBytes("familyname");
    byte[] qualifier = Bytes.toBytes("qualifier");
    int numRows = 10;
    long[] timestamps = new long[] { 20, 10, 5, 1 };
    Scan scan = new Scan();
    // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
    Path storedir = new Path(new Path(testDir, "7e0102"), Bytes.toString(family));
    Path dir = new Path(storedir, "1234567890");
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withOutputDir(dir).withFileContext(meta).build();
    List<KeyValue> kvList = getKeyValueSet(timestamps, numRows, qualifier, family);
    for (KeyValue kv : kvList) {
        writer.append(kv);
    }
    writer.appendMetadata(0, false);
    writer.close();
    HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
    HStore store = mock(HStore.class);
    when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of(family));
    hsf.initReader();
    StoreFileReader reader = hsf.getReader();
    StoreFileScanner scanner = getStoreFileScanner(reader, false, false);
    try {
        // Ranges overlapping the file's [1, 20] timestamp span must keep the scanner.
        scan.setTimeRange(20, 100);
        assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
        scan.setTimeRange(1, 2);
        assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
        scan.setTimeRange(8, 10);
        assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
        // lets make sure it still works with column family time ranges
        scan.setColumnFamilyTimeRange(family, 7, 50);
        assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
        // This test relies on the timestamp range optimization:
        // [27, 50) does not overlap [1, 20], so the file can be skipped.
        scan = new Scan();
        scan.setTimeRange(27, 50);
        assertTrue(!scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
        // should still use the scanner because we override the family time range
        scan = new Scan();
        scan.setTimeRange(27, 50);
        scan.setColumnFamilyTimeRange(family, 7, 50);
        assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
    } finally {
        // Release the read reference the scanner holds on the store file.
        scanner.close();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) TreeSet(java.util.TreeSet) Scan(org.apache.hadoop.hbase.client.Scan) Test(org.junit.Test)

Example 29 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestHStoreFile, method testBasicHalfAndHFileLinkMapFile.

/**
 * Write a file and then assert that we can read from top and bottom halves using two
 * HalfMapFiles, as well as one HalfMapFile and one HFileLink file.
 */
@Test
public void testBasicHalfAndHFileLinkMapFile() throws Exception {
    final RegionInfo regionInfo =
        RegionInfoBuilder.newBuilder(TableName.valueOf("testBasicHalfAndHFileLinkMapFile")).build();
    // HFileLink resolves hfile locations against the CommonFSUtils table dir, so the
    // region directory must be created under CommonFSUtils.getTableDir as well.
    final Path tableDir =
        CommonFSUtils.getTableDir(CommonFSUtils.getRootDir(conf), regionInfo.getTable());
    HRegionFileSystem regionFs =
        HRegionFileSystem.createRegionOnFileSystem(conf, fs, tableDir, regionInfo);
    // Write a store file into a temp name, then commit it into the family directory.
    HFileContext fileContext = new HFileContextBuilder().withBlockSize(2 * 1024).build();
    StoreFileWriter sfWriter = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
        .withFilePath(regionFs.createTempName())
        .withFileContext(fileContext)
        .build();
    writeStoreFile(sfWriter);
    Path committedPath = regionFs.commitStoreFile(TEST_FAMILY, sfWriter.getPath());
    HStoreFile storeFile =
        new HStoreFile(this.fs, committedPath, conf, cacheConf, BloomType.NONE, true);
    // Exercise reading the file through top/bottom half readers and an HFileLink.
    checkHalfHFile(regionFs, storeFile);
}
Also used : Path(org.apache.hadoop.fs.Path) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Example 30 with HFileContextBuilder

Use of org.apache.hadoop.hbase.io.hfile.HFileContextBuilder in the Apache HBase project.

From the class TestHStoreFile, method testStoreFileReference.

// Verifies that opening a StoreFileScanner takes a read reference on the backing
// store file and that closing the scanner releases it.
@Test
public void testStoreFileReference() throws Exception {
    final RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf("testStoreFileReference")).build();
    HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
    HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
    // Make a store file and write data to it.
    StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(regionFs.createTempName()).withFileContext(meta).build();
    writeStoreFile(writer);
    Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
    // NOTE(review): writer.close() after commitStoreFile looks out of order; presumably
    // writeStoreFile() (not visible here) already closed the writer, making this a
    // harmless second close — confirm against the helper before reordering.
    writer.close();
    HStoreFile file = new HStoreFile(this.fs, hsfPath, conf, cacheConf, BloomType.NONE, true);
    file.initReader();
    StoreFileReader r = file.getReader();
    assertNotNull(r);
    // Mocked HFileScanner: only the reference-counting path is under test, not reads.
    StoreFileScanner scanner = new StoreFileScanner(r, mock(HFileScanner.class), false, false, 0, 0, false);
    // Verify after instantiating scanner refCount is increased
    assertTrue("Verify file is being referenced", file.isReferencedInReads());
    scanner.close();
    // Verify after closing scanner refCount is decreased
    assertFalse("Verify file is not being referenced", file.isReferencedInReads());
}
Also used : Path(org.apache.hadoop.fs.Path) HFileScanner(org.apache.hadoop.hbase.io.hfile.HFileScanner) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) HFileContextBuilder(org.apache.hadoop.hbase.io.hfile.HFileContextBuilder) HFileContext(org.apache.hadoop.hbase.io.hfile.HFileContext) Test(org.junit.Test)

Aggregations

HFileContextBuilder (org.apache.hadoop.hbase.io.hfile.HFileContextBuilder)89 HFileContext (org.apache.hadoop.hbase.io.hfile.HFileContext)82 Path (org.apache.hadoop.fs.Path)52 Test (org.junit.Test)48 KeyValue (org.apache.hadoop.hbase.KeyValue)39 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)27 FileSystem (org.apache.hadoop.fs.FileSystem)26 Cell (org.apache.hadoop.hbase.Cell)17 HFile (org.apache.hadoop.hbase.io.hfile.HFile)16 ByteBuffer (java.nio.ByteBuffer)15 Configuration (org.apache.hadoop.conf.Configuration)14 HFileScanner (org.apache.hadoop.hbase.io.hfile.HFileScanner)12 StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter)12 DataOutputStream (java.io.DataOutputStream)6 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)6 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)6 DataBlockEncoding (org.apache.hadoop.hbase.io.encoding.DataBlockEncoding)6 ByteArrayOutputStream (java.io.ByteArrayOutputStream)5 IOException (java.io.IOException)5 ArrayList (java.util.ArrayList)5