Example 16 with HStoreFile

Use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

From the class MockStoreFileGenerator, method createMockStoreFile:

protected HStoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
    HStoreFile mockSf = mock(HStoreFile.class);
    StoreFileReader reader = mock(StoreFileReader.class);
    String stringPath = "/hbase/testTable/regionA/" + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
    Path path = new Path(stringPath);
    when(reader.getSequenceID()).thenReturn(seqId);
    when(reader.getTotalUncompressedBytes()).thenReturn(sizeInBytes);
    when(reader.length()).thenReturn(sizeInBytes);
    when(mockSf.getPath()).thenReturn(path);
    when(mockSf.excludeFromMinorCompaction()).thenReturn(false);
    // TODO come back to this when selection takes this into account
    when(mockSf.isReference()).thenReturn(false);
    when(mockSf.getReader()).thenReturn(reader);
    String toString = MoreObjects.toStringHelper("MockStoreFile")
        .add("isReference", false)
        .add("fileSize", StringUtils.humanReadableInt(sizeInBytes))
        .add("seqId", seqId)
        .add("path", stringPath)
        .toString();
    when(mockSf.toString()).thenReturn(toString);
    return mockSf;
}
Also used : Path(org.apache.hadoop.fs.Path) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) StoreFileReader(org.apache.hadoop.hbase.regionserver.StoreFileReader)
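
For context, a list built from this helper is what the perf tests in the next examples feed into a compaction policy. A minimal usage sketch, as it might appear inside a test that already has access to createMockStoreFile (the createCandidates helper is hypothetical, not part of the original generator):

// Hypothetical helper: one mock per requested size, with newer files
// getting higher sequence ids, which is all most compaction policies inspect.
protected List<HStoreFile> createCandidates(long[] sizesInBytes) {
    List<HStoreFile> candidates = new ArrayList<>(sizesInBytes.length);
    long seqId = 0;
    for (long size : sizesInBytes) {
        candidates.add(createMockStoreFile(size, seqId++));
    }
    return candidates;
}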

Example 17 with HStoreFile

Use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

From the class PerfTestCompactionPolicies, method testSelection:

@Test
public final void testSelection() throws Exception {
    long fileDiff = 0;
    for (List<HStoreFile> storeFileList : generator) {
        List<HStoreFile> currentFiles = new ArrayList<>(18);
        for (HStoreFile file : storeFileList) {
            currentFiles.add(file);
            currentFiles = runIteration(currentFiles);
        }
        fileDiff += (storeFileList.size() - currentFiles.size());
    }
    // print out tab delimited so that it can be used in excel/gdocs.
    System.out.println(cp.getClass().getSimpleName() + "\t" + fileGenClass.getSimpleName() + "\t" + max + "\t" + min + "\t" + ratio + "\t" + written + "\t" + fileDiff);
}
Also used : ArrayList(java.util.ArrayList) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) Test(org.junit.Test)
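
The row printed above is tab-delimited so results can be pasted straight into a spreadsheet. A small sketch of reading one such row back into named fields (the column names are inferred from the println and are assumptions):

// Split one result row of testSelection into its seven columns.
static void printRow(String row) {
    String[] cols = row.split("\t");
    System.out.printf("policy=%s generator=%s max=%s min=%s ratio=%s written=%s fileDiff=%s%n",
        cols[0], cols[1], cols[2], cols[3], cols[4], cols[5], cols[6]);
}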

Example 18 with HStoreFile

Use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

From the class PerfTestCompactionPolicies, method runIteration:

private List<HStoreFile> runIteration(List<HStoreFile> startingStoreFiles) throws IOException {
    List<HStoreFile> storeFiles = new ArrayList<>(startingStoreFiles);
    CompactionRequestImpl req = cp.selectCompaction(storeFiles, new ArrayList<>(), false, false, false);
    long newFileSize = 0;
    Collection<HStoreFile> filesToCompact = req.getFiles();
    if (!filesToCompact.isEmpty()) {
        // Replace the files selected for compaction with a single mock file
        // whose size is the sum of the selected inputs.
        storeFiles = new ArrayList<>(storeFiles);
        storeFiles.removeAll(filesToCompact);
        for (HStoreFile storeFile : filesToCompact) {
            newFileSize += storeFile.getReader().length();
        }
        storeFiles.add(createMockStoreFileBytes(newFileSize));
    }
    written += newFileSize;
    return storeFiles;
}
Also used : ArrayList(java.util.ArrayList) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile)
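
The written field accumulated here is effectively a write-amplification measure: every byte of a selected file is counted once each time it is rewritten. The same bookkeeping in isolation, over plain sizes instead of mocks (all names here are hypothetical):

// Replace the selected sizes with their sum and report the merged bytes,
// mirroring what runIteration does with mock store files.
static long compactOnce(List<Long> sizes, List<Long> selected) {
    long merged = 0;
    for (long s : selected) {
        merged += s;            // the new file is the sum of its inputs
    }
    sizes.removeAll(selected);  // the inputs are replaced...
    sizes.add(merged);          // ...by a single merged file
    return merged;              // caller adds this to its written total
}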

Example 19 with HStoreFile

Use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

From the class TestCompactedHFilesDischarger, method testCleanerWithParallelScanners:

@Test
public void testCleanerWithParallelScanners() throws Exception {
    // Create the cleaner object
    CompactedHFilesDischarger cleaner = new CompactedHFilesDischarger(1000, (Stoppable) null, rss, false);
    // Add some data to the region and do some flushes
    for (int i = 1; i < 10; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    for (int i = 11; i < 20; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    for (int i = 21; i < 30; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    HStore store = region.getStore(fam);
    assertEquals(3, store.getStorefilesCount());
    Collection<HStoreFile> storefiles = store.getStorefiles();
    Collection<HStoreFile> compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
    // None of the files should be in compacted state.
    for (HStoreFile file : storefiles) {
        assertFalse(file.isCompactedAway());
    }
    startScannerThreads();
    // Do compaction
    region.compact(true);
    storefiles = store.getStorefiles();
    int usedReaderCount = 0;
    int unusedReaderCount = 0;
    for (HStoreFile file : storefiles) {
        if (file.getRefCount() == 0) {
            unusedReaderCount++;
        }
    }
    compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
    for (HStoreFile file : compactedfiles) {
        assertEquals("Refcount should be 3", 3, ((HStoreFile) file).getRefCount());
        usedReaderCount++;
    }
    // The newly compacted file will not be used by any scanner
    assertEquals("unused reader count should be 1", 1, unusedReaderCount);
    assertEquals("used reader count should be 3", 3, usedReaderCount);
    // now run the cleaner
    cleaner.chore();
    countDown();
    // No change in the number of store files as none of the compacted files could be cleaned up
    assertEquals(1, store.getStorefilesCount());
    assertEquals(3, store.getStoreEngine().getStoreFileManager().getCompactedfiles().size());
    while (scanCompletedCounter.get() != 3) {
        Thread.sleep(100);
    }
    // reset
    latch = new CountDownLatch(3);
    scanCompletedCounter.set(0);
    counter.set(0);
    // Try creating a new scanner and it should use only the new file created after compaction
    startScannerThreads();
    storefiles = store.getStorefiles();
    usedReaderCount = 0;
    unusedReaderCount = 0;
    for (HStoreFile file : storefiles) {
        if (file.getRefCount() == 3) {
            usedReaderCount++;
        }
    }
    compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
    for (HStoreFile file : compactedfiles) {
        assertEquals("Refcount should be 0", 0, file.getRefCount());
        unusedReaderCount++;
    }
    // Though the compacted files are still present, they are no longer used for reads
    assertEquals("unused reader count should be 3", 3, unusedReaderCount);
    assertEquals("used reader count should be 1", 1, usedReaderCount);
    countDown();
    while (scanCompletedCounter.get() != 3) {
        Thread.sleep(100);
    }
    // Run the cleaner again
    cleaner.chore();
    // Now the cleaner should be able to clear it up because there are no active readers
    assertEquals(1, store.getStorefilesCount());
    storefiles = store.getStorefiles();
    for (HStoreFile file : storefiles) {
        // Should not be in compacted state
        assertFalse(file.isCompactedAway());
    }
    compactedfiles = store.getStoreEngine().getStoreFileManager().getCompactedfiles();
    assertTrue(compactedfiles.isEmpty());
}
Also used : CompactedHFilesDischarger(org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger) HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) CountDownLatch(java.util.concurrent.CountDownLatch) HStore(org.apache.hadoop.hbase.regionserver.HStore) Put(org.apache.hadoop.hbase.client.Put) Test(org.junit.Test)
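
startScannerThreads, countDown, latch, and scanCompletedCounter belong to the surrounding test class and are not shown in this example. A minimal sketch of what one such scanner thread could look like; this is an assumption about the harness, not the actual implementation:

// Hypothetical scanner thread: hold a region scanner open, which pins the
// refcounts of the store files it reads, until the test calls countDown().
Thread scanThread = new Thread(() -> {
    try (RegionScanner scanner = region.getScanner(new Scan())) {
        List<Cell> cells = new ArrayList<>();
        while (scanner.next(cells)) {
            cells.clear();
        }
        latch.await();  // readers stay referenced until countDown()
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        scanCompletedCounter.incrementAndGet();
    }
});
scanThread.start();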

Example 20 with HStoreFile

Use of org.apache.hadoop.hbase.regionserver.HStoreFile in project hbase by apache.

From the class TestCompactor, method createDummyStoreFile:

public static HStoreFile createDummyStoreFile(long maxSequenceId) throws Exception {
    // "Files" are totally unused, it's Scanner class below that gives compactor fake KVs.
    // But compaction depends on everything under the sun, so stub everything with dummies.
    HStoreFile sf = mock(HStoreFile.class);
    StoreFileReader r = mock(StoreFileReader.class);
    when(r.length()).thenReturn(1L);
    when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
    when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(), anyBoolean())).thenReturn(mock(StoreFileScanner.class));
    when(sf.getReader()).thenReturn(r);
    when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);
    return sf;
}
Also used : HStoreFile(org.apache.hadoop.hbase.regionserver.HStoreFile) StoreFileReader(org.apache.hadoop.hbase.regionserver.StoreFileReader) StoreFileScanner(org.apache.hadoop.hbase.regionserver.StoreFileScanner)
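
A minimal usage sketch, wrapping two dummies in a compaction request for the compactor under test (the wiring below is illustrative; it assumes the CompactionRequestImpl constructor that takes a file collection):

// Two dummies with distinct max sequence ids; the compactor is then driven
// entirely by Scanner stubs, as the comment in createDummyStoreFile notes.
HStoreFile f1 = createDummyStoreFile(10L);
HStoreFile f2 = createDummyStoreFile(20L);
CompactionRequestImpl request = new CompactionRequestImpl(Arrays.asList(f1, f2));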

Aggregations

HStoreFile (org.apache.hadoop.hbase.regionserver.HStoreFile): 44
ArrayList (java.util.ArrayList): 18
Test (org.junit.Test): 16
Path (org.apache.hadoop.fs.Path): 11
Configuration (org.apache.hadoop.conf.Configuration): 8
HStore (org.apache.hadoop.hbase.regionserver.HStore): 8
StripeInformationProvider (org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider): 8
IOException (java.io.IOException): 6
OptionalLong (java.util.OptionalLong): 6
TableName (org.apache.hadoop.hbase.TableName): 5
Put (org.apache.hadoop.hbase.client.Put): 5
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 5
FileSystem (org.apache.hadoop.fs.FileSystem): 4
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 4
StoreFileReader (org.apache.hadoop.hbase.regionserver.StoreFileReader): 4
ImmutableList (org.apache.hbase.thirdparty.com.google.common.collect.ImmutableList): 4
InterruptedIOException (java.io.InterruptedIOException): 3
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 3
ManualEnvironmentEdge (org.apache.hadoop.hbase.util.ManualEnvironmentEdge): 3
FileNotFoundException (java.io.FileNotFoundException): 2