Example 11 with StoreFile

Use of org.apache.hadoop.hbase.regionserver.StoreFile in project hbase by apache.

From the class GaussianFileListGenerator, method iterator.

@Override
public Iterator<List<StoreFile>> iterator() {
    return new Iterator<List<StoreFile>>() {

        private GaussianRandomGenerator gen = new GaussianRandomGenerator(new MersenneTwister(random.nextInt()));

        private int count = 0;

        @Override
        public boolean hasNext() {
            return count < MAX_FILE_GEN_ITERS;
        }

        @Override
        public List<StoreFile> next() {
            count += 1;
            ArrayList<StoreFile> files = new ArrayList<>(NUM_FILES_GEN);
            for (int i = 0; i < NUM_FILES_GEN; i++) {
                // Draw each file size from N(32, 32), clamped at zero and rounded up.
                files.add(createMockStoreFile((int) Math.ceil(Math.max(0, gen.nextNormalizedDouble() * 32 + 32))));
            }
            return files;
        }

        @Override
        public void remove() {
            // Removal is not supported; intentionally a no-op.
        }
    };
}
Also used: GaussianRandomGenerator (org.apache.commons.math.random.GaussianRandomGenerator), Iterator (java.util.Iterator), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), ArrayList (java.util.ArrayList), MersenneTwister (org.apache.commons.math.random.MersenneTwister)
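
The sizes above are drawn from a normal distribution with mean 32 and standard deviation 32, clamped at zero. A minimal standalone sketch of just that expression (the seed and the loop are illustrative, not part of the HBase source):

GaussianRandomGenerator gen = new GaussianRandomGenerator(new MersenneTwister(42));
for (int i = 0; i < 5; i++) {
    // nextNormalizedDouble() draws from N(0, 1); scaling by 32 and adding 32
    // gives N(32, 32), negative draws are clamped, and the result is rounded up.
    int size = (int) Math.ceil(Math.max(0, gen.nextNormalizedDouble() * 32 + 32));
    System.out.println(size);
}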

Example 12 with StoreFile

Use of org.apache.hadoop.hbase.regionserver.StoreFile in project hbase by apache.

From the class MockStoreFileGenerator, method createMockStoreFile.

protected StoreFile createMockStoreFile(final long sizeInBytes, final long seqId) {
    StoreFile mockSf = mock(StoreFile.class);
    StoreFileReader reader = mock(StoreFileReader.class);
    String stringPath = "/hbase/testTable/regionA/" + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random);
    Path path = new Path(stringPath);
    when(reader.getSequenceID()).thenReturn(seqId);
    when(reader.getTotalUncompressedBytes()).thenReturn(sizeInBytes);
    when(reader.length()).thenReturn(sizeInBytes);
    when(mockSf.getPath()).thenReturn(path);
    when(mockSf.excludeFromMinorCompaction()).thenReturn(false);
    // TODO: come back to this when selection takes references into account.
    when(mockSf.isReference()).thenReturn(false);
    when(mockSf.getReader()).thenReturn(reader);
    String toString = Objects.toStringHelper("MockStoreFile")
            .add("isReference", false)
            .add("fileSize", StringUtils.humanReadableInt(sizeInBytes))
            .add("seqId", seqId)
            .add("path", stringPath)
            .toString();
    when(mockSf.toString()).thenReturn(toString);
    return mockSf;
}
Also used: Path (org.apache.hadoop.fs.Path), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), StoreFileReader (org.apache.hadoop.hbase.regionserver.StoreFileReader)
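
A brief usage sketch (the values are hypothetical, not from the HBase tests): the mock answers exactly the calls stubbed above, which is all the compaction-policy code under test touches.

StoreFile sf = createMockStoreFile(1024L, 7L);
assertEquals(1024L, sf.getReader().length());
assertEquals(7L, sf.getReader().getSequenceID());
assertFalse(sf.isReference());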

Example 13 with StoreFile

Use of org.apache.hadoop.hbase.regionserver.StoreFile in project hbase by apache.

From the class PerfTestCompactionPolicies, method runIteration.

private List<StoreFile> runIteration(List<StoreFile> startingStoreFiles) throws IOException {
    List<StoreFile> storeFiles = new ArrayList<>(startingStoreFiles);
    CompactionRequest req = cp.selectCompaction(storeFiles, new ArrayList<>(), false, false, false);
    long newFileSize = 0;
    Collection<StoreFile> filesToCompact = req.getFiles();
    if (!filesToCompact.isEmpty()) {
        // Replace the selected inputs with a single output file whose size is
        // the sum of the inputs, mimicking the effect of running the compaction.
        storeFiles.removeAll(filesToCompact);
        for (StoreFile storeFile : filesToCompact) {
            newFileSize += storeFile.getReader().length();
        }
        storeFiles.add(createMockStoreFileBytes(newFileSize));
    }
    }
    written += newFileSize;
    return storeFiles;
}
Also used: ArrayList (java.util.ArrayList), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile)
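
A hypothetical driver (not part of the test class; initialFiles is a placeholder) shows the intended call pattern: feeding each result back in simulates successive compaction rounds.

List<StoreFile> files = new ArrayList<>(initialFiles);
for (int round = 0; round < 10; round++) {
    // Each round may merge several files into one, shrinking the list.
    files = runIteration(files);
}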

Example 14 with StoreFile

Use of org.apache.hadoop.hbase.regionserver.StoreFile in project hbase by apache.

From the class PerfTestCompactionPolicies, method testSelection.

@Test
public final void testSelection() throws Exception {
    long fileDiff = 0;
    for (List<StoreFile> storeFileList : generator) {
        List<StoreFile> currentFiles = new ArrayList<>(18);
        // Add files one at a time, running the policy after each addition to
        // simulate flushes arriving between compaction checks.
        for (StoreFile file : storeFileList) {
            currentFiles.add(file);
            currentFiles = runIteration(currentFiles);
        }
        // Track the net reduction in file count across the run.
        fileDiff += (storeFileList.size() - currentFiles.size());
    }
    // Print tab-delimited output so it can be pasted into Excel or Google Docs.
    System.out.println(cp.getClass().getSimpleName() + "\t" + fileGenClass.getSimpleName() + "\t" + max + "\t" + min + "\t" + ratio + "\t" + written + "\t" + fileDiff);
}
Also used: StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), ArrayList (java.util.ArrayList), Test (org.junit.Test)
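
For orientation, one output line might look like the following (the values are illustrative, not from a real run); the columns are the policy class, the generator class, the max, min, and ratio settings, total bytes written, and the net reduction in file count:

ExploringCompactionPolicy	GaussianFileListGenerator	10	3	1.2	52428800	41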

Example 15 with StoreFile

Use of org.apache.hadoop.hbase.regionserver.StoreFile in project hbase by apache.

From the class TestCompactedHFilesDischarger, method testCleanerWithParallelScanners.

@Test
public void testCleanerWithParallelScanners() throws Exception {
    // Create the cleaner object
    CompactedHFilesDischarger cleaner = new CompactedHFilesDischarger(1000, (Stoppable) null, rss, false);
    // Add some data to the region and do some flushes
    for (int i = 1; i < 10; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    for (int i = 11; i < 20; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    for (int i = 21; i < 30; i++) {
        Put p = new Put(Bytes.toBytes("row" + i));
        p.addColumn(fam, qual1, val);
        region.put(p);
    }
    // flush them
    region.flush(true);
    Store store = region.getStore(fam);
    assertEquals(3, store.getStorefilesCount());
    Collection<StoreFile> storefiles = store.getStorefiles();
    Collection<StoreFile> compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
    // None of the files should be in compacted state.
    for (StoreFile file : storefiles) {
        assertFalse(file.isCompactedAway());
    }
    startScannerThreads();
    // Do compaction
    region.compact(true);
    storefiles = store.getStorefiles();
    int usedReaderCount = 0;
    int unusedReaderCount = 0;
    for (StoreFile file : storefiles) {
        if (file.getRefCount() == 0) {
            unusedReaderCount++;
        }
    }
    compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
    for (StoreFile file : compactedfiles) {
        assertEquals("Refcount should be 3", 3, file.getRefCount());
        usedReaderCount++;
    }
    // The newly compacted file will not be used by any scanner
    assertEquals("unused reader count should be 1", 1, unusedReaderCount);
    assertEquals("used reader count should be 3", 3, usedReaderCount);
    // now run the cleaner
    cleaner.chore();
    countDown();
    // Nothing can be cleaned up yet: the active scanners still hold references,
    // so all three compacted files remain tracked alongside the one active file.
    assertEquals(1, store.getStorefilesCount());
    assertEquals(3, ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles().size());
    while (scanCompletedCounter.get() != 3) {
        Thread.sleep(100);
    }
    // reset
    latch = new CountDownLatch(3);
    scanCompletedCounter.set(0);
    counter.set(0);
    // New scanners should read only from the file created by the compaction.
    startScannerThreads();
    storefiles = store.getStorefiles();
    usedReaderCount = 0;
    unusedReaderCount = 0;
    for (StoreFile file : storefiles) {
        if (file.getRefCount() == 3) {
            usedReaderCount++;
        }
    }
    compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
    for (StoreFile file : compactedfiles) {
        assertEquals("Refcount should be 0", 0, file.getRefCount());
        unusedReaderCount++;
    }
    // The compacted files still exist, but no reads use them any more.
    assertEquals("unused reader count should be 3", 3, unusedReaderCount);
    assertEquals("used reader count should be 1", 1, usedReaderCount);
    countDown();
    while (scanCompletedCounter.get() != 3) {
        Thread.sleep(100);
    }
    // Run the cleaner again
    cleaner.chore();
    // With no active readers left, the cleaner can archive the compacted files.
    assertEquals(1, store.getStorefilesCount());
    storefiles = store.getStorefiles();
    for (StoreFile file : storefiles) {
        // Should not be in compacted state
        assertFalse(file.isCompactedAway());
    }
    compactedfiles = ((HStore) store).getStoreEngine().getStoreFileManager().getCompactedfiles();
    assertTrue(compactedfiles.isEmpty());
}
Also used: CompactedHFilesDischarger (org.apache.hadoop.hbase.regionserver.CompactedHFilesDischarger), StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile), HStore (org.apache.hadoop.hbase.regionserver.HStore), Store (org.apache.hadoop.hbase.regionserver.Store), CountDownLatch (java.util.concurrent.CountDownLatch), Put (org.apache.hadoop.hbase.client.Put), Test (org.junit.Test)
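
As a condensed sketch of the lifecycle this test exercises (the comments are mine; the calls are the ones used in the test above): compacted files stay tracked while open scanners hold references to them, and the chore archives them only once their refcount reaches zero.

region.compact(true);   // the three flushed files move to the compacted list
cleaner.chore();        // no-op: active scanners still reference the compacted files
countDown();            // let the scanners finish so the refcounts drop to zero
cleaner.chore();        // the compacted files are now archived and untracked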

Aggregations

StoreFile (org.apache.hadoop.hbase.regionserver.StoreFile): 52 usages
ArrayList (java.util.ArrayList): 22 usages
Path (org.apache.hadoop.fs.Path): 15 usages
Test (org.junit.Test): 13 usages
IOException (java.io.IOException): 10 usages
Store (org.apache.hadoop.hbase.regionserver.Store): 6 usages
StripeInformationProvider (org.apache.hadoop.hbase.regionserver.compactions.StripeCompactionPolicy.StripeInformationProvider): 6 usages
StoreFileReader (org.apache.hadoop.hbase.regionserver.StoreFileReader): 5 usages
ImmutableList (com.google.common.collect.ImmutableList): 4 usages
Configuration (org.apache.hadoop.conf.Configuration): 4 usages
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 4 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 4 usages
Put (org.apache.hadoop.hbase.client.Put): 4 usages
StoreFileScanner (org.apache.hadoop.hbase.regionserver.StoreFileScanner): 4 usages
FileStatus (org.apache.hadoop.fs.FileStatus): 3 usages
Cell (org.apache.hadoop.hbase.Cell): 3 usages
CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig): 3 usages
StoreFileWriter (org.apache.hadoop.hbase.regionserver.StoreFileWriter): 3 usages
ConcatenatedLists (org.apache.hadoop.hbase.util.ConcatenatedLists): 3 usages
FileNotFoundException (java.io.FileNotFoundException): 2 usages