Search in sources :

Example 6 with Store

use of org.apache.hadoop.hbase.regionserver.Store in project phoenix by apache.

The writeStatistics method of the DefaultStatisticsCollector class.

/**
 * Writes (or deletes) guide-post statistics for the given region by flushing
 * the per-column-family entries accumulated in {@code guidePostsInfoWriterMap}.
 *
 * @param region      the region whose statistics are being written
 * @param delete      if {@code true}, existing stats rows for the region are deleted first
 * @param mutations   output list that collects the stats-table mutations to apply
 * @param currentTime current timestamp (unused here; presumably applied by the writer — TODO confirm)
 * @param scan        the scan driving stats collection; may be {@code null} (e.g. on compaction)
 * @throws IOException if writing to or deleting from the statistics table fails
 */
private void writeStatistics(final Region region, boolean delete, List<Mutation> mutations, long currentTime, Scan scan) throws IOException {
    try {
        Set<ImmutableBytesPtr> fams = guidePostsInfoWriterMap.keySet();
        // compaction as guidePostsInfoWriterMap cannot be empty in this case.
        if (cachedGuidePosts == null) {
            // We're either collecting stats for the data table or the local index table, but not both
            // We can determine this based on the column families in the scan being prefixed with the
            // local index column family prefix. We always explicitly specify the local index column
            // families when we're collecting stats for a local index.
            boolean collectingForLocalIndex = scan != null && !scan.getFamilyMap().isEmpty()
                    && MetaDataUtil.isLocalIndexFamily(scan.getFamilyMap().keySet().iterator().next());
            for (Store store : region.getStores()) {
                ImmutableBytesPtr cfKey = new ImmutableBytesPtr(store.getFamily().getName());
                boolean isLocalIndexStore = MetaDataUtil.isLocalIndexFamily(cfKey);
                // Skip stores that don't match the kind of table (data vs. local index)
                // we are currently collecting stats for.
                if (isLocalIndexStore != collectingForLocalIndex) {
                    continue;
                }
                // Ensure every matching column family has an entry, even when no guide
                // posts were collected for it, so its (possibly empty) stats get written.
                // Note: fams is a live keySet view, so entries added here are included
                // in the write loop below — same behavior as the original put().
                guidePostsInfoWriterMap.computeIfAbsent(cfKey,
                        k -> new Pair<Long, GuidePostsInfoBuilder>(0L, new GuidePostsInfoBuilder()));
            }
        }
        for (ImmutableBytesPtr fam : fams) {
            if (delete) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Deleting the stats for the region " + region.getRegionInfo());
                }
                statsWriter.deleteStatsForRegion(region, this, fam, mutations);
            }
            if (logger.isDebugEnabled()) {
                logger.debug("Adding new stats for the region " + region.getRegionInfo());
            }
            // If we've disabled stats, don't write any, just delete them
            if (this.guidePostDepth > 0) {
                statsWriter.addStats(this, fam, mutations);
            }
        }
    } catch (IOException e) {
        logger.error("Failed to update statistics table!", e);
        throw e;
    }
}
Also used : ImmutableBytesPtr(org.apache.phoenix.hbase.index.util.ImmutableBytesPtr) PLong(org.apache.phoenix.schema.types.PLong) Store(org.apache.hadoop.hbase.regionserver.Store) IOException(java.io.IOException) Pair(org.apache.hadoop.hbase.util.Pair)

Example 7 with Store

use of org.apache.hadoop.hbase.regionserver.Store in project hbase by apache.

The refreshHFiles method of the RefreshHFilesEndpoint class.

/**
 * Coprocessor endpoint handler: makes every store in this region re-read its
 * list of store files from the filesystem, then completes the RPC with the
 * default response instance. I/O failures are reported back on the controller
 * rather than thrown.
 */
@Override
public void refreshHFiles(RpcController controller, RefreshHFilesProtos.RefreshHFilesRequest request, RpcCallback<RefreshHFilesProtos.RefreshHFilesResponse> done) {
    try {
        for (Store s : env.getRegion().getStores()) {
            // NOTE(review): "class:" has no leading space, so the log line runs
            // together — confirm before changing the message text.
            LOG.debug("Refreshing HFiles for region: " + s.getRegionInfo().getRegionNameAsString()
                    + " and store: " + s.getColumnFamilyName() + "class:" + s.getClass());
            s.refreshStoreFiles();
        }
    } catch (IOException e) {
        LOG.error("Exception while trying to refresh store files: ", e);
        CoprocessorRpcUtils.setControllerException(controller, e);
    }
    // Always respond, whether or not the refresh succeeded.
    done.run(RefreshHFilesProtos.RefreshHFilesResponse.getDefaultInstance());
}
Also used : Store(org.apache.hadoop.hbase.regionserver.Store) IOException(java.io.IOException)

Example 8 with Store

use of org.apache.hadoop.hbase.regionserver.Store in project hbase by apache.

The testIsClosed method of the TestCloseChecker class.

@Test
public void testIsClosed() {
    // A store with writes enabled must never trip the close checker;
    // a store with writes disabled trips it once a limit is exceeded.
    Store writableStore = mock(Store.class);
    when(writableStore.areWritesEnabled()).thenReturn(true);
    Store readOnlyStore = mock(Store.class);
    when(readOnlyStore.areWritesEnabled()).thenReturn(false);

    Configuration conf = new Configuration();
    conf.setInt(SIZE_LIMIT_KEY, 10);
    conf.setLong(TIME_LIMIT_KEY, 10);
    long now = EnvironmentEdgeManager.currentTime();

    // Under both limits: no close regardless of write state.
    CloseChecker checker = new CloseChecker(conf, now);
    assertFalse(checker.isTimeLimit(writableStore, now));
    assertFalse(checker.isSizeLimit(writableStore, 10L));

    // Over both limits but writes enabled: still no close.
    checker = new CloseChecker(conf, now);
    assertFalse(checker.isTimeLimit(writableStore, now + 11));
    assertFalse(checker.isSizeLimit(writableStore, 11L));

    // Over both limits with writes disabled: close is requested.
    checker = new CloseChecker(conf, now);
    assertTrue(checker.isTimeLimit(readOnlyStore, now + 11));
    assertTrue(checker.isSizeLimit(readOnlyStore, 11L));

    // Repeated sub-limit increments on a writable store never trigger a close.
    for (int i = 0; i < 10; i++) {
        int offset = 5 * i;
        assertFalse(checker.isTimeLimit(writableStore, now + offset));
        assertFalse(checker.isSizeLimit(writableStore, 5L));
    }

    // Accumulation on a read-only store: below the limit first, over it second.
    checker = new CloseChecker(conf, now);
    assertFalse(checker.isTimeLimit(readOnlyStore, now + 6));
    assertFalse(checker.isSizeLimit(readOnlyStore, 6));
    assertTrue(checker.isTimeLimit(readOnlyStore, now + 12));
    assertTrue(checker.isSizeLimit(readOnlyStore, 6));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Store(org.apache.hadoop.hbase.regionserver.Store) Test(org.junit.Test)

Example 9 with Store

use of org.apache.hadoop.hbase.regionserver.Store in project hbase by apache.

The mockRegionWithSize method of the TestFileSystemUtilizationChore class.

/**
 * Builds a mocked {@link Region} backed by one mocked {@link Store} per entry
 * in {@code storeSizes}; each store reports the matching value from
 * {@link Store#getHFilesSize()}.
 *
 * @param storeSizes the HFile size each mocked store should report
 * @return the mocked region
 */
private Region mockRegionWithSize(Collection<Long> storeSizes) {
    final Region region = mock(Region.class);
    final RegionInfo regionInfo = mock(RegionInfo.class);
    when(region.getRegionInfo()).thenReturn(regionInfo);
    final List<Store> mockedStores = new ArrayList<>();
    // Raw cast bridges the wildcard list type returned by Region#getStores().
    when(region.getStores()).thenReturn((List) mockedStores);
    for (final Long size : storeSizes) {
        final Store store = mock(Store.class);
        when(store.getHFilesSize()).thenReturn(size);
        mockedStores.add(store);
    }
    return region;
}
Also used : ArrayList(java.util.ArrayList) Region(org.apache.hadoop.hbase.regionserver.Region) Store(org.apache.hadoop.hbase.regionserver.Store) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo)

Example 10 with Store

use of org.apache.hadoop.hbase.regionserver.Store in project hbase by apache.

The mockRegionWithHFileLinks method of the TestFileSystemUtilizationChore class.

/**
 * Builds a mocked {@link Region} whose stores report both a store-file size and
 * an HFile size, simulating stores that contain HFile links. The two input
 * collections are consumed pairwise and must be the same length.
 *
 * @param storeSizes the value each store reports from {@code getStorefilesSize()}
 * @param hfileSizes the value each store reports from {@code getHFilesSize()}
 * @return the mocked region
 */
private Region mockRegionWithHFileLinks(Collection<Long> storeSizes, Collection<Long> hfileSizes) {
    final Region r = mock(Region.class);
    final RegionInfo info = mock(RegionInfo.class);
    when(r.getRegionInfo()).thenReturn(info);
    List<Store> stores = new ArrayList<>();
    // Raw cast bridges the wildcard list type returned by Region#getStores().
    when(r.getStores()).thenReturn((List) stores);
    // Fixed message: the parameter is named hfileSizes, not linkSizes.
    assertEquals("Logic error, storeSizes and hfileSizes must be equal in size", storeSizes.size(), hfileSizes.size());
    Iterator<Long> storeSizeIter = storeSizes.iterator();
    Iterator<Long> hfileSizeIter = hfileSizes.iterator();
    while (storeSizeIter.hasNext() && hfileSizeIter.hasNext()) {
        final long storeSize = storeSizeIter.next();
        final long hfileSize = hfileSizeIter.next();
        final Store s = mock(Store.class);
        stores.add(s);
        when(s.getStorefilesSize()).thenReturn(storeSize);
        when(s.getHFilesSize()).thenReturn(hfileSize);
    }
    return r;
}
Also used : ArrayList(java.util.ArrayList) Region(org.apache.hadoop.hbase.regionserver.Region) Store(org.apache.hadoop.hbase.regionserver.Store) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo)

Aggregations

Store (org.apache.hadoop.hbase.regionserver.Store)21 Region (org.apache.hadoop.hbase.regionserver.Region)7 Test (org.junit.Test)5 ArrayList (java.util.ArrayList)4 Configuration (org.apache.hadoop.conf.Configuration)4 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)4 InternalScanner (org.apache.hadoop.hbase.regionserver.InternalScanner)4 List (java.util.List)3 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)3 RegionCoprocessorEnvironment (org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment)3 Test (org.junit.jupiter.api.Test)3 IOException (java.io.IOException)2 RegionTooBusyException (org.apache.hadoop.hbase.RegionTooBusyException)2 TableName (org.apache.hadoop.hbase.TableName)2 Table (org.apache.hadoop.hbase.client.Table)2 BlockCache (org.apache.hadoop.hbase.io.hfile.BlockCache)2 CacheConfig (org.apache.hadoop.hbase.io.hfile.CacheConfig)2 StoreScanner (uk.gov.gchq.gaffer.hbasestore.coprocessor.scanner.StoreScanner)2 HashMap (java.util.HashMap)1 Map (java.util.Map)1