
Example 6 with Size

Use of org.apache.hadoop.hbase.Size in project hbase by apache.

From class TestSimpleRegionNormalizer, method setupMocksForNormalizer:

@SuppressWarnings("MockitoCast")
private void setupMocksForNormalizer(Map<byte[], Integer> regionSizes, List<RegionInfo> regionInfoList) {
    masterServices = Mockito.mock(MasterServices.class, RETURNS_DEEP_STUBS);
    tableDescriptor = Mockito.mock(TableDescriptor.class, RETURNS_DEEP_STUBS);
    // for simplicity all regions are assumed to be on one server; doesn't matter to us
    ServerName sn = ServerName.valueOf("localhost", 0, 0L);
    when(masterServices.getAssignmentManager().getRegionStates().getRegionsOfTable(any())).thenReturn(regionInfoList);
    when(masterServices.getAssignmentManager().getRegionStates().getRegionServerOfRegion(any())).thenReturn(sn);
    when(masterServices.getAssignmentManager().getRegionStates().getRegionState(any(RegionInfo.class))).thenReturn(RegionState.createForTesting(null, RegionState.State.OPEN));
    for (Map.Entry<byte[], Integer> region : regionSizes.entrySet()) {
        RegionMetrics regionLoad = Mockito.mock(RegionMetrics.class);
        when(regionLoad.getRegionName()).thenReturn(region.getKey());
        when(regionLoad.getStoreFileSize()).thenReturn(new Size(region.getValue(), Size.Unit.MEGABYTE));
        // this is possibly broken with jdk9, unclear if false positive or not
        // suppress it for now, fix it when we get to running tests on 9
        // see: http://errorprone.info/bugpattern/MockitoCast
        when((Object) masterServices.getServerManager().getLoad(sn).getRegionMetrics().get(region.getKey())).thenReturn(regionLoad);
    }
    when(masterServices.isSplitOrMergeEnabled(any())).thenReturn(true);
    when(tableDescriptor.getTableName()).thenReturn(name.getTableName());
    normalizer = new SimpleRegionNormalizer();
    normalizer.setConf(conf);
    normalizer.setMasterServices(masterServices);
}
Also used : Size(org.apache.hadoop.hbase.Size) ServerName(org.apache.hadoop.hbase.ServerName) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) MasterServices(org.apache.hadoop.hbase.master.MasterServices) Map(java.util.Map) HashMap(java.util.HashMap) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) RegionMetrics(org.apache.hadoop.hbase.RegionMetrics)
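The mocked RegionMetrics above report their store file size through org.apache.hadoop.hbase.Size, which is simply a value paired with a unit. A minimal standalone sketch of how such a Size converts between units (the class name SizeUnitSketch is illustrative and not part of the test):

import org.apache.hadoop.hbase.Size;

public class SizeUnitSketch {
    public static void main(String[] args) {
        // 4096 MB, constructed the same way the mock above builds store file sizes
        Size storeFileSize = new Size(4096, Size.Unit.MEGABYTE);
        // get() with no argument returns the value in the unit it was created with
        System.out.println(storeFileSize.get());                   // 4096.0
        System.out.println(storeFileSize.getUnit());                // MEGABYTE
        // get(Unit) converts; HBase sizes use 1024-based units
        System.out.println(storeFileSize.get(Size.Unit.GIGABYTE));  // 4.0
    }
}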

Example 7 with Size

Use of org.apache.hadoop.hbase.Size in project hbase by apache.

From class TestSimpleRegionNormalizerOnCluster, method createTableBegsSplit:

/**
 * create a table with 5 regions, having region sizes so as to provoke a split
 * of the largest region.
 * <ul>
 *   <li>total table size: 12</li>
 *   <li>average region size: 2.4</li>
 *   <li>split threshold: 2.4 * 2 = 4.8</li>
 * </ul>
 */
private static int createTableBegsSplit(final TableName tableName, final boolean normalizerEnabled, final boolean isMergeEnabled) throws Exception {
    final List<HRegion> generatedRegions = generateTestData(tableName, 1, 1, 2, 3, 5);
    assertEquals(5, getRegionCount(tableName));
    admin.flush(tableName).get();
    final TableDescriptor td = TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName).get())
        .setNormalizationEnabled(normalizerEnabled)
        .setMergeEnabled(isMergeEnabled)
        .build();
    admin.modifyTable(td).get();
    // Make sure relatively accurate region statistics are available for the test table;
    // use the last/largest region as a clue.
    TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new ExplainingPredicate<IOException>() {

        @Override
        public String explainFailure() {
            return "expected largest region to be >= 4mb.";
        }

        @Override
        public boolean evaluate() {
            return generatedRegions.stream()
                .mapToDouble(val -> getRegionSizeMB(master, val.getRegionInfo()))
                .allMatch(val -> val > 0)
                && getRegionSizeMB(master, generatedRegions.get(4).getRegionInfo()) >= 4.0;
        }
    });
    return 5;
}
Also used : NormalizeTableFilterParams(org.apache.hadoop.hbase.client.NormalizeTableFilterParams) Matchers.not(org.hamcrest.Matchers.not) LoggerFactory(org.slf4j.LoggerFactory) Matchers.comparesEqualTo(org.hamcrest.Matchers.comparesEqualTo) MasterServices(org.apache.hadoop.hbase.master.MasterServices) ClassRule(org.junit.ClassRule) AfterClass(org.junit.AfterClass) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Matchers.lessThanOrEqualTo(org.hamcrest.Matchers.lessThanOrEqualTo) AsyncAdmin(org.apache.hadoop.hbase.client.AsyncAdmin) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) Category(org.junit.experimental.categories.Category) List(java.util.List) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Region(org.apache.hadoop.hbase.regionserver.Region) TestNamespaceAuditor(org.apache.hadoop.hbase.namespace.TestNamespaceAuditor) Assert.assertFalse(org.junit.Assert.assertFalse) MatcherPredicate(org.apache.hadoop.hbase.MatcherPredicate) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) HMaster(org.apache.hadoop.hbase.master.HMaster) BeforeClass(org.junit.BeforeClass) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) ArrayList(java.util.ArrayList) HConstants(org.apache.hadoop.hbase.HConstants) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) TestName(org.junit.rules.TestName) QuotaUtil(org.apache.hadoop.hbase.quotas.QuotaUtil) ServerName(org.apache.hadoop.hbase.ServerName) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) Matchers.greaterThanOrEqualTo(org.hamcrest.Matchers.greaterThanOrEqualTo) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) TableNamespaceManager(org.apache.hadoop.hbase.master.TableNamespaceManager) Assert.assertNotNull(org.junit.Assert.assertNotNull) MediumTests(org.apache.hadoop.hbase.testclassification.MediumTests) Put(org.apache.hadoop.hbase.client.Put) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Assert.assertTrue(org.junit.Assert.assertTrue) Matchers(org.hamcrest.Matchers) IOException(java.io.IOException) Test(org.junit.Test) PlanType(org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType) RegionMetrics(org.apache.hadoop.hbase.RegionMetrics) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) Size(org.apache.hadoop.hbase.Size) LoadTestKVGenerator(org.apache.hadoop.hbase.util.LoadTestKVGenerator) MasterTests(org.apache.hadoop.hbase.testclassification.MasterTests) Matcher(org.hamcrest.Matcher) Table(org.apache.hadoop.hbase.client.Table) ExplainingPredicate(org.apache.hadoop.hbase.Waiter.ExplainingPredicate) Comparator(java.util.Comparator) Collections(java.util.Collections) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) Assert.assertEquals(org.junit.Assert.assertEquals) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) IOException(java.io.IOException) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
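The numbers in the javadoc above follow directly from the region sizes passed to generateTestData. A quick standalone sketch of that arithmetic (illustrative only; the 2 x average split threshold mirrors the comment above rather than calling into SimpleRegionNormalizer):

public class SplitThresholdSketch {
    public static void main(String[] args) {
        // region sizes in MB, as passed to generateTestData(tableName, 1, 1, 2, 3, 5)
        double[] regionSizesMb = {1, 1, 2, 3, 5};
        double total = 0;
        for (double mb : regionSizesMb) {
            total += mb;
        }
        double average = total / regionSizesMb.length;              // 12 / 5 = 2.4
        double splitThreshold = 2 * average;                        // 4.8
        double largest = regionSizesMb[regionSizesMb.length - 1];   // 5
        // the largest region exceeds the threshold, so a split is expected
        System.out.printf("total=%.1f avg=%.1f threshold=%.1f split=%b%n",
            total, average, splitThreshold, largest > splitThreshold);
    }
}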

Example 8 with Size

Use of org.apache.hadoop.hbase.Size in project hbase by apache.

From class TestSimpleRegionNormalizerOnCluster, method createTableBegsMerge:

/**
 * create a table with 5 regions, having region sizes so as to provoke a merge
 * of the smallest regions.
 * <ul>
 *   <li>total table size: 13</li>
 *   <li>average region size: 2.6</li>
 *   <li>sum of sizes of first two regions < average</li>
 * </ul>
 */
private static int createTableBegsMerge(final TableName tableName) throws Exception {
    // create 5 regions with sizes to trigger merge of small regions
    final List<HRegion> generatedRegions = generateTestData(tableName, 1, 1, 3, 3, 5);
    assertEquals(5, getRegionCount(tableName));
    admin.flush(tableName).get();
    final TableDescriptor td = TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName).get())
        .setNormalizationEnabled(true)
        .build();
    admin.modifyTable(td).get();
    // Make sure relatively accurate region statistics are available for the test table;
    // use the last/largest region as a clue.
    LOG.debug("waiting for region statistics to settle.");
    TEST_UTIL.waitFor(TimeUnit.MINUTES.toMillis(1), new ExplainingPredicate<IOException>() {

        @Override
        public String explainFailure() {
            return "expected largest region to be >= 4mb.";
        }

        @Override
        public boolean evaluate() {
            return generatedRegions.stream()
                .mapToDouble(val -> getRegionSizeMB(master, val.getRegionInfo()))
                .allMatch(val -> val > 0)
                && getRegionSizeMB(master, generatedRegions.get(4).getRegionInfo()) >= 4.0;
        }
    });
    return 5;
}
Also used : NormalizeTableFilterParams(org.apache.hadoop.hbase.client.NormalizeTableFilterParams) Matchers.not(org.hamcrest.Matchers.not) LoggerFactory(org.slf4j.LoggerFactory) Matchers.comparesEqualTo(org.hamcrest.Matchers.comparesEqualTo) MasterServices(org.apache.hadoop.hbase.master.MasterServices) ClassRule(org.junit.ClassRule) AfterClass(org.junit.AfterClass) HBaseTestingUtil(org.apache.hadoop.hbase.HBaseTestingUtil) Matchers.lessThanOrEqualTo(org.hamcrest.Matchers.lessThanOrEqualTo) AsyncAdmin(org.apache.hadoop.hbase.client.AsyncAdmin) HBaseClassTestRule(org.apache.hadoop.hbase.HBaseClassTestRule) Category(org.junit.experimental.categories.Category) List(java.util.List) RegionLocator(org.apache.hadoop.hbase.client.RegionLocator) Region(org.apache.hadoop.hbase.regionserver.Region) TestNamespaceAuditor(org.apache.hadoop.hbase.namespace.TestNamespaceAuditor) Assert.assertFalse(org.junit.Assert.assertFalse) MatcherPredicate(org.apache.hadoop.hbase.MatcherPredicate) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) HMaster(org.apache.hadoop.hbase.master.HMaster) BeforeClass(org.junit.BeforeClass) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) ArrayList(java.util.ArrayList) HConstants(org.apache.hadoop.hbase.HConstants) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) TestName(org.junit.rules.TestName) QuotaUtil(org.apache.hadoop.hbase.quotas.QuotaUtil) ServerName(org.apache.hadoop.hbase.ServerName) Bytes(org.apache.hadoop.hbase.util.Bytes) Before(org.junit.Before) Matchers.greaterThanOrEqualTo(org.hamcrest.Matchers.greaterThanOrEqualTo) TableName(org.apache.hadoop.hbase.TableName) Logger(org.slf4j.Logger) TableNamespaceManager(org.apache.hadoop.hbase.master.TableNamespaceManager) Assert.assertNotNull(org.junit.Assert.assertNotNull) MediumTests(org.apache.hadoop.hbase.testclassification.MediumTests) Put(org.apache.hadoop.hbase.client.Put) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) Assert.assertTrue(org.junit.Assert.assertTrue) Matchers(org.hamcrest.Matchers) IOException(java.io.IOException) Test(org.junit.Test) PlanType(org.apache.hadoop.hbase.master.normalizer.NormalizationPlan.PlanType) RegionMetrics(org.apache.hadoop.hbase.RegionMetrics) TimeUnit(java.util.concurrent.TimeUnit) Rule(org.junit.Rule) Size(org.apache.hadoop.hbase.Size) LoadTestKVGenerator(org.apache.hadoop.hbase.util.LoadTestKVGenerator) MasterTests(org.apache.hadoop.hbase.testclassification.MasterTests) Matcher(org.hamcrest.Matcher) Table(org.apache.hadoop.hbase.client.Table) ExplainingPredicate(org.apache.hadoop.hbase.Waiter.ExplainingPredicate) Comparator(java.util.Comparator) Collections(java.util.Collections) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) Assert.assertEquals(org.junit.Assert.assertEquals) HRegion(org.apache.hadoop.hbase.regionserver.HRegion) IOException(java.io.IOException) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
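As with the previous example, the merge condition in the javadoc can be checked by hand from the sizes passed to generateTestData. A standalone sketch of that arithmetic (illustrative; it does not call into the normalizer):

public class MergeConditionSketch {
    public static void main(String[] args) {
        // region sizes in MB, as passed to generateTestData(tableName, 1, 1, 3, 3, 5)
        double[] regionSizesMb = {1, 1, 3, 3, 5};
        double total = 0;
        for (double mb : regionSizesMb) {
            total += mb;
        }
        double average = total / regionSizesMb.length;              // 13 / 5 = 2.6
        double firstTwo = regionSizesMb[0] + regionSizesMb[1];      // 1 + 1 = 2
        // the two smallest adjacent regions sum to less than the average,
        // which is what makes them merge candidates
        System.out.printf("total=%.1f avg=%.1f firstTwo=%.1f merge=%b%n",
            total, average, firstTwo, firstTwo < average);
    }
}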

Example 9 with Size

Use of org.apache.hadoop.hbase.Size in project hbase by apache.

From class TestRegionSizeCalculator, method mockRegion:

/**
 * Creates a mock of a region with the given name and size.
 *
 * @param fileSizeMb number of megabytes occupied by the region in the file store
 */
private RegionMetrics mockRegion(String regionName, int fileSizeMb) {
    RegionMetrics region = Mockito.mock(RegionMetrics.class);
    when(region.getRegionName()).thenReturn(Bytes.toBytes(regionName));
    when(region.getNameAsString()).thenReturn(regionName);
    when(region.getStoreFileSize()).thenReturn(new Size(fileSizeMb, Size.Unit.MEGABYTE));
    return region;
}
Also used : Size(org.apache.hadoop.hbase.Size) RegionMetrics(org.apache.hadoop.hbase.RegionMetrics)
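A short usage sketch of the helper above, assuming mockRegion is in scope together with org.junit.Assert.assertEquals; the region name and size here are hypothetical:

@Test
public void mockRegionReportsMockedSize() {
    RegionMetrics mocked = mockRegion("regionA", 100);
    assertEquals("regionA", mocked.getNameAsString());
    // the Size handed to the mock comes back unchanged, in megabytes
    assertEquals(100.0, mocked.getStoreFileSize().get(Size.Unit.MEGABYTE), 0.0);
}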

Example 10 with Size

Use of org.apache.hadoop.hbase.Size in project hbase by apache.

From class FieldValue, method plus:

public FieldValue plus(FieldValue o) {
    if (type != o.type) {
        throw new IllegalArgumentException("invalid type");
    }
    switch(type) {
        case STRING:
            return new FieldValue(((String) value).concat((String) o.value), type);
        case INTEGER:
            return new FieldValue(((Integer) value) + ((Integer) o.value), type);
        case LONG:
            return new FieldValue(((Long) value) + ((Long) o.value), type);
        case FLOAT:
        case PERCENT:
            return new FieldValue(((Float) value) + ((Float) o.value), type);
        case SIZE:
            Size size = (Size) value;
            Size oSize = (Size) o.value;
            Size.Unit unit = size.getUnit();
            return new FieldValue(new Size(size.get(unit) + oSize.get(unit), unit), type);
        default:
            throw new AssertionError();
    }
}
Also used : Size(org.apache.hadoop.hbase.Size)
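In the SIZE case above, the two operands may carry different units; the sum is expressed in the left operand's unit. A standalone sketch of the same steps using only the Size API (assuming the usual 1024-based unit conversion):

import org.apache.hadoop.hbase.Size;

public class SizePlusSketch {
    public static void main(String[] args) {
        Size left = new Size(1, Size.Unit.GIGABYTE);
        Size right = new Size(512, Size.Unit.MEGABYTE);
        // same steps as the SIZE branch: take the left operand's unit,
        // convert both values into it, and wrap the sum in a new Size
        Size.Unit unit = left.getUnit();
        Size sum = new Size(left.get(unit) + right.get(unit), unit);
        System.out.println(sum.get(Size.Unit.GIGABYTE)); // 1.5, assuming 1024-based conversion
    }
}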

Aggregations

Size (org.apache.hadoop.hbase.Size): 11 uses
Test (org.junit.Test): 7 uses
RegionMetrics (org.apache.hadoop.hbase.RegionMetrics): 6 uses
ServerName (org.apache.hadoop.hbase.ServerName): 5 uses
ArrayList (java.util.ArrayList): 3 uses
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 3 uses
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 3 uses
MasterServices (org.apache.hadoop.hbase.master.MasterServices): 3 uses
IOException (java.io.IOException): 2 uses
Collections (java.util.Collections): 2 uses
Comparator (java.util.Comparator): 2 uses
List (java.util.List): 2 uses
TimeUnit (java.util.concurrent.TimeUnit): 2 uses
HBaseClassTestRule (org.apache.hadoop.hbase.HBaseClassTestRule): 2 uses
HBaseTestingUtil (org.apache.hadoop.hbase.HBaseTestingUtil): 2 uses
HConstants (org.apache.hadoop.hbase.HConstants): 2 uses
MatcherPredicate (org.apache.hadoop.hbase.MatcherPredicate): 2 uses
NamespaceDescriptor (org.apache.hadoop.hbase.NamespaceDescriptor): 2 uses
TableName (org.apache.hadoop.hbase.TableName): 2 uses
ExplainingPredicate (org.apache.hadoop.hbase.Waiter.ExplainingPredicate): 2 uses