Example usage of org.apache.hadoop.hbase.Size in the Apache HBase project, taken from the TestUtils class's createDummyClusterMetrics method.
/**
 * Builds a fixed, fully populated {@link ClusterMetrics} for tests: two live region servers
 * (host1 with MB-scale sizes, host2 with GB-scale sizes), each carrying three regions and two
 * user metrics ("FOO"/"BAR"), one dead server (host3), and a single offline region of "table4"
 * in transition on host3.
 *
 * @return a dummy {@code ClusterMetrics} with deterministic contents
 */
public static ClusterMetrics createDummyClusterMetrics() {
  Map<ServerName, ServerMetrics> serverMetricsMap = new HashMap<>();

  // host1
  List<RegionMetrics> regionMetricsList = new ArrayList<>();
  List<UserMetrics> userMetricsList = new ArrayList<>();
  userMetricsList.add(createUserMetrics("FOO", 1, 2, 4));
  userMetricsList.add(createUserMetrics("BAR", 2, 3, 3));
  regionMetricsList.add(createRegionMetrics(
    "table1,,1.00000000000000000000000000000000.", 100, 50, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
    new Size(100, Size.Unit.MEGABYTE), 0.1f, 100, 100, "2019-07-22 00:00:00"));
  regionMetricsList.add(createRegionMetrics(
    "table2,1,2.00000000000000000000000000000001.", 200, 100, 200,
    new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
    new Size(200, Size.Unit.MEGABYTE), 0.2f, 50, 200, "2019-07-22 00:00:01"));
  regionMetricsList.add(createRegionMetrics(
    "namespace:table3,,3_0001.00000000000000000000000000000002.", 300, 150, 300,
    new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
    new Size(300, Size.Unit.MEGABYTE), 0.3f, 100, 300, "2019-07-22 00:00:02"));
  ServerName host1 = ServerName.valueOf("host1.apache.com", 1000, 1);
  serverMetricsMap.put(host1, createServerMetrics(host1, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 100,
    regionMetricsList, userMetricsList));

  // host2 — allocate fresh lists instead of clear()-and-refill. If createServerMetrics (or the
  // underlying ServerMetricsBuilder) stores the list reference rather than copying it, reusing
  // the same lists would alias host1's ServerMetrics to host2's data after the clear/refill.
  regionMetricsList = new ArrayList<>();
  userMetricsList = new ArrayList<>();
  userMetricsList.add(createUserMetrics("FOO", 5, 7, 3));
  userMetricsList.add(createUserMetrics("BAR", 4, 8, 4));
  regionMetricsList.add(createRegionMetrics(
    "table1,1,4.00000000000000000000000000000003.", 100, 50, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
    new Size(100, Size.Unit.MEGABYTE), 0.4f, 50, 100, "2019-07-22 00:00:03"));
  regionMetricsList.add(createRegionMetrics(
    "table2,,5.00000000000000000000000000000004.", 200, 100, 200,
    new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
    new Size(200, Size.Unit.MEGABYTE), 0.5f, 150, 200, "2019-07-22 00:00:04"));
  regionMetricsList.add(createRegionMetrics(
    "namespace:table3,,6.00000000000000000000000000000005.", 300, 150, 300,
    new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
    new Size(300, Size.Unit.MEGABYTE), 0.6f, 200, 300, "2019-07-22 00:00:05"));
  ServerName host2 = ServerName.valueOf("host2.apache.com", 1001, 2);
  serverMetricsMap.put(host2, createServerMetrics(host2, 200,
    new Size(16, Size.Unit.GIGABYTE), new Size(32, Size.Unit.GIGABYTE), 200,
    regionMetricsList, userMetricsList));

  // host3 is reported only as a dead server and as the last-known host of the
  // single region in transition below.
  ServerName host3 = ServerName.valueOf("host3.apache.com", 1002, 3);
  return ClusterMetricsBuilder.newBuilder()
    .setHBaseVersion("3.0.0-SNAPSHOT")
    .setClusterId("01234567-89ab-cdef-0123-456789abcdef")
    .setLiveServerMetrics(serverMetricsMap)
    .setDeadServerNames(Collections.singletonList(host3))
    .setRegionsInTransition(Collections.singletonList(
      new RegionState(RegionInfoBuilder.newBuilder(TableName.valueOf("table4"))
        .setStartKey(new byte[0])
        .setEndKey(new byte[0])
        .setOffline(true)
        .setReplicaId(0)
        .setRegionId(0)
        .setSplit(false)
        .build(), RegionState.State.OFFLINE, host3)))
    .build();
}
Aggregations