Use of org.apache.hadoop.hbase.UserMetrics in project hbase by apache.
Class TestRegionsRecoveryChore, method getServerMetrics:
private static ServerMetrics getServerMetrics(int noOfRegions) {
  ServerMetrics serverMetrics = new ServerMetrics() {

    @Override
    public ServerName getServerName() {
      return null;
    }

    @Override
    public long getRequestCountPerSecond() {
      return 0;
    }

    @Override
    public long getRequestCount() {
      return 0;
    }

    @Override
    public long getReadRequestsCount() {
      return 0;
    }

    @Override
    public long getWriteRequestsCount() {
      return 0;
    }

    @Override
    public Size getUsedHeapSize() {
      return null;
    }

    @Override
    public Size getMaxHeapSize() {
      return null;
    }

    @Override
    public int getInfoServerPort() {
      return 0;
    }

    @Override
    public List<ReplicationLoadSource> getReplicationLoadSourceList() {
      return null;
    }

    @Override
    public Map<String, List<ReplicationLoadSource>> getReplicationLoadSourceMap() {
      return null;
    }

    @Nullable
    @Override
    public ReplicationLoadSink getReplicationLoadSink() {
      return null;
    }

    @Override
    public Map<byte[], RegionMetrics> getRegionMetrics() {
      Map<byte[], RegionMetrics> regionMetricsMap = new HashMap<>();
      for (int i = 0; i < noOfRegions; i++) {
        byte[] regionName = Bytes.toBytes("region" + regionNo + "_" + i);
        regionMetricsMap.put(regionName,
          TestRegionsRecoveryChore.getRegionMetrics(regionName, 100 * i));
        ++regionNo;
      }
      return regionMetricsMap;
    }

    @Override
    public Map<byte[], UserMetrics> getUserMetrics() {
      return new HashMap<>();
    }

    @Override
    public Set<String> getCoprocessorNames() {
      return null;
    }

    @Override
    public long getReportTimestamp() {
      return 0;
    }

    @Override
    public long getLastReportTimestamp() {
      return 0;
    }

    @Override
    public List<ServerTask> getTasks() {
      return null;
    }
  };
  return serverMetrics;
}
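For context, a minimal usage sketch (not part of the original test) of the stub above. The assertion values only reflect what this stub's getRegionMetrics() and getUserMetrics() return; the call site shown here is hypothetical.

// Hypothetical usage sketch: the stub reports noOfRegions regions and no per-user metrics.
ServerMetrics metrics = getServerMetrics(3);
assert metrics.getRegionMetrics().size() == 3;  // one synthetic RegionMetrics per generated region name
assert metrics.getUserMetrics().isEmpty();      // getUserMetrics() in this stub returns an empty map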
Use of org.apache.hadoop.hbase.UserMetrics in project hbase by apache.
Class TestUtils, method createDummyClusterMetrics:
public static ClusterMetrics createDummyClusterMetrics() {
  Map<ServerName, ServerMetrics> serverMetricsMap = new HashMap<>();

  // host1
  List<RegionMetrics> regionMetricsList = new ArrayList<>();
  List<UserMetrics> userMetricsList = new ArrayList<>();
  userMetricsList.add(createUserMetrics("FOO", 1, 2, 4));
  userMetricsList.add(createUserMetrics("BAR", 2, 3, 3));
  regionMetricsList.add(createRegionMetrics(
    "table1,,1.00000000000000000000000000000000.", 100, 50, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
    new Size(100, Size.Unit.MEGABYTE), 0.1f, 100, 100, "2019-07-22 00:00:00"));
  regionMetricsList.add(createRegionMetrics(
    "table2,1,2.00000000000000000000000000000001.", 200, 100, 200,
    new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
    new Size(200, Size.Unit.MEGABYTE), 0.2f, 50, 200, "2019-07-22 00:00:01"));
  regionMetricsList.add(createRegionMetrics(
    "namespace:table3,,3_0001.00000000000000000000000000000002.", 300, 150, 300,
    new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
    new Size(300, Size.Unit.MEGABYTE), 0.3f, 100, 300, "2019-07-22 00:00:02"));
  ServerName host1 = ServerName.valueOf("host1.apache.com", 1000, 1);
  serverMetricsMap.put(host1, createServerMetrics(host1, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 100,
    regionMetricsList, userMetricsList));

  // host2
  regionMetricsList.clear();
  userMetricsList.clear();
  userMetricsList.add(createUserMetrics("FOO", 5, 7, 3));
  userMetricsList.add(createUserMetrics("BAR", 4, 8, 4));
  regionMetricsList.add(createRegionMetrics(
    "table1,1,4.00000000000000000000000000000003.", 100, 50, 100,
    new Size(100, Size.Unit.MEGABYTE), new Size(200, Size.Unit.MEGABYTE), 1,
    new Size(100, Size.Unit.MEGABYTE), 0.4f, 50, 100, "2019-07-22 00:00:03"));
  regionMetricsList.add(createRegionMetrics(
    "table2,,5.00000000000000000000000000000004.", 200, 100, 200,
    new Size(200, Size.Unit.MEGABYTE), new Size(400, Size.Unit.MEGABYTE), 2,
    new Size(200, Size.Unit.MEGABYTE), 0.5f, 150, 200, "2019-07-22 00:00:04"));
  regionMetricsList.add(createRegionMetrics(
    "namespace:table3,,6.00000000000000000000000000000005.", 300, 150, 300,
    new Size(300, Size.Unit.MEGABYTE), new Size(600, Size.Unit.MEGABYTE), 3,
    new Size(300, Size.Unit.MEGABYTE), 0.6f, 200, 300, "2019-07-22 00:00:05"));
  ServerName host2 = ServerName.valueOf("host2.apache.com", 1001, 2);
  serverMetricsMap.put(host2, createServerMetrics(host2, 200,
    new Size(16, Size.Unit.GIGABYTE), new Size(32, Size.Unit.GIGABYTE), 200,
    regionMetricsList, userMetricsList));

  ServerName host3 = ServerName.valueOf("host3.apache.com", 1002, 3);
  return ClusterMetricsBuilder.newBuilder()
    .setHBaseVersion("3.0.0-SNAPSHOT")
    .setClusterId("01234567-89ab-cdef-0123-456789abcdef")
    .setLiveServerMetrics(serverMetricsMap)
    .setDeadServerNames(Collections.singletonList(host3))
    .setRegionsInTransition(Collections.singletonList(new RegionState(
      RegionInfoBuilder.newBuilder(TableName.valueOf("table4"))
        .setStartKey(new byte[0]).setEndKey(new byte[0]).setOffline(true)
        .setReplicaId(0).setRegionId(0).setSplit(false).build(),
      RegionState.State.OFFLINE, host3)))
    .build();
}
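A minimal sketch (not part of TestUtils) of how the dummy cluster's per-user metrics could be read back; it assumes the UserMetrics accessors getReadRequestCount() and getWriteRequestCount() and is only illustrative.

// Hypothetical sketch: walk the dummy cluster and print per-user request counts.
ClusterMetrics cluster = TestUtils.createDummyClusterMetrics();
cluster.getLiveServerMetrics().forEach((serverName, serverMetrics) ->
  serverMetrics.getUserMetrics().forEach((user, userMetrics) ->
    System.out.println(serverName + " " + Bytes.toString(user)
      + ": reads=" + userMetrics.getReadRequestCount()
      + ", writes=" + userMetrics.getWriteRequestCount())));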