Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.
From the class TestDataNodeMetrics, method testDatanodeActiveXceiversCount.
/**
 * Verifies the {@code DataNodeActiveXceiversCount} gauge: it is zero on a
 * freshly started single-datanode cluster, and stays non-negative after a
 * write workload has been issued.
 *
 * @throws Exception if cluster setup or the write fails
 */
@Test
public void testDatanodeActiveXceiversCount() throws Exception {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    FileSystem fs = cluster.getFileSystem();
    List<DataNode> datanodes = cluster.getDataNodes();
    // JUnit convention: expected value first, actual value second, so a
    // failure message reads "expected:<1> but was:<N>" correctly.
    assertEquals(1, datanodes.size());
    DataNode datanode = datanodes.get(0);
    MetricsRecordBuilder rb = getMetrics(datanode.getMetrics().name());
    long dataNodeActiveXceiversCount =
        MetricsAsserts.getIntGauge("DataNodeActiveXceiversCount", rb);
    // No I/O has happened yet, so no xceiver threads should be active.
    assertEquals(0, dataNodeActiveXceiversCount);
    Path path = new Path("/counter.txt");
    DFSTestUtil.createFile(fs, path, 204800000, (short) 3, Time.monotonicNow());
    MetricsRecordBuilder rbNew = getMetrics(datanode.getMetrics().name());
    dataNodeActiveXceiversCount =
        MetricsAsserts.getIntGauge("DataNodeActiveXceiversCount", rbNew);
    // The write pipeline may already have completed by the time we sample
    // the gauge, so only sanity-check that it never goes negative rather
    // than asserting an exact in-flight count.
    assertTrue(dataNodeActiveXceiversCount >= 0);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.
From the class TestDataNodeMetrics, method testDatanodeBlocksReplicatedMetric.
/**
 * Verifies that the {@code BlocksReplicated} counter on the original
 * datanode increments when a block is re-replicated to a newly started
 * datanode.
 *
 * @throws Exception if cluster setup, the write, or replication waiting fails
 */
@Test
public void testDatanodeBlocksReplicatedMetric() throws Exception {
  Configuration conf = new HdfsConfiguration();
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    FileSystem fs = cluster.getFileSystem();
    List<DataNode> datanodes = cluster.getDataNodes();
    // JUnit convention: expected value first, actual value second.
    assertEquals(1, datanodes.size());
    DataNode datanode = datanodes.get(0);
    MetricsRecordBuilder rb = getMetrics(datanode.getMetrics().name());
    long blocksReplicated = getLongCounter("BlocksReplicated", rb);
    assertEquals("No blocks replicated yet", 0, blocksReplicated);
    // Write with replication 2 while only one datanode exists; the block is
    // under-replicated until a second datanode joins below.
    Path path = new Path("/counter.txt");
    DFSTestUtil.createFile(fs, path, 1024, (short) 2, Time.monotonicNow());
    cluster.startDataNodes(conf, 1, true, StartupOption.REGULAR, null);
    ExtendedBlock firstBlock = DFSTestUtil.getFirstBlock(fs, path);
    // Block until the namenode has driven replication up to 2 replicas.
    DFSTestUtil.waitForReplication(cluster, firstBlock, 1, 2, 0);
    MetricsRecordBuilder rbNew = getMetrics(datanode.getMetrics().name());
    blocksReplicated = getLongCounter("BlocksReplicated", rbNew);
    assertEquals("blocks replicated counter incremented", 1, blocksReplicated);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.
From the class TestDataNodeMetrics, method testDataNodeMetrics.
/**
 * Verifies basic datanode metrics after a large write: {@code BytesWritten}
 * matches the file length and at least one incremental block report was
 * sent. Uses {@link SimulatedFSDataset} so the >2 GiB file needs no real
 * disk space.
 *
 * @throws Exception if cluster setup or the write fails
 */
@Test
public void testDataNodeMetrics() throws Exception {
  Configuration conf = new HdfsConfiguration();
  SimulatedFSDataset.setFactory(conf);
  MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
  try {
    FileSystem fs = cluster.getFileSystem();
    // Deliberately larger than Integer.MAX_VALUE so the long-valued
    // BytesWritten counter is exercised beyond the int range.
    final long LONG_FILE_LEN = Integer.MAX_VALUE + 1L;
    DFSTestUtil.createFile(fs, new Path("/tmp.txt"), LONG_FILE_LEN, (short) 1, 1L);
    List<DataNode> datanodes = cluster.getDataNodes();
    // JUnit convention: expected value first, actual value second.
    assertEquals(1, datanodes.size());
    DataNode datanode = datanodes.get(0);
    MetricsRecordBuilder rb = getMetrics(datanode.getMetrics().name());
    assertCounter("BytesWritten", LONG_FILE_LEN, rb);
    assertTrue("Expected non-zero number of incremental block reports",
        getLongCounter("IncrementalBlockReportsNumOps", rb) > 0);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.
From the class TestDataNodeErasureCodingMetrics, method getLongMetric.
/**
 * Sums the named long counter across every datanode in the cluster.
 * Reconstruction work may run on any node, so the cluster-wide total is
 * the meaningful value.
 *
 * @param metricName name of the long counter to read from each datanode
 * @return the sum of the counter over all datanodes
 */
private long getLongMetric(String metricName) {
  long total = 0L;
  for (DataNode node : cluster.getDataNodes()) {
    total += getLongCounter(metricName, getMetrics(node.getMetrics().name()));
  }
  return total;
}
Use of org.apache.hadoop.metrics2.MetricsRecordBuilder in project hadoop by apache.
From the class TestDataNodeErasureCodingMetrics, method getLongMetricWithoutCheck.
/**
 * Sums the named long counter across every datanode, tolerating datanodes
 * on which the metric has not been registered yet (the "without check"
 * variant skips the existence assertion).
 *
 * @param metricName name of the long counter to read from each datanode
 * @return the sum of the counter over all datanodes, counting absent
 *         metrics as zero
 */
private long getLongMetricWithoutCheck(String metricName) {
  long total = 0L;
  for (DataNode node : cluster.getDataNodes()) {
    total += getLongCounterWithoutCheck(metricName, getMetrics(node.getMetrics().name()));
  }
  return total;
}
Aggregations