Use of org.apache.hadoop.metrics2.impl.MetricsCollectorImpl in project Hadoop by Apache.
The class TestMetricsSourceAdapter, method testGetMetricsAndJmx.
@Test
public void testGetMetricsAndJmx() throws Exception {
    // Create a test source exposing a single counter metric, initially 0.
    TestSource source = new TestSource("test");
    MetricsSourceBuilder sb = MetricsAnnotations.newSourceBuilder(source);
    final MetricsSource s = sb.build();
    List<MetricsTag> injectedTags = new ArrayList<>();
    // jmxCacheTTL is 1 ms here, so the short sleeps below are enough to
    // force the JMX attribute cache to refresh between reads.
    MetricsSourceAdapter sa = new MetricsSourceAdapter("test", "test", "test desc", s, injectedTags, null, null, 1, false);
    // All metrics are initially assumed to have changed.
    MetricsCollectorImpl builder = new MetricsCollectorImpl();
    Iterable<MetricsRecordImpl> metricsRecords = sa.getMetrics(builder, true);
    // Validate getMetrics and JMX initial values.
    MetricsRecordImpl metricsRecord = metricsRecords.iterator().next();
    assertEquals(0L, metricsRecord.metrics().iterator().next().value().longValue());
    // Skip the JMX cache TTL so getAttribute re-reads the source.
    Thread.sleep(100);
    assertEquals(0L, (Number) sa.getAttribute("C1"));
    // Change the metric value.
    source.incrementCnt();
    // Validate getMetrics and JMX again: the counter must now read 1,
    // mirroring the initial-value check above (checking hasNext() alone
    // would pass even if the increment were lost).
    builder = new MetricsCollectorImpl();
    metricsRecords = sa.getMetrics(builder, true);
    metricsRecord = metricsRecords.iterator().next();
    assertEquals(1L, metricsRecord.metrics().iterator().next().value().longValue());
    // Skip the JMX cache TTL again before reading the attribute.
    Thread.sleep(100);
    assertEquals(1L, (Number) sa.getAttribute("C1"));
}
Use of org.apache.hadoop.metrics2.impl.MetricsCollectorImpl in project Hadoop by Apache.
The class TestReadWriteDiskValidator, method testCheckFailures.
@Test
public void testCheckFailures() throws Throwable {
    ReadWriteDiskValidator readWriteDiskValidator = (ReadWriteDiskValidator) DiskValidatorFactory.getInstance(ReadWriteDiskValidator.NAME);
    // Create a temporary test directory under the system test directory.
    File testDir = Files.createTempDirectory(Paths.get(System.getProperty("test.build.data")), "test").toFile();
    // Everything after creation runs under try/finally so the temp
    // directory is removed even when an assertion fails mid-test.
    try {
        // Strip all permissions so every disk check on it must fail.
        Shell.execCommand(Shell.getSetPermissionCommand("000", false, testDir.getAbsolutePath()));
        // First failing check.
        try {
            readWriteDiskValidator.checkStatus(testDir);
            fail("Disk check should fail.");
        } catch (DiskErrorException e) {
            assertEquals("Disk Check failed!", e.getMessage());
        }
        MetricsSource source = ms.getSource(ReadWriteDiskValidatorMetrics.sourceName(testDir.toString()));
        MetricsCollectorImpl collector = new MetricsCollectorImpl();
        source.getMetrics(collector, true);
        // Second failing check.
        try {
            readWriteDiskValidator.checkStatus(testDir);
            fail("Disk check should fail.");
        } catch (DiskErrorException e) {
            assertEquals("Disk Check failed!", e.getMessage());
        }
        source.getMetrics(collector, true);
        // Verify the first metrics record.
        MetricsRecords.assertMetric(collector.getRecords().get(0), "FailureCount", 1);
        Long lastFailureTime1 = (Long) MetricsRecords.getMetricValueByName(collector.getRecords().get(0), "LastFailureTime");
        // Verify the second metrics record.
        MetricsRecords.assertMetric(collector.getRecords().get(1), "FailureCount", 2);
        Long lastFailureTime2 = (Long) MetricsRecords.getMetricValueByName(collector.getRecords().get(1), "LastFailureTime");
        assertTrue("The first failure time should be less than the second one", lastFailureTime1 < lastFailureTime2);
    } finally {
        testDir.delete();
    }
}
Use of org.apache.hadoop.metrics2.impl.MetricsCollectorImpl in project Hadoop by Apache.
The class TestReadWriteDiskValidator, method testReadWriteDiskValidator.
@Test
public void testReadWriteDiskValidator() throws DiskErrorException, InterruptedException {
    int count = 100;
    File testDir = new File(System.getProperty("test.build.data"));
    ReadWriteDiskValidator readWriteDiskValidator = (ReadWriteDiskValidator) DiskValidatorFactory.getInstance(ReadWriteDiskValidator.NAME);
    // Run the disk check `count` times; every run should feed one sample
    // into the read and write latency quantile estimators.
    for (int i = 0; i < count; i++) {
        readWriteDiskValidator.checkStatus(testDir);
    }
    ReadWriteDiskValidatorMetrics metric = ReadWriteDiskValidatorMetrics.getMetric(testDir.toString());
    // JUnit's assertEquals takes (message, expected, actual): the expected
    // sample count goes first, the estimator's observed count second.
    Assert.assertEquals("The count number of estimator in MutableQuantiles " + "metrics of file read is not right", count, metric.getFileReadQuantiles()[0].getEstimator().getCount());
    Assert.assertEquals("The count number of estimator in MutableQuantiles " + "metrics of file write is not right", count, metric.getFileWriteQuantiles()[0].getEstimator().getCount());
    MetricsSource source = ms.getSource(ReadWriteDiskValidatorMetrics.sourceName(testDir.toString()));
    MetricsCollectorImpl collector = new MetricsCollectorImpl();
    source.getMetrics(collector, true);
    // No check has failed, so the failure counters stay at zero.
    MetricsRecords.assertMetric(collector.getRecords().get(0), "FailureCount", 0);
    MetricsRecords.assertMetric(collector.getRecords().get(0), "LastFailureTime", (long) 0);
    // All MutableQuantiles haven't rolled over yet because the minimum
    // interval is 1 hour, so we just test that these metrics exist.
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "WriteLatency3600sNumOps");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "WriteLatency3600s50thPercentileLatencyMicros");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "WriteLatency86400sNumOps");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "WriteLatency864000sNumOps");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "ReadLatency3600sNumOps");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "ReadLatency3600s50thPercentileLatencyMicros");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "ReadLatency86400sNumOps");
    MetricsRecords.assertMetricNotNull(collector.getRecords().get(0), "ReadLatency864000sNumOps");
}
Use of org.apache.hadoop.metrics2.impl.MetricsCollectorImpl in project Hadoop by Apache.
The class TestContainerMetrics, method testContainerMetricsFlow.
@Test
public void testContainerMetricsFlow() throws InterruptedException {
    final String RECORD_COUNT_ERR = "Error in number of records";
    ContainerId cId = mock(ContainerId.class);
    MetricsCollectorImpl mc = new MetricsCollectorImpl();
    // Container metrics with a 100 ms flush period and 1 ms unregister delay.
    ContainerMetrics containerMetrics = ContainerMetrics.forContainer(cId, 100, 1);
    containerMetrics.recordMemoryUsage(1024);
    // Before the flush period elapses, no record is emitted.
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 0, mc.getRecords().size());
    // Once the period has passed, exactly one record is emitted.
    Thread.sleep(110);
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 1, mc.getRecords().size());
    mc.clear();
    // Each elapsed period yields one more record.
    Thread.sleep(110);
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 1, mc.getRecords().size());
    mc.clear();
    // Finishing the container forces an immediate flush...
    containerMetrics.finished();
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 1, mc.getRecords().size());
    mc.clear();
    // ...and the finished metrics keep reporting on every collection,
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 1, mc.getRecords().size());
    mc.clear();
    // even after another flush period goes by.
    Thread.sleep(110);
    containerMetrics.getMetrics(mc, true);
    assertEquals(RECORD_COUNT_ERR, 1, mc.getRecords().size());
}
Use of org.apache.hadoop.metrics2.impl.MetricsCollectorImpl in project Hadoop by Apache.
The class TestFairScheduler, method testPerfMetricsInited.
@Test
public void testPerfMetricsInited() {
    // Initializing and starting the scheduler registers its
    // FSOpDurations metrics source.
    scheduler.init(conf);
    scheduler.start();
    final MetricsCollectorImpl metricsCollector = new MetricsCollectorImpl();
    scheduler.fsOpDurations.getMetrics(metricsCollector, true);
    // A single record should be produced by the op-duration source.
    assertEquals("Incorrect number of perf metrics", 1, metricsCollector.getRecords().size());
}
Aggregations