Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by Apache: class TestMetricsSystemImpl, method testQSize().
  @Test
  public void testQSize() throws Exception {
    new ConfigBuilder().add("*.period", 8)
        .add("*.queue.capacity", 2)
        .add("test.sink.test.class", TestSink.class.getName())
        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    final CountDownLatch proceedSignal = new CountDownLatch(1);
    final CountDownLatch reachedPutMetricSignal = new CountDownLatch(1);
    ms.start();
    try {
      MetricsSink slowSink = mock(MetricsSink.class);
      MetricsSink dataSink = mock(MetricsSink.class);
      ms.registerSink("slowSink", "The sink that will wait on putMetric", slowSink);
      ms.registerSink("dataSink", "The sink I'll use to get info about slowSink", dataSink);
      // Block the slow sink inside putMetrics until proceedSignal is released.
      doAnswer(new Answer() {
        @Override
        public Object answer(InvocationOnMock invocation) throws Throwable {
          reachedPutMetricSignal.countDown();
          proceedSignal.await();
          return null;
        }
      }).when(slowSink).putMetrics(any(MetricsRecord.class));
      // Trigger metric collection the first time.
      ms.onTimerEvent();
      assertTrue(reachedPutMetricSignal.await(1, TimeUnit.SECONDS));
      // Because the slow sink is still processing the first record,
      // its queue length should be 1 at the second collection.
      ms.onTimerEvent();
      // r1 captures the records delivered to dataSink (declared outside this snippet).
      verify(dataSink, timeout(500).times(2)).putMetrics(r1.capture());
      List<MetricsRecord> mr = r1.getAllValues();
      Number qSize = Iterables.find(mr.get(1).metrics(),
          new Predicate<AbstractMetric>() {
            @Override
            public boolean apply(@Nullable AbstractMetric input) {
              assert input != null;
              return input.name().equals("Sink_slowSinkQsize");
            }
          }).value();
      assertEquals(1, qSize);
    } finally {
      proceedSignal.countDown();
      ms.stop();
    }
  }
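The captor r1 used above is not declared in the snippet. A minimal sketch of the supporting declaration, assuming a Mockito ArgumentCaptor field on the test class (the exact form in the full TestMetricsSystemImpl may differ):

  // Captures every MetricsRecord passed to dataSink.putMetrics(...);
  // initialized by MockitoAnnotations/MockitoJUnitRunner before each test.
  @Captor
  private ArgumentCaptor<MetricsRecord> r1;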
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by Apache: class TestMetricsVisitor, method testCommon().
  /**
   * Test the common use cases.
   */
  @Test
  public void testCommon() {
    MetricsVisitor visitor = mock(MetricsVisitor.class);
    MetricsRegistry registry = new MetricsRegistry("test");
    List<AbstractMetric> metrics = MetricsLists.builder("test")
        .addCounter(info("c1", "int counter"), 1)
        .addCounter(info("c2", "long counter"), 2L)
        .addGauge(info("g1", "int gauge"), 5)
        .addGauge(info("g2", "long gauge"), 6L)
        .addGauge(info("g3", "float gauge"), 7f)
        .addGauge(info("g4", "double gauge"), 8d)
        .metrics();
    // Dispatch each metric to the mocked visitor via the visitor pattern.
    for (AbstractMetric metric : metrics) {
      metric.visit(visitor);
    }
    // c1, c2 and g1..g4 are ArgumentCaptor<MetricsInfo> fields declared outside this snippet.
    verify(visitor).counter(c1.capture(), eq(1));
    assertEquals("c1 name", "c1", c1.getValue().name());
    assertEquals("c1 description", "int counter", c1.getValue().description());
    verify(visitor).counter(c2.capture(), eq(2L));
    assertEquals("c2 name", "c2", c2.getValue().name());
    assertEquals("c2 description", "long counter", c2.getValue().description());
    verify(visitor).gauge(g1.capture(), eq(5));
    assertEquals("g1 name", "g1", g1.getValue().name());
    assertEquals("g1 description", "int gauge", g1.getValue().description());
    verify(visitor).gauge(g2.capture(), eq(6L));
    assertEquals("g2 name", "g2", g2.getValue().name());
    assertEquals("g2 description", "long gauge", g2.getValue().description());
    verify(visitor).gauge(g3.capture(), eq(7f));
    assertEquals("g3 name", "g3", g3.getValue().name());
    assertEquals("g3 description", "float gauge", g3.getValue().description());
    verify(visitor).gauge(g4.capture(), eq(8d));
    assertEquals("g4 name", "g4", g4.getValue().name());
    assertEquals("g4 description", "double gauge", g4.getValue().description());
  }
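The verify(...) calls above capture MetricsInfo arguments into c1, c2, and g1 through g4, which are not declared in the snippet. A minimal sketch of the assumed Mockito captor fields (names follow the snippet; the actual declarations in TestMetricsVisitor may differ):

  @Captor private ArgumentCaptor<MetricsInfo> c1;
  @Captor private ArgumentCaptor<MetricsInfo> c2;
  @Captor private ArgumentCaptor<MetricsInfo> g1;
  @Captor private ArgumentCaptor<MetricsInfo> g2;
  @Captor private ArgumentCaptor<MetricsInfo> g3;
  @Captor private ArgumentCaptor<MetricsInfo> g4;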
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by Apache: class MetricsRecords, method assertMetric().
  public static void assertMetric(MetricsRecord record, String metricName,
      Number expectedValue) {
    AbstractMetric resourceLimitMetric = getFirstMetricByName(record, metricName);
    assertNotNull(resourceLimitMetric);
    assertEquals(expectedValue, resourceLimitMetric.value());
  }
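For illustration only, a hypothetical call site for this helper (the record variable and the expected value are made up; the metric name mirrors the testQSize snippet above):

  // Assert that the captured record reports a queue size of 1 for the slow sink.
  MetricsRecords.assertMetric(record, "Sink_slowSinkQsize", 1);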
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by Apache: class MetricsRecords, method getMetricValueByName().
  public static Number getMetricValueByName(MetricsRecord record, String metricName) {
    AbstractMetric resourceLimitMetric = getFirstMetricByName(record, metricName);
    assertNotNull(resourceLimitMetric);
    return resourceLimitMetric.value();
  }
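Both helpers delegate to getFirstMetricByName, which is not shown on this page. A minimal sketch of such a lookup, assuming a plain linear scan over record.metrics() (the actual implementation in MetricsRecords may differ, e.g. it could use Guava filtering):

  // Returns the first metric in the record whose name matches, or null if none does.
  private static AbstractMetric getFirstMetricByName(MetricsRecord record, String name) {
    for (AbstractMetric metric : record.metrics()) {
      if (metric.name().equals(name)) {
        return metric;
      }
    }
    return null;
  }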
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by Apache: class StatsDSink, method putMetrics().
  @Override
  public void putMetrics(MetricsRecord record) {
    String hn = hostName;
    String ctx = record.context();
    String sn = serviceName;
    // Prefer the hostname, context and process name carried on the record's tags.
    for (MetricsTag tag : record.tags()) {
      if (tag.info().name().equals(MsInfo.Hostname.name()) && tag.value() != null) {
        hn = tag.value();
      } else if (tag.info().name().equals(MsInfo.Context.name()) && tag.value() != null) {
        ctx = tag.value();
      } else if (tag.info().name().equals(MsInfo.ProcessName.name()) && tag.value() != null) {
        sn = tag.value();
      }
    }
    // Build the common "<host>.<service>.<context>.<record>." prefix.
    StringBuilder buf = new StringBuilder();
    if (!skipHostname && hn != null) {
      int idx = hn.indexOf(".");
      if (idx == -1) {
        buf.append(hn).append(PERIOD);
      } else {
        // Keep only the short hostname.
        buf.append(hn.substring(0, idx)).append(PERIOD);
      }
    }
    buf.append(sn).append(PERIOD);
    buf.append(ctx).append(PERIOD);
    buf.append(record.name().replaceAll("\\.", "-")).append(PERIOD);
    // Collect datapoints.
    for (AbstractMetric metric : record.metrics()) {
      // Map the metric type to the StatsD type code: "c" for counters, "g" for gauges.
      String type = null;
      if (metric.type().equals(MetricType.COUNTER)) {
        type = "c";
      } else if (metric.type().equals(MetricType.GAUGE)) {
        type = "g";
      }
      StringBuilder line = new StringBuilder();
      line.append(buf.toString())
          .append(metric.name().replace(' ', '_'))
          .append(":")
          .append(metric.value())
          .append("|")
          .append(type);
      writeMetric(line.toString());
    }
  }
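For context, each line this sink writes follows the plain StatsD wire format, <prefix><metricName>:<value>|<type>, where the prefix is the short hostname, service name, context and record name joined by periods. A hypothetical example for a counter (the host, service, context, record and metric names below are illustrative only):

  nn-host.NameNode.dfs.NameNodeActivity.FilesCreated:42|c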