Use of org.apache.hadoop.metrics2.annotation.Metrics in project hadoop by apache.
The class MetricsSystemImpl, method register:
@Override
public synchronized <T> T register(String name, String desc, T source) {
  MetricsSourceBuilder sb = MetricsAnnotations.newSourceBuilder(source);
  final MetricsSource s = sb.build();
  MetricsInfo si = sb.info();
  String name2 = name == null ? si.name() : name;
  final String finalDesc = desc == null ? si.description() : desc;
  final String finalName = // be friendly to non-metrics tests
      DefaultMetricsSystem.sourceName(name2, !monitoring);
  allSources.put(finalName, s);
  LOG.debug(finalName + ", " + finalDesc);
  if (monitoring) {
    registerSource(finalName, finalDesc, s);
  }
  // We want to re-register the source to pick up new config when the
  // metrics system restarts.
  register(finalName, new AbstractCallback() {
    @Override
    public void postStart() {
      registerSource(finalName, finalDesc, s);
    }
  });
  return source;
}
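For context, this register overload is what an application reaches when it hands the metrics system an object annotated with @Metrics; MetricsAnnotations.newSourceBuilder reads the annotations to build the MetricsSource that gets registered. Below is a minimal sketch of such a caller, assuming a hypothetical MyAppMetrics class (the annotations, MutableCounterLong, and DefaultMetricsSystem.instance().register call are the real metrics2 API; the class, field, and metric names are illustrative):

import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;

// Hypothetical annotated source; the source builder injects the mutable
// metric fields and sb.info() picks up the name/about values below.
@Metrics(name = "MyAppActivity", about = "Example application metrics",
    context = "myapp")
public class MyAppMetrics {

  @Metric("Number of requests handled")
  MutableCounterLong requests;

  public static MyAppMetrics create() {
    // Passing null for name or desc would fall back to the @Metrics values,
    // as handled by the name2/finalDesc logic shown above.
    return DefaultMetricsSystem.instance().register(
        "MyAppActivity", "Example application metrics", new MyAppMetrics());
  }

  public void onRequest() {
    requests.incr();
  }
}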
Use of org.apache.hadoop.metrics2.annotation.Metrics in project hadoop by apache.
The class GraphiteSink, method putMetrics:
@Override
public void putMetrics(MetricsRecord record) {
  StringBuilder lines = new StringBuilder();
  StringBuilder metricsPathPrefix = new StringBuilder();
  // Configure the hierarchical place to display the graph.
  metricsPathPrefix.append(metricsPrefix).append(".")
      .append(record.context()).append(".").append(record.name());
  for (MetricsTag tag : record.tags()) {
    if (tag.value() != null) {
      metricsPathPrefix.append(".");
      metricsPathPrefix.append(tag.name());
      metricsPathPrefix.append("=");
      metricsPathPrefix.append(tag.value());
    }
  }
  // The record timestamp is in milliseconds while Graphite expects an
  // epoch time in seconds.
  long timestamp = record.timestamp() / 1000L;
  // Collect datapoints.
  for (AbstractMetric metric : record.metrics()) {
    lines.append(metricsPathPrefix.toString() + "."
        + metric.name().replace(' ', '.')).append(" ")
        .append(metric.value()).append(" ").append(timestamp)
        .append("\n");
  }
  try {
    graphite.write(lines.toString());
  } catch (Exception e) {
    LOG.warn("Error sending metrics to Graphite", e);
    try {
      graphite.close();
    } catch (Exception e1) {
      throw new MetricsException("Error closing connection to Graphite", e1);
    }
  }
}
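For context, this sink is normally enabled through a hadoop-metrics2.properties file rather than constructed directly. A minimal sketch, assuming the server_host, server_port and metrics_prefix keys that GraphiteSink reads when it is initialized (the host name and prefix values here are placeholders):

*.sink.graphite.class=org.apache.hadoop.metrics2.sink.GraphiteSink
*.sink.graphite.server_host=graphite.example.com
*.sink.graphite.server_port=2003
*.sink.graphite.metrics_prefix=hadoop

With that prefix, each datapoint in the loop above is emitted in Graphite's plaintext protocol, one "path value timestamp" line per metric. For an illustrative record named namenode in the dfs context, with a Hostname tag and a metric named "Create Ops" of value 42, the line would look like hadoop.dfs.namenode.Hostname=host1.Create.Ops 42 1700000000, since spaces in the metric name are replaced with dots and the timestamp is truncated to whole seconds.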
Use of org.apache.hadoop.metrics2.annotation.Metrics in project hadoop by apache.
The class TestMutableMetrics, method testMutableQuantilesRollover:
/**
 * Test that {@link MutableQuantiles} rolls the window over at the specified
 * interval.
 */
@Test(timeout = 30000)
public void testMutableQuantilesRollover() throws Exception {
  MetricsRecordBuilder mb = mockMetricsRecordBuilder();
  MetricsRegistry registry = new MetricsRegistry("test");
  // Use a 5s rollover period
  MutableQuantiles quantiles =
      registry.newQuantiles("foo", "stat", "Ops", "Latency", 5);
  Quantile[] quants = MutableQuantiles.quantiles;
  String name = "Foo%dthPercentileLatency";
  String desc = "%d percentile latency with 5 second interval for stat";
  // Push values for three intervals
  long start = System.nanoTime() / 1000000;
  for (int i = 1; i <= 3; i++) {
    // Insert the values
    for (long j = 1; j <= 1000; j++) {
      quantiles.add(i);
    }
    // Sleep until 1s after the next 5s interval, to let the metrics
    // roll over
    long sleep = (start + (5000 * i) + 1000) - (System.nanoTime() / 1000000);
    Thread.sleep(sleep);
    // Verify that the window reset; check it has the values we pushed in
    registry.snapshot(mb, false);
    for (Quantile q : quants) {
      int percentile = (int) (100 * q.quantile);
      String n = String.format(name, percentile);
      String d = String.format(desc, percentile);
      verify(mb).addGauge(info(n, d), (long) i);
    }
  }
  // Verify the metrics were added the right number of times
  verify(mb, times(3)).addGauge(
      info("FooNumOps", "Number of ops for stat with 5s interval"),
      (long) 1000);
  for (Quantile q : quants) {
    int percentile = (int) (100 * q.quantile);
    String n = String.format(name, percentile);
    String d = String.format(desc, percentile);
    verify(mb, times(3)).addGauge(eq(info(n, d)), anyLong());
  }
}
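Outside of tests, the same rollover behavior applies to quantiles created on a real source. A minimal sketch, assuming a hypothetical RpcLatencyMetrics class (the newQuantiles signature and the @Metrics annotation are the real metrics2 API; the class, registry, and description names are illustrative):

import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableQuantiles;

// Hypothetical source tracking request latency quantiles over a 60s window.
@Metrics(context = "rpc")
public class RpcLatencyMetrics {
  final MetricsRegistry registry = new MetricsRegistry("RpcLatency");

  // name, description, sample name, value name, rollover interval (seconds);
  // the snapshot then exposes gauges such as Foo75thPercentileLatency and
  // FooNumOps, exactly as verified in the test above.
  final MutableQuantiles callLatency =
      registry.newQuantiles("foo", "request latency", "Ops", "Latency", 60);

  public void recordCall(long latencyMillis) {
    callLatency.add(latencyMillis); // feeds the current rollover window
  }
}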
Use of org.apache.hadoop.metrics2.annotation.Metrics in project hadoop by apache.
The class TestRollingAverages, method testRollingAveragesRollover:
/**
 * Tests the case:
 * <p>
 * 5s interval and 2 sliding windows
 * </p>
 * <p>
 * sample stream: 1000 times 1, 2, and 3, respectively, e.g. [1, 1...1],
 * [2, 2...2] and [3, 3...3]
 * </p>
 */
@Test(timeout = 30000)
public void testRollingAveragesRollover() throws Exception {
  final MetricsRecordBuilder rb = mockMetricsRecordBuilder();
  final String name = "foo2";
  // 5s roll over interval
  final int windowSizeMs = 5000;
  final int numWindows = 2;
  final int numOpsPerIteration = 1000;
  try (RollingAverages rollingAverages =
      new RollingAverages(windowSizeMs, numWindows)) {
    /* Push values for three intervals */
    final long start = Time.monotonicNow();
    for (int i = 1; i <= 3; i++) {
      /* insert value */
      for (long j = 1; j <= numOpsPerIteration; j++) {
        rollingAverages.add(name, i);
      }
      /*
       * Sleep until 1s after the next windowSize seconds interval, to let
       * the metrics roll over
       */
      final long sleep = (start + (windowSizeMs * i) + 1000)
          - Time.monotonicNow();
      Thread.sleep(sleep);
      /* Verify that the window reset; check it has the values we pushed in */
      rollingAverages.snapshot(rb, false);
      /*
       * #1 window with a series of 1 1000 times, e.g. [1, 1...1];
       * similarly, #2 window, e.g. [2, 2...2], #3 window, e.g. [3, 3...3]
       */
      final double rollingSum = numOpsPerIteration * (i > 1 ? (i - 1) : 0)
          + numOpsPerIteration * i;
      /* one empty window or all 2 windows full */
      final long rollingTotal = i > 1 ? 2 * numOpsPerIteration
          : numOpsPerIteration;
      verify(rb).addGauge(
          info("[Foo2]RollingAvgTime", "Rolling average time for foo2"),
          rollingSum / rollingTotal);
      /* Verify the metrics were added the right number of times */
      verify(rb, times(i)).addGauge(
          eq(info("[Foo2]RollingAvgTime", "Rolling average time for foo2")),
          anyDouble());
    }
  }
}
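The value checked by the first verify call follows directly from using two 5s windows: the reported average always spans the current window plus, once it exists, the previous one. A small standalone sketch of the same arithmetic the test encodes (1000 ops per iteration, values 1, 2 and 3; the class name is illustrative):

public class RollingAvgExpectation {
  public static void main(String[] args) {
    final long ops = 1000; // numOpsPerIteration in the test above
    for (int i = 1; i <= 3; i++) {
      // sum over the previous window (if any) plus the current window
      double sum = ops * (i > 1 ? (i - 1) : 0) + ops * i;
      // one window of samples on the first iteration, two afterwards
      long total = i > 1 ? 2 * ops : ops;
      System.out.println("iteration " + i + ": expected avg = " + sum / total);
    }
    // Prints 1.0, 1.5 and 2.5, the successive [Foo2]RollingAvgTime values.
  }
}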
Use of org.apache.hadoop.metrics2.annotation.Metrics in project hadoop by apache.
The class TestMetricsSourceAdapter, method testGetMetricsAndJmx:
@Test
public void testGetMetricsAndJmx() throws Exception {
  // create test source with a single metric counter of value 0
  TestSource source = new TestSource("test");
  MetricsSourceBuilder sb = MetricsAnnotations.newSourceBuilder(source);
  final MetricsSource s = sb.build();
  List<MetricsTag> injectedTags = new ArrayList<MetricsTag>();
  MetricsSourceAdapter sa = new MetricsSourceAdapter(
      "test", "test", "test desc", s, injectedTags, null, null, 1, false);
  // all metrics are initially assumed to have changed
  MetricsCollectorImpl builder = new MetricsCollectorImpl();
  Iterable<MetricsRecordImpl> metricsRecords = sa.getMetrics(builder, true);
  // Validate getMetrics and JMX initial values
  MetricsRecordImpl metricsRecord = metricsRecords.iterator().next();
  assertEquals(0L,
      metricsRecord.metrics().iterator().next().value().longValue());
  // skip JMX cache TTL
  Thread.sleep(100);
  assertEquals(0L, (Number) sa.getAttribute("C1"));
  // change metric value
  source.incrementCnt();
  // validate getMetrics and JMX
  builder = new MetricsCollectorImpl();
  metricsRecords = sa.getMetrics(builder, true);
  metricsRecord = metricsRecords.iterator().next();
  assertTrue(metricsRecord.metrics().iterator().hasNext());
  // skip JMX cache TTL
  Thread.sleep(100);
  assertEquals(1L, (Number) sa.getAttribute("C1"));
}
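The TestSource class constructed at the top of this test is not shown in the snippet. A minimal sketch of what such an annotated source looks like, assuming a single counter field (the annotations, MetricsRegistry, and MutableCounterLong are the real metrics2 API; the field name c1 matches the "C1" JMX attribute queried above, while the description text is illustrative):

import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;

// Sketch of the annotated source the test exercises: one counter, starting
// at 0, exposed as the JMX attribute "C1" through MetricsSourceAdapter.
@Metrics(context = "test")
class TestSource {
  @Metric("C1 counter description")
  MutableCounterLong c1;
  final MetricsRegistry registry;

  TestSource(String recName) {
    registry = new MetricsRegistry(recName);
  }

  public void incrementCnt() {
    c1.incr();
  }
}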