Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class TestContainerMetrics, method testContainerMetricsHistogram.
/**
 * Run a test that submits values for actual memory usage and checks that
 * the histogram percentiles come out correctly.
 * @throws Exception
 */
@Test
public void testContainerMetricsHistogram() throws Exception {
  // Submit two values, 1024 and 2048. The 75th, 90th, 95th and 99th
  // percentiles will all be 2048; the 50th percentile will be
  // 1536 ((1024 + 2048) / 2). If we kept recording 1024 and 2048 in a
  // loop, the 50th percentile would tend closer to 2048.
  Map<String, Long> expectedValues = new HashMap<>();
  expectedValues.put("PMemUsageMBHistogram50thPercentileMBs", 1536L);
  expectedValues.put("PMemUsageMBHistogram75thPercentileMBs", 2048L);
  expectedValues.put("PMemUsageMBHistogram90thPercentileMBs", 2048L);
  expectedValues.put("PMemUsageMBHistogram95thPercentileMBs", 2048L);
  expectedValues.put("PMemUsageMBHistogram99thPercentileMBs", 2048L);
  expectedValues.put("PCpuUsagePercentHistogram50thPercentilePercents", 0L);
  expectedValues.put("PCpuUsagePercentHistogram75thPercentilePercents", 0L);
  expectedValues.put("PCpuUsagePercentHistogram90thPercentilePercents", 0L);
  expectedValues.put("PCpuUsagePercentHistogram95thPercentilePercents", 0L);
  expectedValues.put("PCpuUsagePercentHistogram99thPercentilePercents", 0L);
  Set<String> testResults = new HashSet<>();
  int delay = 10;
  int rolloverDelay = 1000;
  MetricsCollectorImpl collector = new MetricsCollectorImpl();
  ContainerId containerId = mock(ContainerId.class);
  ContainerMetrics metrics =
      ContainerMetrics.forContainer(containerId, delay, 0);
  metrics.recordMemoryUsage(1024);
  metrics.recordMemoryUsage(2048);
  // Wait for the percentile window to roll over before collecting.
  Thread.sleep(rolloverDelay + 10);
  metrics.getMetrics(collector, true);
  for (MetricsRecord record : collector.getRecords()) {
    for (AbstractMetric metric : record.metrics()) {
      String metricName = metric.name();
      if (expectedValues.containsKey(metricName)) {
        Long expectedValue = expectedValues.get(metricName);
        Assert.assertEquals(
            "Metric " + metricName + " doesn't have expected value",
            expectedValue, metric.value());
        testResults.add(metricName);
      }
    }
  }
  // Every expected metric must have been observed.
  Assert.assertEquals(expectedValues.keySet(), testResults);
}
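The expected values above follow from rank-based percentile math with linear interpolation over the two samples. A minimal standalone sketch of that arithmetic (an illustration only, not the estimator ContainerMetrics uses internally, which may favor boundary values; that is why the comment notes a long run of samples would pull the 50th percentile toward 2048):

import java.util.Arrays;

public class PercentileMath {
  // Rank-based percentile with linear interpolation between neighbors.
  static double percentile(long[] sorted, double p) {
    double pos = p * (sorted.length + 1);   // 1-based fractional rank
    int lower = (int) Math.floor(pos);
    if (lower < 1) {
      return sorted[0];
    }
    if (lower >= sorted.length) {
      return sorted[sorted.length - 1];
    }
    double frac = pos - lower;
    return sorted[lower - 1] + frac * (sorted[lower] - sorted[lower - 1]);
  }

  public static void main(String[] args) {
    long[] samples = { 1024, 2048 };
    Arrays.sort(samples);
    System.out.println(percentile(samples, 0.50));  // 1536.0, as expected above
    System.out.println(percentile(samples, 0.75));  // 2048.0
    System.out.println(percentile(samples, 0.99));  // 2048.0
  }
}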
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class TestGangliaMetrics, method testTagsForPrefix.
@Test
public void testTagsForPrefix() throws Exception {
  ConfigBuilder cb = new ConfigBuilder()
      .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.all", "*")
      .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.some",
          "NumActiveSinks, NumActiveSources")
      .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.none", "");
  GangliaSink30 sink = new GangliaSink30();
  sink.init(cb.subset(testNamePrefix + ".sink.ganglia"));
  List<MetricsTag> tags = new ArrayList<MetricsTag>();
  tags.add(new MetricsTag(MsInfo.Context, "all"));
  tags.add(new MetricsTag(MsInfo.NumActiveSources, "foo"));
  tags.add(new MetricsTag(MsInfo.NumActiveSinks, "bar"));
  tags.add(new MetricsTag(MsInfo.NumAllSinks, "haa"));
  tags.add(new MetricsTag(MsInfo.Hostname, "host"));
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  MetricsRecord record =
      new MetricsRecordImpl(MsInfo.Context, (long) 1, tags, metrics);
  StringBuilder sb = new StringBuilder();
  // Context "all" ("*"): the tags are prepended to the metric name
  // (note that Context and Hostname do not appear in the prefix).
  sink.appendPrefix(record, sb);
  assertEquals(".NumActiveSources=foo.NumActiveSinks=bar.NumAllSinks=haa",
      sb.toString());
  // Context "some": only the tags listed in the config are prepended.
  tags.set(0, new MetricsTag(MsInfo.Context, "some"));
  sb = new StringBuilder();
  sink.appendPrefix(record, sb);
  assertEquals(".NumActiveSources=foo.NumActiveSinks=bar", sb.toString());
  // Context "none": an empty tag list means no prefix at all.
  tags.set(0, new MetricsTag(MsInfo.Context, "none"));
  sb = new StringBuilder();
  sink.appendPrefix(record, sb);
  assertEquals("", sb.toString());
  // Context "nada" is not configured, so again no prefix.
  tags.set(0, new MetricsTag(MsInfo.Context, "nada"));
  sb = new StringBuilder();
  sink.appendPrefix(record, sb);
  assertEquals("", sb.toString());
}
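Outside of a unit test, the same tagsForPrefix switches would be set in hadoop-metrics2.properties rather than through ConfigBuilder. A hypothetical deployment sketch (the "ganglia" sink instance name mirrors the test's key prefix; the class name is the GangliaSink30 used above):

# hadoop-metrics2.properties (hypothetical sketch)
*.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30
# Records in the "all" context: prepend every tag to the metric name.
*.sink.ganglia.tagsForPrefix.all=*
# Records in the "some" context: prepend only the listed tags.
*.sink.ganglia.tagsForPrefix.some=NumActiveSinks, NumActiveSources
# Records in the "none" context: prepend no tags.
*.sink.ganglia.tagsForPrefix.none=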
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class TestGraphiteMetrics, method testPutMetrics2.
@Test
public void testPutMetrics2() {
  GraphiteSink sink = new GraphiteSink();
  List<MetricsTag> tags = new ArrayList<MetricsTag>();
  tags.add(new MetricsTag(MsInfo.Context, "all"));
  tags.add(new MetricsTag(MsInfo.Hostname, null));
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  metrics.add(makeMetric("foo1", 1));
  metrics.add(makeMetric("foo2", 2));
  MetricsRecord record =
      new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  final GraphiteSink.Graphite mockGraphite = makeGraphite();
  Whitebox.setInternalState(sink, "graphite", mockGraphite);
  sink.putMetrics(record);
  try {
    verify(mockGraphite).write(argument.capture());
  } catch (IOException e) {
    e.printStackTrace();
  }
  // The metrics set has no defined iteration order, so accept either order.
  String result = argument.getValue();
  assertTrue(result.equals(
      "null.all.Context.Context=all.foo1 1 10\n"
          + "null.all.Context.Context=all.foo2 2 10\n")
      || result.equals(
      "null.all.Context.Context=all.foo2 2 10\n"
          + "null.all.Context.Context=all.foo1 1 10\n"));
}
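Each asserted line follows Graphite's plaintext protocol: a dot-joined metric path, the value, and a Unix timestamp in seconds, so the record's 10000 ms becomes 10. The leading "null" appears because the sink's path prefix is never configured via init() in this test. A minimal sketch of that line layout (a hypothetical helper, not GraphiteSink's actual internals):

// Hypothetical illustration of the Graphite plaintext line format;
// GraphiteSink's real assembly logic lives in putMetrics().
static String graphiteLine(String path, Number value, long timestampMillis) {
  // "<dotted.metric.path> <value> <unix seconds>\n"
  return path + " " + value + " " + (timestampMillis / 1000) + "\n";
}

// graphiteLine("null.all.Context.Context=all.foo1", 1, 10000)
//   -> "null.all.Context.Context=all.foo1 1 10\n"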
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class TestGraphiteMetrics, method makeMetric.
/** Builds a mocked AbstractMetric that reports the given name and value. */
private AbstractMetric makeMetric(String name, Number value) {
  AbstractMetric metric = mock(AbstractMetric.class);
  when(metric.name()).thenReturn(name);
  when(metric.value()).thenReturn(value);
  return metric;
}
Use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.
The class TestGraphiteMetrics, method testPutMetrics3.
/**
 * Assert that timestamps are converted correctly; see HADOOP-11182.
 */
@Test
public void testPutMetrics3() {
  // Set up a GraphiteSink backed by a mocked Graphite connection.
  GraphiteSink sink = new GraphiteSink();
  final GraphiteSink.Graphite mockGraphite = makeGraphite();
  Whitebox.setInternalState(sink, "graphite", mockGraphite);
  // Given two metrics records with timestamps 1000 milliseconds apart.
  List<MetricsTag> tags = Collections.emptyList();
  Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
  metrics.add(makeMetric("foo1", 1));
  MetricsRecord record1 =
      new MetricsRecordImpl(MsInfo.Context, 1000000000000L, tags, metrics);
  MetricsRecord record2 =
      new MetricsRecordImpl(MsInfo.Context, 1000000001000L, tags, metrics);
  sink.putMetrics(record1);
  sink.putMetrics(record2);
  sink.flush();
  try {
    sink.close();
  } catch (IOException e) {
    e.printStackTrace();
  }
  // Then the timestamps in the Graphite stream should differ by one second.
  try {
    verify(mockGraphite).write(eq("null.default.Context.foo1 1 1000000000\n"));
    verify(mockGraphite).write(eq("null.default.Context.foo1 1 1000000001\n"));
  } catch (IOException e) {
    e.printStackTrace();
  }
}
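The fixture makes the HADOOP-11182 fix concrete: MetricsRecord timestamps are in milliseconds, while Graphite expects whole seconds, so the sink must divide by 1000. The arithmetic behind the two assertions, spelled out:

long ts1 = 1000000000000L / 1000;  // 1000000000 s, first asserted line
long ts2 = 1000000001000L / 1000;  // 1000000001 s, exactly one second later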