Search in sources :

Example 16 with AbstractMetric

Use of org.apache.hadoop.metrics2.AbstractMetric in the Apache Hadoop project.

From the class TestContainerMetrics, method testContainerMetricsHistogram.

/**
   * Submits two memory-usage samples (1024 and 2048) and verifies that the
   * published histogram percentile metrics come out with the expected values.
   * @throws Exception if interrupted while waiting for the histogram rollover
   */
@Test
public void testContainerMetricsHistogram() throws Exception {
    // With exactly two samples (1024 and 2048), the 75th/90th/95th/99th
    // percentiles are all 2048 and the 50th percentile is the midpoint
    // 1536 ((1024+2048)/2). Recording the same pair repeatedly would push
    // the 50th percentile toward 2048, so we record each value only once.
    Map<String, Long> expected = new HashMap<>();
    expected.put("PMemUsageMBHistogram50thPercentileMBs", 1536L);
    for (String pct : new String[] { "75th", "90th", "95th", "99th" }) {
        expected.put("PMemUsageMBHistogram" + pct + "PercentileMBs", 2048L);
    }
    // No CPU usage was recorded, so every CPU percentile must be zero.
    for (String pct : new String[] { "50th", "75th", "90th", "95th", "99th" }) {
        expected.put("PCpuUsagePercentHistogram" + pct + "PercentilePercents", 0L);
    }
    final int flushPeriodMs = 10;
    final int rolloverWaitMs = 1000;
    MetricsCollectorImpl collector = new MetricsCollectorImpl();
    ContainerId containerId = mock(ContainerId.class);
    ContainerMetrics metrics = ContainerMetrics.forContainer(containerId, flushPeriodMs, 0);
    metrics.recordMemoryUsage(1024);
    metrics.recordMemoryUsage(2048);
    // Give the histogram time to roll over before collecting.
    Thread.sleep(rolloverWaitMs + 10);
    metrics.getMetrics(collector, true);
    // Check every expected metric and remember which ones we actually saw.
    Set<String> seen = new HashSet<>();
    for (MetricsRecord record : collector.getRecords()) {
        for (AbstractMetric metric : record.metrics()) {
            Long want = expected.get(metric.name());
            if (want != null) {
                Assert.assertEquals("Metric " + metric.name() + " doesn't have expected value", want, metric.value());
                seen.add(metric.name());
            }
        }
    }
    // Every expected metric must have been emitted exactly once by name.
    Assert.assertEquals(expected.keySet(), seen);
}
Also used : HashMap(java.util.HashMap) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) MetricsCollectorImpl(org.apache.hadoop.metrics2.impl.MetricsCollectorImpl) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 17 with AbstractMetric

Use of org.apache.hadoop.metrics2.AbstractMetric in the Apache Hadoop project.

From the class TestGangliaMetrics, method testTagsForPrefix.

/**
 * Verifies that GangliaSink30 builds the record-name prefix from exactly the
 * tags configured for the record's context: "*" means all tags, a list means
 * only those tags, an empty list (or an unknown context) means no tags.
 */
@Test
public void testTagsForPrefix() throws Exception {
    ConfigBuilder cb = new ConfigBuilder()
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.all", "*")
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.some", "NumActiveSinks, NumActiveSources")
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.none", "");
    GangliaSink30 sink = new GangliaSink30();
    sink.init(cb.subset(testNamePrefix + ".sink.ganglia"));
    // The Context tag at index 0 selects which tagsForPrefix rule applies;
    // the remaining tags are the candidates for inclusion in the prefix.
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Context, "all"));
    tags.add(new MetricsTag(MsInfo.NumActiveSources, "foo"));
    tags.add(new MetricsTag(MsInfo.NumActiveSinks, "bar"));
    tags.add(new MetricsTag(MsInfo.NumAllSinks, "haa"));
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 1, tags, new HashSet<AbstractMetric>());
    // Table of (context value, expected prefix): "nada" is not configured
    // at all and must behave like an empty tag list.
    String[][] cases = {
        { "all", ".NumActiveSources=foo.NumActiveSinks=bar.NumAllSinks=haa" },
        { "some", ".NumActiveSources=foo.NumActiveSinks=bar" },
        { "none", "" },
        { "nada", "" },
    };
    for (String[] testCase : cases) {
        tags.set(0, new MetricsTag(MsInfo.Context, testCase[0]));
        StringBuilder sb = new StringBuilder();
        sink.appendPrefix(record, sb);
        assertEquals(testCase[1], sb.toString());
    }
}
Also used : MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) ArrayList(java.util.ArrayList) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) GangliaSink30(org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30) MetricsTag(org.apache.hadoop.metrics2.MetricsTag) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 18 with AbstractMetric

Use of org.apache.hadoop.metrics2.AbstractMetric in the Apache Hadoop project.

From the class TestGraphiteMetrics, method testPutMetrics2.

/**
 * Verifies that GraphiteSink writes one line per metric, prefixed with the
 * record's tags, and that a null Hostname tag is rendered as "null" rather
 * than crashing the sink.
 */
@Test
public void testPutMetrics2() {
    GraphiteSink sink = new GraphiteSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Context, "all"));
    // Hostname is deliberately null to exercise the null-tag code path.
    tags.add(new MetricsTag(MsInfo.Hostname, null));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1));
    metrics.add(makeMetric("foo2", 2));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
    final GraphiteSink.Graphite mockGraphite = makeGraphite();
    Whitebox.setInternalState(sink, "graphite", mockGraphite);
    sink.putMetrics(record);
    try {
        verify(mockGraphite).write(argument.capture());
    } catch (IOException e) {
        // Fail fast with the cause preserved. Swallowing the exception
        // (the old printStackTrace()) let the test fall through to
        // argument.getValue() and fail with a confusing secondary error.
        throw new AssertionError(e);
    }
    String result = argument.getValue();
    // Metrics live in a HashSet, so the emission order of foo1/foo2 is
    // unspecified; accept either ordering of the two lines.
    assertEquals(true, result.equals("null.all.Context.Context=all.foo1 1 10\n" + "null.all.Context.Context=all.foo2 2 10\n") || result.equals("null.all.Context.Context=all.foo2 2 10\n" + "null.all.Context.Context=all.foo1 1 10\n"));
}
Also used : MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) ArrayList(java.util.ArrayList) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) Matchers.anyString(org.mockito.Matchers.anyString) IOException(java.io.IOException) MetricsTag(org.apache.hadoop.metrics2.MetricsTag) GraphiteSink(org.apache.hadoop.metrics2.sink.GraphiteSink) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 19 with AbstractMetric

Use of org.apache.hadoop.metrics2.AbstractMetric in the Apache Hadoop project.

From the class TestGraphiteMetrics, method makeMetric.

/**
 * Builds a Mockito stub of {@code AbstractMetric} that reports the given
 * name and value; all other methods keep Mockito's default behavior.
 */
private AbstractMetric makeMetric(String name, Number value) {
    AbstractMetric stub = mock(AbstractMetric.class);
    when(stub.value()).thenReturn(value);
    when(stub.name()).thenReturn(name);
    return stub;
}
Also used : AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric)

Example 20 with AbstractMetric

Use of org.apache.hadoop.metrics2.AbstractMetric in the Apache Hadoop project.

From the class TestGraphiteMetrics, method testPutMetrics3.

/**
     * Assert that timestamps are converted correctly, ticket HADOOP-11182:
     * record timestamps in milliseconds must be written to Graphite in
     * whole seconds, so records 1000 ms apart differ by exactly one second.
     */
@Test
public void testPutMetrics3() {
    // setup GraphiteSink
    GraphiteSink sink = new GraphiteSink();
    final GraphiteSink.Graphite mockGraphite = makeGraphite();
    Whitebox.setInternalState(sink, "graphite", mockGraphite);
    // given two metrics records with timestamps 1000 milliseconds apart.
    List<MetricsTag> tags = Collections.emptyList();
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1));
    MetricsRecord record1 = new MetricsRecordImpl(MsInfo.Context, 1000000000000L, tags, metrics);
    MetricsRecord record2 = new MetricsRecordImpl(MsInfo.Context, 1000000001000L, tags, metrics);
    sink.putMetrics(record1);
    sink.putMetrics(record2);
    sink.flush();
    try {
        sink.close();
        // then the timestamps in the graphite stream should differ by one second.
        verify(mockGraphite).write(eq("null.default.Context.foo1 1 1000000000\n"));
        verify(mockGraphite).write(eq("null.default.Context.foo1 1 1000000001\n"));
    } catch (IOException e) {
        // Fail the test with the cause preserved. The old printStackTrace()
        // swallowed failures from close() and from the verifications,
        // letting a broken sink pass silently.
        throw new AssertionError(e);
    }
}
Also used : MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) IOException(java.io.IOException) MetricsTag(org.apache.hadoop.metrics2.MetricsTag) GraphiteSink(org.apache.hadoop.metrics2.sink.GraphiteSink) HashSet(java.util.HashSet) Test(org.junit.Test)

Aggregations

AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric)32 MetricsTag (org.apache.hadoop.metrics2.MetricsTag)19 MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord)17 Test (org.junit.Test)12 ArrayList (java.util.ArrayList)8 HashSet (java.util.HashSet)8 IOException (java.io.IOException)7 GraphiteSink (org.apache.hadoop.metrics2.sink.GraphiteSink)4 MetricsException (org.apache.hadoop.metrics2.MetricsException)3 Matchers.anyString (org.mockito.Matchers.anyString)3 DatagramPacket (java.net.DatagramPacket)2 DatagramSocket (java.net.DatagramSocket)2 HashMap (java.util.HashMap)2 MetricsVisitor (org.apache.hadoop.metrics2.MetricsVisitor)2 MetricsCollectorImpl (org.apache.hadoop.metrics2.impl.MetricsCollectorImpl)2 StatsDSink (org.apache.hadoop.metrics2.sink.StatsDSink)2 StatsD (org.apache.hadoop.metrics2.sink.StatsDSink.StatsD)2 ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord)2 RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata)2 Predicate (com.google.common.base.Predicate)1