
Example 6 with MetricsRecord

Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project, from the class TestMetricsSourceAdapter, method testGetMetricsAndJmx.

@Test
public void testGetMetricsAndJmx() throws Exception {
    // create test source with a single metric counter of value 0
    TestSource source = new TestSource("test");
    MetricsSourceBuilder sb = MetricsAnnotations.newSourceBuilder(source);
    final MetricsSource s = sb.build();
    List<MetricsTag> injectedTags = new ArrayList<MetricsTag>();
    MetricsSourceAdapter sa = new MetricsSourceAdapter("test", "test", "test desc", s, injectedTags, null, null, 1, false);
    // all metrics are initially assumed to have changed
    MetricsCollectorImpl builder = new MetricsCollectorImpl();
    Iterable<MetricsRecordImpl> metricsRecords = sa.getMetrics(builder, true);
    // Validate getMetrics and JMX initial values
    MetricsRecordImpl metricsRecord = metricsRecords.iterator().next();
    assertEquals(0L, metricsRecord.metrics().iterator().next().value().longValue());
    // skip JMX cache TTL
    Thread.sleep(100);
    assertEquals(0L, (Number) sa.getAttribute("C1"));
    // change metric value
    source.incrementCnt();
    // validate getMetrics and JMX
    builder = new MetricsCollectorImpl();
    metricsRecords = sa.getMetrics(builder, true);
    metricsRecord = metricsRecords.iterator().next();
    assertTrue(metricsRecord.metrics().iterator().hasNext());
    // skip JMX cache TTL
    Thread.sleep(100);
    assertEquals(1L, (Number) sa.getAttribute("C1"));
}
Also used: MetricsSource(org.apache.hadoop.metrics2.MetricsSource), MetricsSourceBuilder(org.apache.hadoop.metrics2.lib.MetricsSourceBuilder), ArrayList(java.util.ArrayList), MetricsTag(org.apache.hadoop.metrics2.MetricsTag), Test(org.junit.Test)
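The test above depends on a TestSource class that is not shown here. Below is a minimal sketch of what such an annotated source might look like, assuming Hadoop's metrics2 annotation and lib APIs (org.apache.hadoop.metrics2.annotation.Metrics/@Metric and org.apache.hadoop.metrics2.lib.MutableCounterLong/MetricsRegistry); the "C1" metric name and the incrementCnt() method come from the test, while the field name and description string are assumptions.

@Metrics(context = "test")
class TestSource {
    // Counter surfaced through JMX as attribute "C1" in the test above;
    // the description string is an assumption.
    @Metric("C1 desc")
    MutableCounterLong c1;
    final MetricsRegistry registry;

    TestSource(String recName) {
        registry = new MetricsRegistry(recName);
    }

    public void incrementCnt() {
        c1.incr();
    }
}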

Example 7 with MetricsRecord

Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project, from the class TestMetricsSystemImpl, method checkMetricsRecords.

private void checkMetricsRecords(List<MetricsRecord> recs) {
    LOG.debug(recs);
    MetricsRecord r = recs.get(0);
    assertEquals("name", "s1rec", r.name());
    assertEquals("tags", new MetricsTag[] { tag(MsInfo.Context, "test"), tag(MsInfo.Hostname, hostname) }, r.tags());
    assertEquals("metrics", MetricsLists.builder("").addCounter(info("C1", "C1 desc"), 1L).addGauge(info("G1", "G1 desc"), 2L).addCounter(info("S1NumOps", "Number of ops for s1"), 1L).addGauge(info("S1AvgTime", "Average time for s1"), 0.0).metrics(), r.metrics());
    r = recs.get(1);
    assertTrue("NumActiveSinks should be 3", Iterables.contains(r.metrics(), new MetricGaugeInt(MsInfo.NumActiveSinks, 3)));
}
Also used: MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord)
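The tag(...) and info(...) calls above are helpers defined elsewhere in the test class. A reasonable assumption is that they are static imports of the interning factories in org.apache.hadoop.metrics2.lib.Interns, along the lines of:

import static org.apache.hadoop.metrics2.lib.Interns.info;
import static org.apache.hadoop.metrics2.lib.Interns.tag;

// Assumed behavior of the helpers used in the assertions above:
// info("C1", "C1 desc")       builds a MetricsInfo with that name and description
// tag(MsInfo.Context, "test") builds a MetricsTag for the Context info with value "test"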

Example 8 with MetricsRecord

Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project, from the class TestMetricsSystemImpl, method testInitFirstVerifyStopInvokedImmediately.

@Test
public void testInitFirstVerifyStopInvokedImmediately() throws Exception {
    DefaultMetricsSystem.shutdown();
    new ConfigBuilder().add("*.period", 8)
        .add("test.sink.test.class", TestSink.class.getName())
        .add("test.*.source.filter.exclude", "s0")
        .add("test.source.s1.metric.filter.exclude", "X*")
        .add("test.sink.sink1.metric.filter.exclude", "Y*")
        .add("test.sink.sink2.metric.filter.exclude", "Y*")
        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    ms.start();
    ms.register("s0", "s0 desc", new TestSource("s0rec"));
    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
    s1.c1.incr();
    s1.xxx.incr();
    s1.g1.set(2);
    s1.yyy.incr(2);
    s1.s1.add(0);
    MetricsSink sink1 = mock(MetricsSink.class);
    MetricsSink sink2 = mock(MetricsSink.class);
    ms.registerSink("sink1", "sink1 desc", sink1);
    ms.registerSink("sink2", "sink2 desc", sink2);
    // publish the metrics
    ms.publishMetricsNow();
    ms.stop();
    ms.shutdown();
    // When we call stop, at most two sources will be consumed by each sink thread.
    verify(sink1, atMost(2)).putMetrics(r1.capture());
    List<MetricsRecord> mr1 = r1.getAllValues();
    verify(sink2, atMost(2)).putMetrics(r2.capture());
    List<MetricsRecord> mr2 = r2.getAllValues();
    if (mr1.size() != 0 && mr2.size() != 0) {
        checkMetricsRecords(mr1);
        assertEquals("output", mr1, mr2);
    } else if (mr1.size() != 0) {
        checkMetricsRecords(mr1);
    } else if (mr2.size() != 0) {
        checkMetricsRecords(mr2);
    }
}
Also used: MetricsSink(org.apache.hadoop.metrics2.MetricsSink), MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord), Test(org.junit.Test)
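The captors r1 and r2 and the TestSource class used above are declared at the test-class level and are not part of the snippet. A hedged sketch of the assumed declarations, using Mockito's ArgumentCaptor and Hadoop's mutable metric classes from org.apache.hadoop.metrics2.lib (field names come from the test; descriptions are assumptions):

// Requires a Mockito runner or MockitoAnnotations.initMocks(this) to populate the captors.
@Captor private ArgumentCaptor<MetricsRecord> r1;
@Captor private ArgumentCaptor<MetricsRecord> r2;

@Metrics(context = "test")
private static class TestSource {
    @Metric("C1 desc") MutableCounterLong c1;
    @Metric("XXX desc") MutableCounterLong xxx;
    @Metric("G1 desc") MutableGaugeLong g1;
    @Metric("YYY desc") MutableGaugeLong yyy;
    @Metric MutableRate s1;
    final MetricsRegistry registry;

    TestSource(String recName) {
        registry = new MetricsRegistry(recName);
    }
}

The filter settings in the config (X* metrics excluded at the s1 source, Y* metrics excluded at the sinks) explain why checkMetricsRecords only expects the C1 counter, the G1 gauge, and the S1 rate metrics in the s1rec record.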

Example 9 with MetricsRecord

Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project, from the class TestStatsDMetrics, method testPutMetrics.

@Test(timeout = 3000)
public void testPutMetrics() throws IOException, InterruptedException {
    final StatsDSink sink = new StatsDSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    tags.add(new MetricsTag(MsInfo.Context, "jvm"));
    tags.add(new MetricsTag(MsInfo.ProcessName, "process"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1.25, MetricType.COUNTER));
    metrics.add(makeMetric("foo2", 2.25, MetricType.GAUGE));
    final MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    try (DatagramSocket sock = new DatagramSocket()) {
        sock.setReceiveBufferSize(8192);
        final StatsDSink.StatsD mockStatsD = new StatsD(sock.getLocalAddress().getHostName(), sock.getLocalPort());
        Whitebox.setInternalState(sink, "statsd", mockStatsD);
        final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
        sink.putMetrics(record);
        sock.receive(p);
        String result = new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
        assertTrue("Received data did not match data sent", result.equals("host.process.jvm.Context.foo1:1.25|c") || result.equals("host.process.jvm.Context.foo2:2.25|g"));
    } finally {
        sink.close();
    }
}
Also used: StatsDSink(org.apache.hadoop.metrics2.sink.StatsDSink), MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord), ArrayList(java.util.ArrayList), AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric), MetricsTag(org.apache.hadoop.metrics2.MetricsTag), StatsD(org.apache.hadoop.metrics2.sink.StatsDSink.StatsD), DatagramSocket(java.net.DatagramSocket), DatagramPacket(java.net.DatagramPacket), HashSet(java.util.HashSet), Test(org.junit.Test)
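makeMetric(...) is a helper in the same test class and is not shown above. One plausible implementation, assuming Mockito is on the classpath, stubs an AbstractMetric rather than subclassing it:

private AbstractMetric makeMetric(String name, Number value, MetricType type) {
    // Only name(), value() and type() are read when the sink formats the
    // StatsD line, so a stub is sufficient for the test.
    AbstractMetric metric = mock(AbstractMetric.class);
    when(metric.name()).thenReturn(name);
    when(metric.value()).thenReturn(value);
    when(metric.type()).thenReturn(type);
    return metric;
}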

Example 10 with MetricsRecord

Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project, from the class TestStatsDMetrics, method testPutMetrics2.

@Test(timeout = 3000)
public void testPutMetrics2() throws IOException {
    StatsDSink sink = new StatsDSink();
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Hostname, null));
    tags.add(new MetricsTag(MsInfo.Context, "jvm"));
    tags.add(new MetricsTag(MsInfo.ProcessName, "process"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    metrics.add(makeMetric("foo1", 1, MetricType.COUNTER));
    metrics.add(makeMetric("foo2", 2, MetricType.GAUGE));
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 10000, tags, metrics);
    try (DatagramSocket sock = new DatagramSocket()) {
        sock.setReceiveBufferSize(8192);
        final StatsDSink.StatsD mockStatsD = new StatsD(sock.getLocalAddress().getHostName(), sock.getLocalPort());
        Whitebox.setInternalState(sink, "statsd", mockStatsD);
        final DatagramPacket p = new DatagramPacket(new byte[8192], 8192);
        sink.putMetrics(record);
        sock.receive(p);
        String result = new String(p.getData(), 0, p.getLength(), Charset.forName("UTF-8"));
        assertTrue("Received data did not match data sent", result.equals("process.jvm.Context.foo1:1|c") || result.equals("process.jvm.Context.foo2:2|g"));
    } finally {
        sink.close();
    }
}
Also used: StatsDSink(org.apache.hadoop.metrics2.sink.StatsDSink), MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord), ArrayList(java.util.ArrayList), AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric), MetricsTag(org.apache.hadoop.metrics2.MetricsTag), StatsD(org.apache.hadoop.metrics2.sink.StatsDSink.StatsD), DatagramSocket(java.net.DatagramSocket), DatagramPacket(java.net.DatagramPacket), HashSet(java.util.HashSet), Test(org.junit.Test)
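The only difference from the previous example is the null Hostname tag: with no hostname value, the names the sink emits drop the leading host segment, so this test expects "process.jvm.Context.foo1:1|c" rather than "host.process.jvm.Context.foo1:1|c".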

Aggregations

AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric): 27
MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord): 25
MetricsTag (org.apache.hadoop.metrics2.MetricsTag): 20
Test (org.junit.Test): 18
ArrayList (java.util.ArrayList): 10
HashSet (java.util.HashSet): 8
IOException (java.io.IOException): 7
MetricsCollectorImpl (org.apache.hadoop.metrics2.impl.MetricsCollectorImpl): 4
GraphiteSink (org.apache.hadoop.metrics2.sink.GraphiteSink): 4
MetricsException (org.apache.hadoop.metrics2.MetricsException): 3
MetricsSink (org.apache.hadoop.metrics2.MetricsSink): 3
Matchers.anyString (org.mockito.Matchers.anyString): 3
DatagramPacket (java.net.DatagramPacket): 2
DatagramSocket (java.net.DatagramSocket): 2
HashMap (java.util.HashMap): 2
StatsDSink (org.apache.hadoop.metrics2.sink.StatsDSink): 2
StatsD (org.apache.hadoop.metrics2.sink.StatsDSink.StatsD): 2
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 2
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 2
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 2