
Example 11 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in project hadoop by apache.

The class TestFSNamesystemMBean, method testWithFSNamesystemWriteLock.

// The test makes sure a JMX request can be processed even if the namesystem's
// writeLock is owned by another thread.
@Test
public void testWithFSNamesystemWriteLock() throws Exception {
    Configuration conf = new Configuration();
    MiniDFSCluster cluster = null;
    FSNamesystem fsn = null;
    int jmxCachePeriod = 1;
    new ConfigBuilder().add("namenode.period", jmxCachePeriod).save(TestMetricsConfig.getTestFilename("hadoop-metrics2-namenode"));
    try {
        cluster = new MiniDFSCluster.Builder(conf).build();
        cluster.waitActive();
        fsn = cluster.getNameNode().namesystem;
        fsn.writeLock();
        Thread.sleep(jmxCachePeriod * 1000);
        MBeanClient client = new MBeanClient();
        client.start();
        client.join(20000);
        assertTrue("JMX calls are blocked when FSNamesystem's writerlock" + "is owned by another thread", client.succeeded);
        client.interrupt();
    } finally {
        if (fsn != null && fsn.hasWriteLock()) {
            fsn.writeUnlock();
        }
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Also used: MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) Configuration(org.apache.hadoop.conf.Configuration) ConfigBuilder(org.apache.hadoop.metrics2.impl.ConfigBuilder) Test(org.junit.Test)
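The MBeanClient helper referenced above is defined elsewhere in TestFSNamesystemMBean and is not shown in this snippet. A minimal sketch of what such a client thread might look like, assuming the NameNode's FSNamesystemState MBean name and a representative attribute (both assumptions, not taken from the snippet):

import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

// Hypothetical stand-in for the MBeanClient helper used in the test above.
// It issues a JMX read from a separate thread and records whether the call
// returned, which is what the assertion on client.succeeded checks.
class MBeanClient extends Thread {
    volatile boolean succeeded = false;

    @Override
    public void run() {
        try {
            MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
            // Assumed MBean name and attribute; the real helper may read others.
            ObjectName name =
                new ObjectName("Hadoop:service=NameNode,name=FSNamesystemState");
            mbs.getAttribute(name, "BlocksTotal");
            succeeded = true;
        } catch (Exception e) {
            // Leave succeeded false so the test's assertTrue fails.
        }
    }
}

The point of the test is that this JMX read must complete even while the test thread holds the namesystem write lock; if it blocked on that lock, join(20000) would time out and succeeded would stay false.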

Example 12 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in project hadoop by apache.

The class TestGangliaMetrics, method testGangliaMetrics2.

@Test
public void testGangliaMetrics2() throws Exception {
    // Set a long interval to avoid periodic publishing; metrics are published
    // manually via MetricsSystem#publishMetricsNow below.
    ConfigBuilder cb = new ConfigBuilder().add("*.period", 120).add(testNamePrefix + ".sink.gsink30.context", // filter out only "test"
    testNamePrefix).add(testNamePrefix + ".sink.gsink31.context", // filter out only "test"
    testNamePrefix).save(TestMetricsConfig.getTestFilename("hadoop-metrics2-" + testNamePrefix));
    MetricsSystemImpl ms = new MetricsSystemImpl(testNamePrefix);
    ms.start();
    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
    s1.c1.incr();
    s1.xxx.incr();
    s1.g1.set(2);
    s1.yyy.incr(2);
    s1.s1.add(0);
    final int expectedCountFromGanglia30 = expectedMetrics.length;
    final int expectedCountFromGanglia31 = 2 * expectedMetrics.length;
    // Setup test for GangliaSink30
    AbstractGangliaSink gsink30 = new GangliaSink30();
    gsink30.init(cb.subset(testNamePrefix));
    MockDatagramSocket mockds30 = new MockDatagramSocket();
    GangliaMetricsTestHelper.setDatagramSocket(gsink30, mockds30);
    // Setup test for GangliaSink31
    AbstractGangliaSink gsink31 = new GangliaSink31();
    gsink31.init(cb.subset(testNamePrefix));
    MockDatagramSocket mockds31 = new MockDatagramSocket();
    GangliaMetricsTestHelper.setDatagramSocket(gsink31, mockds31);
    // register the sinks
    ms.register("gsink30", "gsink30 desc", gsink30);
    ms.register("gsink31", "gsink31 desc", gsink31);
    // publish the metrics
    ms.publishMetricsNow();
    ms.stop();
    // check GangliaSink30 data
    checkMetrics(mockds30.getCapturedSend(), expectedCountFromGanglia30);
    // check GangliaSink31 data
    checkMetrics(mockds31.getCapturedSend(), expectedCountFromGanglia31);
}
Also used: AbstractGangliaSink(org.apache.hadoop.metrics2.sink.ganglia.AbstractGangliaSink) GangliaSink31(org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31) GangliaSink30(org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30) Test(org.junit.Test)
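MockDatagramSocket and GangliaMetricsTestHelper are test helpers from the same package and are not shown here. A rough sketch of what the mock socket presumably does, assuming it simply records outgoing packets instead of sending them (the real helper may store them differently):

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch: capture every Ganglia packet the sink tries to send
// so the test can count and inspect them via getCapturedSend().
class MockDatagramSocket extends DatagramSocket {
    private final List<byte[]> capturedSend = new ArrayList<byte[]>();

    MockDatagramSocket() throws SocketException {
        super();
    }

    @Override
    public void send(DatagramPacket p) {
        // Copy only the valid portion of the packet buffer.
        byte[] data = new byte[p.getLength()];
        System.arraycopy(p.getData(), p.getOffset(), data, 0, p.getLength());
        capturedSend.add(data);
    }

    List<byte[]> getCapturedSend() {
        return capturedSend;
    }
}

The 3.1 sink is expected to capture twice as many packets (expectedCountFromGanglia31 is 2 × expectedMetrics.length), which is consistent with the Ganglia 3.1 wire format sending a metadata message in addition to each value message.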

Example 13 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in project hadoop by apache.

The class TestGangliaMetrics, method testTagsForPrefix.

@Test
public void testTagsForPrefix() throws Exception {
    ConfigBuilder cb = new ConfigBuilder()
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.all", "*")
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.some",
             "NumActiveSinks, NumActiveSources")
        .add(testNamePrefix + ".sink.ganglia.tagsForPrefix.none", "");
    GangliaSink30 sink = new GangliaSink30();
    sink.init(cb.subset(testNamePrefix + ".sink.ganglia"));
    List<MetricsTag> tags = new ArrayList<MetricsTag>();
    tags.add(new MetricsTag(MsInfo.Context, "all"));
    tags.add(new MetricsTag(MsInfo.NumActiveSources, "foo"));
    tags.add(new MetricsTag(MsInfo.NumActiveSinks, "bar"));
    tags.add(new MetricsTag(MsInfo.NumAllSinks, "haa"));
    tags.add(new MetricsTag(MsInfo.Hostname, "host"));
    Set<AbstractMetric> metrics = new HashSet<AbstractMetric>();
    MetricsRecord record = new MetricsRecordImpl(MsInfo.Context, (long) 1, tags, metrics);
    StringBuilder sb = new StringBuilder();
    sink.appendPrefix(record, sb);
    assertEquals(".NumActiveSources=foo.NumActiveSinks=bar.NumAllSinks=haa", sb.toString());
    tags.set(0, new MetricsTag(MsInfo.Context, "some"));
    sb = new StringBuilder();
    sink.appendPrefix(record, sb);
    assertEquals(".NumActiveSources=foo.NumActiveSinks=bar", sb.toString());
    tags.set(0, new MetricsTag(MsInfo.Context, "none"));
    sb = new StringBuilder();
    sink.appendPrefix(record, sb);
    assertEquals("", sb.toString());
    tags.set(0, new MetricsTag(MsInfo.Context, "nada"));
    sb = new StringBuilder();
    sink.appendPrefix(record, sb);
    assertEquals("", sb.toString());
}
Also used: MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) ArrayList(java.util.ArrayList) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) GangliaSink30(org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30) MetricsTag(org.apache.hadoop.metrics2.MetricsTag) HashSet(java.util.HashSet) Test(org.junit.Test)
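This example uses ConfigBuilder purely in-memory: nothing is saved to a file, and subset() hands the sink its slice of the configuration directly. For orientation, a rough sketch of the shape of the helper, assuming it wraps a commons-configuration2 PropertiesConfiguration (the class below is a hypothetical stand-in; the real helper lives in hadoop-common's test sources and may differ):

import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.io.FileHandler;

// Hypothetical sketch of the ConfigBuilder test helper used in these examples.
// Method names match the usage above; the real implementation may differ.
class ConfigBuilderSketch {
    private final PropertiesConfiguration config = new PropertiesConfiguration();

    ConfigBuilderSketch add(String key, Object value) {
        config.setProperty(key, value);
        return this;  // fluent chaining, as in the examples above
    }

    ConfigBuilderSketch save(String filename) {
        try {
            new FileHandler(config).save(filename);
        } catch (Exception e) {
            throw new RuntimeException("Error saving config to " + filename, e);
        }
        return this;
    }

    SubsetConfiguration subset(String prefix) {
        // Keys under "<prefix>." are exposed to the caller without the prefix.
        return new SubsetConfiguration(config, prefix, ".");
    }
}

The subset(prefix) call is what lets a key such as test.sink.ganglia.tagsForPrefix.all be read by the sink as plain tagsForPrefix.all.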

Example 14 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in project hadoop by apache.

The class TestMetricsSystemImpl, method testInitFirstVerifyCallBacks.

@Test
public void testInitFirstVerifyCallBacks() throws Exception {
    DefaultMetricsSystem.shutdown();
    new ConfigBuilder().add("*.period", 8).add("test.sink.test.class", TestSink.class.getName()).add("test.*.source.filter.exclude", "s0").add("test.source.s1.metric.filter.exclude", "X*").add("test.sink.sink1.metric.filter.exclude", "Y*").add("test.sink.sink2.metric.filter.exclude", "Y*").save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    ms.start();
    ms.register("s0", "s0 desc", new TestSource("s0rec"));
    TestSource s1 = ms.register("s1", "s1 desc", new TestSource("s1rec"));
    s1.c1.incr();
    s1.xxx.incr();
    s1.g1.set(2);
    s1.yyy.incr(2);
    s1.s1.add(0);
    MetricsSink sink1 = mock(MetricsSink.class);
    MetricsSink sink2 = mock(MetricsSink.class);
    ms.registerSink("sink1", "sink1 desc", sink1);
    ms.registerSink("sink2", "sink2 desc", sink2);
    // publish the metrics
    ms.publishMetricsNow();
    try {
        verify(sink1, timeout(200).times(2)).putMetrics(r1.capture());
        verify(sink2, timeout(200).times(2)).putMetrics(r2.capture());
    } finally {
        ms.stop();
        ms.shutdown();
    }
    // When we call stop, at most two sources will be consumed by each sink thread.
    List<MetricsRecord> mr1 = r1.getAllValues();
    List<MetricsRecord> mr2 = r2.getAllValues();
    checkMetricsRecords(mr1);
    assertEquals("output", mr1, mr2);
}
Also used: MetricsSink(org.apache.hadoop.metrics2.MetricsSink) MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) Test(org.junit.Test)
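The r1 and r2 captors used in the verify(...) calls are class-level Mockito ArgumentCaptors that are not part of this snippet. A sketch of what those declarations likely look like, assuming @Captor wiring via the Mockito JUnit runner (the actual test class may set them up differently):

import org.apache.hadoop.metrics2.MetricsRecord;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.runners.MockitoJUnitRunner;

// Hypothetical class skeleton showing only the captor wiring assumed above.
@RunWith(MockitoJUnitRunner.class)
public class TestMetricsSystemImplSketch {
    @Captor private ArgumentCaptor<MetricsRecord> r1;
    @Captor private ArgumentCaptor<MetricsRecord> r2;

    // Test methods call verify(sink, ...).putMetrics(r1.capture()) and later
    // read every captured record with r1.getAllValues().
}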

Example 15 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in project hadoop by apache.

The class TestMetricsSystemImpl, method testQSize.

@Test
public void testQSize() throws Exception {
    new ConfigBuilder().add("*.period", 8).add("*.queue.capacity", 2).add("test.sink.test.class", TestSink.class.getName()).save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    final CountDownLatch proceedSignal = new CountDownLatch(1);
    final CountDownLatch reachedPutMetricSignal = new CountDownLatch(1);
    ms.start();
    try {
        MetricsSink slowSink = mock(MetricsSink.class);
        MetricsSink dataSink = mock(MetricsSink.class);
        ms.registerSink("slowSink", "The sink that will wait on putMetric", slowSink);
        ms.registerSink("dataSink", "The sink I'll use to get info about slowSink", dataSink);
        doAnswer(new Answer() {

            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                reachedPutMetricSignal.countDown();
                proceedSignal.await();
                return null;
            }
        }).when(slowSink).putMetrics(any(MetricsRecord.class));
        // trigger metric collection first time
        ms.onTimerEvent();
        assertTrue(reachedPutMetricSignal.await(1, TimeUnit.SECONDS));
        // Now that the slow sink is still processing the first metric,
        // its queue length should be 1 for the second collection.
        ms.onTimerEvent();
        verify(dataSink, timeout(500).times(2)).putMetrics(r1.capture());
        List<MetricsRecord> mr = r1.getAllValues();
        Number qSize = Iterables.find(mr.get(1).metrics(), new Predicate<AbstractMetric>() {

            @Override
            public boolean apply(@Nullable AbstractMetric input) {
                assert input != null;
                return input.name().equals("Sink_slowSinkQsize");
            }
        }).value();
        assertEquals(1, qSize);
    } finally {
        proceedSignal.countDown();
        ms.stop();
    }
}
Also used: MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) AbstractMetric(org.apache.hadoop.metrics2.AbstractMetric) Predicate(com.google.common.base.Predicate) Answer(org.mockito.stubbing.Answer) MetricsSink(org.apache.hadoop.metrics2.MetricsSink) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Nullable(javax.annotation.Nullable) Test(org.junit.Test)
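The anonymous Answer that parks the slow sink can be expressed more compactly with a Java 8 lambda; this is a stylistic alternative (not how the Hadoop test itself is written), shown only for the stubbing step and reusing the same local variables:

        // Equivalent stubbing with a lambda: signal that putMetrics was reached,
        // then block until the test releases proceedSignal.
        doAnswer(invocation -> {
            reachedPutMetricSignal.countDown();
            proceedSignal.await();
            return null;
        }).when(slowSink).putMetrics(any(MetricsRecord.class));

Either form keeps the sink thread occupied on the first record, so the second timer event leaves exactly one record queued, which is what the Sink_slowSinkQsize assertion verifies.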

Aggregations

Test (org.junit.Test) 20
ConfigBuilder (org.apache.hadoop.metrics2.impl.ConfigBuilder) 16
SubsetConfiguration (org.apache.commons.configuration2.SubsetConfiguration) 11
DatagramSocket (java.net.DatagramSocket) 4
MulticastSocket (java.net.MulticastSocket) 4
MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord) 4
MetricsSink (org.apache.hadoop.metrics2.MetricsSink) 4
Configuration (org.apache.hadoop.conf.Configuration) 2
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster) 2
AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric) 2
MetricsSystemImpl (org.apache.hadoop.metrics2.impl.MetricsSystemImpl) 2
GangliaSink30 (org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30) 2
Predicate (com.google.common.base.Predicate) 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream) 1
FileInputStream (java.io.FileInputStream) 1
InputStream (java.io.InputStream) 1
ArrayList (java.util.ArrayList) 1
HashSet (java.util.HashSet) 1
Pattern (java.util.regex.Pattern) 1
Nullable (javax.annotation.Nullable) 1