Search in sources:

Example 1 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.

From the class TestMetricsSystemImpl, method testInitFirstVerifyStopInvokedImmediately.

/**
 * Verifies that calling stop() right after publishing delivers at most two
 * source records to each registered sink, and that whatever records were
 * delivered are well-formed and consistent across the two sinks.
 */
@Test
public void testInitFirstVerifyStopInvokedImmediately() throws Exception {
    DefaultMetricsSystem.shutdown();
    // Configure a test sink plus source- and metric-level exclusion filters.
    new ConfigBuilder().add("*.period", 8).add("test.sink.test.class", TestSink.class.getName()).add("test.*.source.filter.exclude", "s0").add("test.source.s1.metric.filter.exclude", "X*").add("test.sink.sink1.metric.filter.exclude", "Y*").add("test.sink.sink2.metric.filter.exclude", "Y*").save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl metricsSystem = new MetricsSystemImpl("Test");
    metricsSystem.start();
    metricsSystem.register("s0", "s0 desc", new TestSource("s0rec"));
    TestSource source1 = metricsSystem.register("s1", "s1 desc", new TestSource("s1rec"));
    // Touch a mix of metrics, including ones matched by the X*/Y* filters.
    source1.c1.incr();
    source1.xxx.incr();
    source1.g1.set(2);
    source1.yyy.incr(2);
    source1.s1.add(0);
    MetricsSink mockSink1 = mock(MetricsSink.class);
    MetricsSink mockSink2 = mock(MetricsSink.class);
    metricsSystem.registerSink("sink1", "sink1 desc", mockSink1);
    metricsSystem.registerSink("sink2", "sink2 desc", mockSink2);
    // publish the metrics
    metricsSystem.publishMetricsNow();
    metricsSystem.stop();
    metricsSystem.shutdown();
    //When we call stop, at most two sources will be consumed by each sink thread.
    verify(mockSink1, atMost(2)).putMetrics(r1.capture());
    List<MetricsRecord> recordsFromSink1 = r1.getAllValues();
    verify(mockSink2, atMost(2)).putMetrics(r2.capture());
    List<MetricsRecord> recordsFromSink2 = r2.getAllValues();
    // Validate whichever sink(s) received data; when both did, the two sinks
    // must have seen the same records.
    if (!recordsFromSink1.isEmpty()) {
        checkMetricsRecords(recordsFromSink1);
        if (!recordsFromSink2.isEmpty()) {
            assertEquals("output", recordsFromSink1, recordsFromSink2);
        }
    } else if (!recordsFromSink2.isEmpty()) {
        checkMetricsRecords(recordsFromSink2);
    }
}
Also used: MetricsSink(org.apache.hadoop.metrics2.MetricsSink) MetricsRecord(org.apache.hadoop.metrics2.MetricsRecord) Test(org.junit.Test)

Example 2 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.

From the class TestMetricsSystemImpl, method testHangOnSinkRead.

/**
 * HADOOP-11932: a sink must actually be driven to collect metrics after a
 * timer event; the 5-second timeout guards against the system hanging on a
 * sink read.
 */
@Test(timeout = 5000)
public void testHangOnSinkRead() throws Exception {
    new ConfigBuilder().add("*.period", 8).add("test.sink.test.class", TestSink.class.getName()).save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl metricsSystem = new MetricsSystemImpl("Test");
    metricsSystem.start();
    try {
        CountDownLatch latch = new CountDownLatch(1);
        MetricsSink closableSink = new TestClosableSink(latch);
        metricsSystem.registerSink("closeableSink", "The sink will be used to test closeability", closableSink);
        // First collection pass; the sink counts the latch down once it runs.
        metricsSystem.onTimerEvent();
        // The latch must trip within a second, proving the sink was invoked.
        assertTrue(latch.await(1, TimeUnit.SECONDS));
    } finally {
        metricsSystem.stop();
    }
}
Also used: MetricsSink(org.apache.hadoop.metrics2.MetricsSink) Test(org.junit.Test)

Example 3 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.

From the class TestRollingFileSystemSink, method testGetRollInterval.

/**
   * Test whether the roll interval is correctly calculated from the
   * configuration settings.
   */
/**
 * Test whether the roll interval is correctly calculated from the
 * configuration settings.
 */
@Test
public void testGetRollInterval() {
    // Every supported unit spelling must map to the equivalent milliseconds.
    doTestGetRollInterval(1, new String[] { "m", "min", "minute", "minutes" }, 60 * 1000L);
    doTestGetRollInterval(1, new String[] { "h", "hr", "hour", "hours" }, 60 * 60 * 1000L);
    doTestGetRollInterval(1, new String[] { "d", "day", "days" }, 24 * 60 * 60 * 1000L);
    // A bare number with no unit is interpreted as hours: "1" -> 3600000 ms.
    SubsetConfiguration unitlessConf = new ConfigBuilder().add("sink.roll-interval", "1").subset("sink");
    // We can reuse the same sink every time because we're setting the same
    // property every time.
    RollingFileSystemSink sink = new RollingFileSystemSink();
    sink.init(unitlessConf);
    assertEquals(3600000L, sink.getRollInterval());
    // Any letter outside the valid unit set must be rejected.
    for (char badUnit : "abcefgijklnopqrtuvwxyz".toCharArray()) {
        SubsetConfiguration badConf = new ConfigBuilder().add("sink.roll-interval", "90 " + badUnit).subset("sink");
        try {
            sink.init(badConf);
            sink.getRollInterval();
            fail("Allowed flush interval with bad units: " + badUnit);
        } catch (MetricsException expected) {
        // Expected: an invalid unit suffix must raise MetricsException.
        }
    }
}
Also used : ConfigBuilder(org.apache.hadoop.metrics2.impl.ConfigBuilder) MetricsException(org.apache.hadoop.metrics2.MetricsException) SubsetConfiguration(org.apache.commons.configuration2.SubsetConfiguration) Test(org.junit.Test)

Example 4 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.

From the class TestRollingFileSystemSink, method testInit.

/**
 * Verifies that every RollingFileSystemSink setting is picked up from the
 * configuration by init().
 */
@Test
public void testInit() {
    SubsetConfiguration conf = new ConfigBuilder()
        .add("sink.roll-interval", "10m")
        .add("sink.roll-offset-interval-millis", "1")
        .add("sink.basepath", "path")
        .add("sink.ignore-error", "true")
        .add("sink.allow-append", "true")
        .add("sink.source", "src")
        .subset("sink");
    RollingFileSystemSink sink = new RollingFileSystemSink();
    sink.init(conf);
    // 10 minutes == 600000 ms.
    assertEquals("The roll interval was not set correctly", sink.rollIntervalMillis, 600000);
    assertEquals("The roll offset interval was not set correctly", sink.rollOffsetIntervalMillis, 1);
    assertEquals("The base path was not set correctly", sink.basePath, new Path("path"));
    assertEquals("ignore-error was not set correctly", sink.ignoreError, true);
    assertEquals("allow-append was not set correctly", sink.allowAppend, true);
    assertEquals("The source was not set correctly", sink.source, "src");
}
Also used : Path(org.apache.hadoop.fs.Path) ConfigBuilder(org.apache.hadoop.metrics2.impl.ConfigBuilder) SubsetConfiguration(org.apache.commons.configuration2.SubsetConfiguration) Test(org.junit.Test)

Example 5 with ConfigBuilder

Use of org.apache.hadoop.metrics2.impl.ConfigBuilder in the Apache Hadoop project.

From the class TestGangliaSink, method testShouldCreateDatagramSocketByDefault.

/**
 * With no multicast settings configured, the Ganglia sink must open a plain
 * unicast DatagramSocket rather than a MulticastSocket.
 */
@Test
public void testShouldCreateDatagramSocketByDefault() throws Exception {
    GangliaSink30 sink = new GangliaSink30();
    // An empty subset config exercises the sink's defaults.
    sink.init(new ConfigBuilder().subset("test.sink.ganglia"));
    DatagramSocket socket = sink.getDatagramSocket();
    assertFalse("Did not create DatagramSocket", socket == null || socket instanceof MulticastSocket);
}
Also used: MulticastSocket(java.net.MulticastSocket) DatagramSocket(java.net.DatagramSocket) ConfigBuilder(org.apache.hadoop.metrics2.impl.ConfigBuilder) SubsetConfiguration(org.apache.commons.configuration2.SubsetConfiguration) Test(org.junit.Test)

Aggregations

Test (org.junit.Test)20 ConfigBuilder (org.apache.hadoop.metrics2.impl.ConfigBuilder)16 SubsetConfiguration (org.apache.commons.configuration2.SubsetConfiguration)11 DatagramSocket (java.net.DatagramSocket)4 MulticastSocket (java.net.MulticastSocket)4 MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord)4 MetricsSink (org.apache.hadoop.metrics2.MetricsSink)4 Configuration (org.apache.hadoop.conf.Configuration)2 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)2 AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric)2 MetricsSystemImpl (org.apache.hadoop.metrics2.impl.MetricsSystemImpl)2 GangliaSink30 (org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30)2 Predicate (com.google.common.base.Predicate)1 ByteArrayOutputStream (java.io.ByteArrayOutputStream)1 FileInputStream (java.io.FileInputStream)1 InputStream (java.io.InputStream)1 ArrayList (java.util.ArrayList)1 HashSet (java.util.HashSet)1 Pattern (java.util.regex.Pattern)1 Nullable (javax.annotation.Nullable)1