Example 16 with MetricsSystemImpl

Use of org.apache.hadoop.metrics2.impl.MetricsSystemImpl in project hadoop by apache.

From the class TestMetricsSystemImpl, method testQSize.

@Test
public void testQSize() throws Exception {
    new ConfigBuilder().add("*.period", 8)
        .add("*.queue.capacity", 2)
        .add("test.sink.test.class", TestSink.class.getName())
        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("Test");
    final CountDownLatch proceedSignal = new CountDownLatch(1);
    final CountDownLatch reachedPutMetricSignal = new CountDownLatch(1);
    ms.start();
    try {
        MetricsSink slowSink = mock(MetricsSink.class);
        MetricsSink dataSink = mock(MetricsSink.class);
        ms.registerSink("slowSink", "The sink that will wait on putMetric", slowSink);
        ms.registerSink("dataSink", "The sink I'll use to get info about slowSink", dataSink);
        doAnswer(new Answer() {

            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                reachedPutMetricSignal.countDown();
                proceedSignal.await();
                return null;
            }
        }).when(slowSink).putMetrics(any(MetricsRecord.class));
        // trigger metric collection first time
        ms.onTimerEvent();
        assertTrue(reachedPutMetricSignal.await(1, TimeUnit.SECONDS));
        // Now that the slow sink is still processing the first metric,
        // its queue length should be 1 for the second collection.
        ms.onTimerEvent();
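        // Note: r1 is an ArgumentCaptor<MetricsRecord>; in the full test class it is
        // declared as a class-level @Captor field rather than a local variable here.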
        verify(dataSink, timeout(500).times(2)).putMetrics(r1.capture());
        List<MetricsRecord> mr = r1.getAllValues();
        Number qSize = Iterables.find(mr.get(1).metrics(), new Predicate<AbstractMetric>() {

            @Override
            public boolean apply(@Nullable AbstractMetric input) {
                assert input != null;
                return input.name().equals("Sink_slowSinkQsize");
            }
        }).value();
        assertEquals(1, qSize);
    } finally {
        proceedSignal.countDown();
        ms.stop();
    }
}
Also used : MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), Predicate (com.google.common.base.Predicate), Answer (org.mockito.stubbing.Answer), MetricsSink (org.apache.hadoop.metrics2.MetricsSink), InvocationOnMock (org.mockito.invocation.InvocationOnMock), Nullable (javax.annotation.Nullable), Test (org.junit.Test)

Example 17 with MetricsSystemImpl

Use of org.apache.hadoop.metrics2.impl.MetricsSystemImpl in project hadoop by apache.

From the class RollingFileSystemSinkTestBase, method initMetricsSystem.

/**
   * Set up the metrics system, start it, and return it.
   * @param path the base path for the sink
   * @param ignoreErrors whether the sink should ignore errors
   * @param allowAppend whether the sink is allowed to append to existing files
   * @param useSecureParams whether to set the principal and keytab properties
   * @return the org.apache.hadoop.metrics2.MetricsSystem
   */
protected MetricsSystem initMetricsSystem(String path, boolean ignoreErrors, boolean allowAppend, boolean useSecureParams) {
    // If the prefix is not lower case, the metrics system won't be able to
    // read any of the properties.
    String prefix = methodName.getMethodName().toLowerCase();
    ConfigBuilder builder = new ConfigBuilder().add("*.period", 10000)
        .add(prefix + ".sink.mysink0.class", MockSink.class.getName())
        .add(prefix + ".sink.mysink0.basepath", path)
        .add(prefix + ".sink.mysink0.source", "testsrc")
        .add(prefix + ".sink.mysink0.context", "test1")
        .add(prefix + ".sink.mysink0.ignore-error", ignoreErrors)
        .add(prefix + ".sink.mysink0.allow-append", allowAppend)
        .add(prefix + ".sink.mysink0.roll-offset-interval-millis", 0)
        .add(prefix + ".sink.mysink0.roll-interval", "1h");
    if (useSecureParams) {
        builder.add(prefix + ".sink.mysink0.keytab-key", SINK_KEYTAB_FILE_KEY)
            .add(prefix + ".sink.mysink0.principal-key", SINK_PRINCIPAL_KEY);
    }
    builder.save(TestMetricsConfig.getTestFilename("hadoop-metrics2-" + prefix));
    MetricsSystemImpl ms = new MetricsSystemImpl(prefix);
    ms.start();
    return ms;
}
Also used : ConfigBuilder (org.apache.hadoop.metrics2.impl.ConfigBuilder), MetricsSystemImpl (org.apache.hadoop.metrics2.impl.MetricsSystemImpl)
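
For context, a test built on this base class might drive the helper roughly as follows. This is a minimal sketch, and the MyMetrics1 source class, the test method name, and the temporary path are illustrative assumptions rather than parts of the base class API:

// A minimal sketch (not from the Hadoop sources) of a hypothetical test method in a
// subclass of RollingFileSystemSinkTestBase. "MyMetrics1" and the path are assumed
// placeholders for an annotated metrics source and a writable directory.
@Test
public void testRollingSinkWritesMetrics() throws Exception {
    MetricsSystem ms = initMetricsSystem("/tmp/rolling-sink-test", true, false, false);
    try {
        // "testsrc" mirrors the ".sink.mysink0.source" value configured by
        // initMetricsSystem above; MyMetrics1 is an assumed annotated source.
        MyMetrics1 metrics = ms.register("testsrc", "test source", new MyMetrics1());
        metrics.testMetric1.incr();
        // Force an immediate snapshot instead of waiting for the timer period.
        ms.publishMetricsNow();
    } finally {
        ms.stop();
        ms.shutdown();
    }
}

The boolean arguments map to ignoreErrors, allowAppend, and useSecureParams in that order; passing false for useSecureParams skips the keytab and principal properties.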

Example 18 with MetricsSystemImpl

Use of org.apache.hadoop.metrics2.impl.MetricsSystemImpl in project hadoop by apache.

From the class TestFileSink, method testFileSink.

@Test(timeout = 6000)
public void testFileSink() throws IOException {
    outFile = getTestTempFile("test-file-sink-", ".out");
    final String outPath = outFile.getAbsolutePath();
    // NB: specify large period to avoid multiple metrics snapshotting: 
    new ConfigBuilder().add("*.period", 10000)
        .add("test.sink.mysink0.class", FileSink.class.getName())
        .add("test.sink.mysink0.filename", outPath)
        .add("test.sink.mysink0.context", "test1")
        .save(TestMetricsConfig.getTestFilename("hadoop-metrics2-test"));
    MetricsSystemImpl ms = new MetricsSystemImpl("test");
    ms.start();
    final MyMetrics1 mm1 = new MyMetrics1().registerWith(ms);
    new MyMetrics2().registerWith(ms);
    mm1.testMetric1.incr();
    mm1.testMetric2.incr(2);
    // publish the metrics
    ms.publishMetricsNow();
    ms.stop();
    ms.shutdown();
    InputStream is = null;
    ByteArrayOutputStream baos = null;
    String outFileContent = null;
    try {
        is = new FileInputStream(outFile);
        baos = new ByteArrayOutputStream((int) outFile.length());
        IOUtils.copyBytes(is, baos, 1024, true);
        outFileContent = new String(baos.toByteArray(), "UTF-8");
    } finally {
        IOUtils.cleanup(null, baos, is);
    }
    // Check the out file content. Should be something like the following:
    //1360244820087 test1.testRecord1: Context=test1, testTag1=testTagValue1, testTag2=testTagValue2, Hostname=myhost, testMetric1=1, testMetric2=2
    //1360244820089 test1.testRecord2: Context=test1, testTag22=testTagValue22, Hostname=myhost
    // Note that in the below expression we allow tags and metrics to go in arbitrary order.  
    Pattern expectedContentPattern = Pattern.compile(
        // line #1:
        "^\\d+\\s+test1.testRecord1:\\s+Context=test1,\\s+" +
        "(testTag1=testTagValue1,\\s+testTag2=testTagValue2|testTag2=testTagValue2,\\s+testTag1=testTagValue1)," +
        "\\s+Hostname=.*,\\s+(testMetric1=1,\\s+testMetric2=2|testMetric2=2,\\s+testMetric1=1)" +
        // line #2:
        "$[\\n\\r]*^\\d+\\s+test1.testRecord2:\\s+Context=test1," +
        "\\s+testTag22=testTagValue22,\\s+Hostname=.*$[\\n\\r]*",
        Pattern.MULTILINE);
    assertTrue(expectedContentPattern.matcher(outFileContent).matches());
}
Also used : Pattern (java.util.regex.Pattern), FileInputStream (java.io.FileInputStream), InputStream (java.io.InputStream), ConfigBuilder (org.apache.hadoop.metrics2.impl.ConfigBuilder), ByteArrayOutputStream (java.io.ByteArrayOutputStream), MetricsSystemImpl (org.apache.hadoop.metrics2.impl.MetricsSystemImpl), Test (org.junit.Test)
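
For reference, the MyMetrics1 source registered at the top of this test is an annotated metrics source. The sketch below is a reconstruction of its general shape rather than a verbatim copy from TestFileSink; it shows how the record name, context, tags, and gauges matched by the regular expression above are typically declared (@Metrics and @Metric live in org.apache.hadoop.metrics2.annotation, the mutable gauges in org.apache.hadoop.metrics2.lib):

// A rough sketch (reconstructed, not copied verbatim from TestFileSink) of an
// annotated source like MyMetrics1: a record named "testRecord1" in context "test1"
// with two tags, two gauges, and a registerWith() convenience helper.
@Metrics(name = "testRecord1", context = "test1")
static class MyMetrics1 {

    @Metric(value = {"testTag1", ""}, type = Metric.Type.TAG)
    String testTag1() { return "testTagValue1"; }

    @Metric(value = {"testTag2", ""}, type = Metric.Type.TAG)
    String testTag2() { return "testTagValue2"; }

    @Metric(value = {"testMetric1", "An integer gauge"}, always = true)
    MutableGaugeInt testMetric1;

    @Metric(value = {"testMetric2", "A long gauge"}, always = true)
    MutableGaugeLong testMetric2;

    MyMetrics1 registerWith(MetricsSystem ms) {
        return ms.register("m1", null, this);
    }
}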

Aggregations

Test (org.junit.Test): 14
MetricsSystemImpl (org.apache.hadoop.metrics2.impl.MetricsSystemImpl): 8
MetricsSystem (org.apache.hadoop.metrics2.MetricsSystem): 7
DefaultMetricsSystem (org.apache.hadoop.metrics2.lib.DefaultMetricsSystem): 5
MetricsSink (org.apache.hadoop.metrics2.MetricsSink): 4
MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord): 3
MetricsSource (org.apache.hadoop.metrics2.MetricsSource): 2
ConfigBuilder (org.apache.hadoop.metrics2.impl.ConfigBuilder): 2
Before (org.junit.Before): 2
Predicate (com.google.common.base.Predicate): 1
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 1
FileInputStream (java.io.FileInputStream): 1
InputStream (java.io.InputStream): 1
Pattern (java.util.regex.Pattern): 1
Nullable (javax.annotation.Nullable): 1
AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric): 1
AbstractGangliaSink (org.apache.hadoop.metrics2.sink.ganglia.AbstractGangliaSink): 1
GangliaSink30 (org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30): 1
GangliaSink31 (org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31): 1
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 1