
Example 1 with MetricsInfo

Use of org.apache.heron.spi.metricsmgr.metrics.MetricsInfo in the Heron project by Twitter.

From class GraphiteSink, method processRecord:

@Override
public void processRecord(MetricsRecord record) {
    StringBuilder lines = new StringBuilder();
    StringBuilder metricsPathPrefix = new StringBuilder();
    // Build the hierarchical path under which the metrics appear in Graphite.
    // The metricsPathPrefix looks like:
    // {metricsPrefix}.{topologyName}.{host:port/componentName/instanceId}
    metricsPathPrefix.append(metricsPrefix).append(".").append(topologyName)
        .append(".").append(record.getSource());
    // The record timestamp is in milliseconds, while Graphite expects an epoch time in seconds.
    long timestamp = record.getTimestamp() / 1000L;
    // Each line has the format: {metricsPathPrefix}.{metricName} {metricValue} {timestamp}\n
    for (MetricsInfo metricsInfo : record.getMetrics()) {
        lines.append(metricsPathPrefix)
            .append(".")
            .append(metricsInfo.getName().replace(' ', '.'))
            .append(" ")
            .append(metricsInfo.getValue())
            .append(" ")
            .append(timestamp)
            .append("\n");
    }
    try {
        graphite.write(lines.toString());
    } catch (IOException e) {
        LOG.log(Level.SEVERE, "Error sending metrics to Graphite. Dropping messages...", e);
        // Close the connection here; GraphiteSink.close() holds the fuller cleanup logic for the whole sink
        try {
            graphite.close();
        } catch (IOException e1) {
            LOG.log(Level.SEVERE, "Error closing connection to Graphite", e1);
        }
    }
}
Also used : MetricsInfo(org.apache.heron.spi.metricsmgr.metrics.MetricsInfo) IOException(java.io.IOException)
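
GraphiteSink writes Graphite's plaintext protocol, one "{path} {value} {timestamp}" line per metric. Below is a minimal, self-contained sketch of how one such line is shaped; the prefix, topology name, source, metric name, and value are hypothetical stand-ins for what the sink reads from its configuration and the record.

public final class GraphiteLineDemo {
    public static void main(String[] args) {
        // Hypothetical inputs; in GraphiteSink these come from the sink config and the record.
        String metricsPrefix = "heron";
        String topologyName = "ExclamationTopology";
        String source = "host1:6000/exclaim1/container_1_exclaim1_1";
        long timestampSeconds = System.currentTimeMillis() / 1000L;
        // Same shape as the sink's output:
        // {metricsPrefix}.{topologyName}.{source}.{metricName} {metricValue} {timestamp}
        String path = metricsPrefix + "." + topologyName + "." + source + "."
            + "ack count".replace(' ', '.');
        System.out.println(path + " " + "42" + " " + timestampSeconds);
    }
}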

Example 2 with MetricsInfo

Use of org.apache.heron.spi.metricsmgr.metrics.MetricsInfo in the Heron project by Twitter.

From class MetricsCacheSink, method processRecord:

@Override
public void processRecord(MetricsRecord record) {
    LOG.info("metricscache sink processRecord");
    // Format it into TopologyManager.PublishMetrics
    // The format of record is "host:port/componentName/instanceId"
    // So MetricsRecord.getSource().split("/") would be an array with 3 elements:
    // ["host:port", componentName, instanceId]
    String[] sources = MetricsUtil.splitRecordSource(record);
    String hostPort = sources[0];
    String componentName = sources[1];
    String instanceId = sources[2];
    TopologyManager.PublishMetrics.Builder publishMetrics = TopologyManager.PublishMetrics.newBuilder();
    // Unneeded metrics have already been filtered out by tManagerMetricsFilter
    for (MetricsInfo metricsInfo : tManagerMetricsFilter.filter(record.getMetrics())) {
        TopologyManager.MetricDatum metricDatum = TopologyManager.MetricDatum.newBuilder()
            .setComponentName(componentName)
            .setInstanceId(instanceId)
            .setName(metricsInfo.getName())
            .setValue(metricsInfo.getValue())
            .setTimestamp(record.getTimestamp())
            .build();
        publishMetrics.addMetrics(metricDatum);
    }
    for (ExceptionInfo exceptionInfo : record.getExceptions()) {
        String exceptionStackTrace = exceptionInfo.getStackTrace();
        // Keep only the first two lines of the stack trace; the split limit of 3
        // leaves the rest of the trace in the (unused) third element.
        String[] exceptionStackTraceLines = exceptionStackTrace.split("\r\n|[\r\n]", 3);
        String exceptionStackTraceFirstTwoLines = String.join(System.lineSeparator(),
            exceptionStackTraceLines[0], exceptionStackTraceLines[1]);
        TopologyManager.TmanagerExceptionLog exceptionLog = TopologyManager.TmanagerExceptionLog.newBuilder()
            .setComponentName(componentName)
            .setHostname(hostPort)
            .setInstanceId(instanceId)
            .setStacktrace(exceptionStackTraceFirstTwoLines)
            .setLasttime(exceptionInfo.getLastTime())
            .setFirsttime(exceptionInfo.getFirstTime())
            .setCount(exceptionInfo.getCount())
            .setLogging(exceptionInfo.getLogging())
            .build();
        publishMetrics.addExceptions(exceptionLog);
    }
    metricsCommunicator.offer(publishMetrics.build());
    // Update metrics
    sinkContext.exportCountMetric(RECORD_PROCESS_COUNT, 1);
    sinkContext.exportCountMetric(METRICS_COUNT, publishMetrics.getMetricsCount());
    sinkContext.exportCountMetric(EXCEPTIONS_COUNT, publishMetrics.getExceptionsCount());
    checkCommunicator(metricsCommunicator, MAX_COMMUNICATOR_SIZE);
}
Also used : MetricsInfo(org.apache.heron.spi.metricsmgr.metrics.MetricsInfo) TopologyManager(org.apache.heron.proto.tmanager.TopologyManager) ExceptionInfo(org.apache.heron.spi.metricsmgr.metrics.ExceptionInfo)
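
MetricsUtil.splitRecordSource is not shown on this page. Assuming it simply splits the documented host:port/componentName/instanceId source string on "/", a stand-in (the class and method names here are hypothetical) would look like:

import java.util.Arrays;

public final class RecordSourceDemo {
    // Hypothetical stand-in for MetricsUtil.splitRecordSource; the real helper
    // takes a MetricsRecord and splits its getSource() the same way.
    static String[] splitSource(String source) {
        return source.split("/");
    }

    public static void main(String[] args) {
        String[] parts = splitSource("host1:6000/exclaim1/container_1_exclaim1_1");
        System.out.println(Arrays.toString(parts));
        // => [host1:6000, exclaim1, container_1_exclaim1_1]
    }
}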

Example 3 with MetricsInfo

Use of org.apache.heron.spi.metricsmgr.metrics.MetricsInfo in the Heron project by Twitter.

From class WebSinkTest, method testGroupedMetrics:

/**
 * Tests the grouped (non-flat) metrics map.
 */
@Test
public void testGroupedMetrics() {
    Map<String, Object> conf = new HashMap<>(defaultConf);
    conf.put("flat-metrics", "false");
    WebTestSink sink = new WebTestSink();
    sink.init(conf, context);
    for (MetricsRecord r : records) {
        sink.processRecord(r);
    }
    // Update and override MetricsRecord 1
    Iterable<MetricsInfo> infos2 = Arrays.asList(
        new MetricsInfo("metric_1", "3.0"),
        new MetricsInfo("metric_3", "1.0"));
    sink.processRecord(new MetricsRecord(
        records.get(0).getSource(), infos2, Collections.<ExceptionInfo>emptyList()));
    Map<String, Object> results = sink.getMetrics();
    Assert.assertEquals(2, results.size());
    @SuppressWarnings("unchecked") Map<String, Object> record1 = (Map<String, Object>) results.get("/stuff/record_1");
    @SuppressWarnings("unchecked") Map<String, Object> record2 = (Map<String, Object>) results.get("/record_2");
    Assert.assertEquals(3.0d, record1.get("metric_1"));
    Assert.assertEquals(2.0d, record1.get("metric_2"));
    Assert.assertEquals(1.0d, record1.get("metric_3"));
    Assert.assertEquals(1.0d, record2.get("metric_1"));
    Assert.assertEquals(2.0d, record2.get("metric_2"));
}
Also used : MetricsInfo(org.apache.heron.spi.metricsmgr.metrics.MetricsInfo) HashMap(java.util.HashMap) MetricsRecord(org.apache.heron.spi.metricsmgr.metrics.MetricsRecord) Map(java.util.Map) ExceptionInfo(org.apache.heron.spi.metricsmgr.metrics.ExceptionInfo) Test(org.junit.Test)
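
For context, the flat-metrics switch controls whether WebSink keys every metric by its full path or groups metrics per record source, which is the mode this test exercises. A rough sketch of the two map shapes (the keys and values are hypothetical):

import java.util.HashMap;
import java.util.Map;

public final class WebSinkShapesDemo {
    public static void main(String[] args) {
        // flat-metrics = true: one flat map keyed by "{source}/{metricName}"
        Map<String, Object> flat = new HashMap<>();
        flat.put("/stuff/record_1/metric_1", 3.0d);

        // flat-metrics = false (as in the test): one nested map per record source
        Map<String, Map<String, Object>> grouped = new HashMap<>();
        grouped.computeIfAbsent("/stuff/record_1", k -> new HashMap<>())
            .put("metric_1", 3.0d);

        System.out.println(flat);
        System.out.println(grouped);
    }
}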

Example 4 with MetricsInfo

Use of org.apache.heron.spi.metricsmgr.metrics.MetricsInfo in the Heron project by Twitter.

From class PrometheusSinkTests, method before:

@Before
public void before() throws IOException {
    defaultConf = new HashMap<>();
    defaultConf.put("port", "9999");
    defaultConf.put("path", "test");
    defaultConf.put("flat-metrics", "true");
    defaultConf.put("include-topology-name", "false");
    context = Mockito.mock(SinkContext.class);
    Mockito.when(context.getTopologyName()).thenReturn("testTopology");
    Mockito.when(context.getSinkId()).thenReturn("testId");
    /*
    # example: metrics.yaml
    rules:
      - pattern: kafka.(\w+)<type=(.+), name=(.+)PerSec\w*, (.+)=(.+)><>Count
        name: kafka_$1_$2_$3_total
        attrNameSnakeCase: true
        type: COUNTER
        labels:
          "$4": "$5"
    */
    /*
    example: metrics
      kafkaOffset/nginx-lfp-beacon/totalSpoutLag
      kafkaOffset/lads_event_meta_backfill_data/partition_10/spoutLag
     */
    List<Map<String, Object>> rules = Lists.newArrayList();
    defaultConf.put("rules", rules);
    Map<String, Object> rule1 = Maps.newHashMap();
    Map<String, Object> labels1 = Maps.newHashMap();
    rules.add(rule1);
    rule1.put("pattern", "kafkaOffset/(.+)/(.+)");
    rule1.put("name", "kafka_offset_$2");
    rule1.put("type", "COUNTER");
    rule1.put("attrNameSnakeCase", true);
    rule1.put("labels", labels1);
    labels1.put("topic", "$1");
    Map<String, Object> rule2 = Maps.newHashMap();
    Map<String, Object> labels2 = Maps.newHashMap();
    rules.add(rule2);
    rule2.put("pattern", "kafkaOffset/(.+)/partition_(\\d+)/(.+)");
    rule2.put("name", "kafka_offset_partition_$3");
    rule2.put("type", "COUNTER");
    rule2.put("labels", labels2);
    rule2.put("attrNameSnakeCase", true);
    labels2.put("topic", "$1");
    labels2.put("partition", "$2");
    Iterable<MetricsInfo> infos = Arrays.asList(
        new MetricsInfo("metric_1", "1.0"),
        new MetricsInfo("metric_2", "2.0"));
    records = Arrays.asList(
        newRecord("machine/component/instance_1", infos, Collections.emptyList()),
        newRecord("machine/component/instance_2", infos, Collections.emptyList()));
}
Also used : MetricsInfo(org.apache.heron.spi.metricsmgr.metrics.MetricsInfo) SinkContext(org.apache.heron.spi.metricsmgr.sink.SinkContext) HashMap(java.util.HashMap) Map(java.util.Map) Before(org.junit.Before)
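
The sink's actual rule engine is not shown here, but applying rule2 by hand illustrates the intended rewrite: the pattern captures the topic and partition as labels, the name template renames the metric, and attrNameSnakeCase converts camelCase to snake_case. A minimal sketch, with the snake_case step approximated by a regex:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class RuleRewriteDemo {
    public static void main(String[] args) {
        // rule2 from the setup above, applied by hand
        Pattern p = Pattern.compile("kafkaOffset/(.+)/partition_(\\d+)/(.+)");
        Matcher m = p.matcher("kafkaOffset/event_data/partition_10/spoutLag");
        if (m.matches()) {
            // Name template "kafka_offset_partition_$3"; the regex below is an
            // approximation of what attrNameSnakeCase does to the captured group.
            String name = "kafka_offset_partition_"
                + m.group(3).replaceAll("([a-z])([A-Z])", "$1_$2").toLowerCase();
            System.out.println(name);                      // => kafka_offset_partition_spout_lag
            System.out.println("topic=" + m.group(1));     // => topic=event_data
            System.out.println("partition=" + m.group(2)); // => partition=10
        }
    }
}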

Example 5 with MetricsInfo

Use of org.apache.heron.spi.metricsmgr.metrics.MetricsInfo in the Heron project by Twitter.

From class PrometheusSinkTests, method testApacheStormKafkaMetrics:

@Test
public void testApacheStormKafkaMetrics() throws IOException {
    Iterable<MetricsInfo> infos = Arrays.asList(
        new MetricsInfo("kafkaOffset/event_data/partition_0/spoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_10/spoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_0/earliestTimeOffset", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/totalRecordsInPartitions", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/totalSpoutLag", "1.0"),
        new MetricsInfo("kafkaOffset/event_data/partition_2/spoutLag", "1.0"));
    records = Arrays.asList(newRecord(
        "shared-aurora-036:31/spout-release-1/container_1_spout-release-1_31",
        infos, Collections.emptyList()));
    PrometheusTestSink sink = new PrometheusTestSink();
    sink.init(defaultConf, context);
    for (MetricsRecord r : records) {
        sink.processRecord(r);
    }
    final String topology = "testTopology";
    final List<String> expectedLines = Arrays.asList(
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "0", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "10", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_earliest_time_offset", "event_data", "0", "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_total_records_in_partitions", "event_data", null, "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_total_spout_lag", "event_data", null, "1.0"),
        createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31",
            "kafka_offset_partition_spout_lag", "event_data", "2", "1.0"));
    final Set<String> generatedLines = new HashSet<>(
        Arrays.asList(new String(sink.generateResponse()).split("\n")));
    assertEquals(expectedLines.size(), generatedLines.size());
    expectedLines.forEach(line -> assertTrue(generatedLines.contains(line)));
}
Also used : MetricsInfo(org.apache.heron.spi.metricsmgr.metrics.MetricsInfo) MetricsRecord(org.apache.heron.spi.metricsmgr.metrics.MetricsRecord) HashSet(java.util.HashSet) Test(org.junit.Test)
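
createOffsetMetric is a test helper not shown on this page. Assuming it renders standard Prometheus exposition format, one expected line would look roughly like the sketch below; the exact label names the sink emits are an assumption for illustration.

public final class PrometheusLineDemo {
    public static void main(String[] args) {
        // Rough shape of one expected exposition line; the label set is hypothetical.
        String line = String.format(
            "%s{topology=\"%s\",component=\"%s\",instance_id=\"%s\",topic=\"%s\",partition=\"%s\"} %s",
            "kafka_offset_partition_spout_lag", "testTopology", "spout-release-1",
            "container_1_spout-release-1_31", "event_data", "10", "1.0");
        System.out.println(line);
    }
}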

Aggregations

MetricsInfo (org.apache.heron.spi.metricsmgr.metrics.MetricsInfo): 12
ExceptionInfo (org.apache.heron.spi.metricsmgr.metrics.ExceptionInfo): 7
MetricsRecord (org.apache.heron.spi.metricsmgr.metrics.MetricsRecord): 7
Test (org.junit.Test): 4
HashMap (java.util.HashMap): 3
ArrayList (java.util.ArrayList): 2
HashSet (java.util.HashSet): 2
Map (java.util.Map): 2
TopologyManager (org.apache.heron.proto.tmanager.TopologyManager): 2
SinkContext (org.apache.heron.spi.metricsmgr.sink.SinkContext): 2
Before (org.junit.Before): 2
JsonProcessingException (com.fasterxml.jackson.core.JsonProcessingException): 1
IOException (java.io.IOException): 1
LinkedList (java.util.LinkedList): 1
CountDownLatch (java.util.concurrent.CountDownLatch): 1
Metrics (org.apache.heron.proto.system.Metrics): 1