
Example 26 with AbstractMetric

use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.

The putMetrics method of class GangliaSink30:

@Override
public void putMetrics(MetricsRecord record) {
    // Handles both dense publish of metrics and sparse
    // (only-on-change) publish of metrics
    try {
        String recordName = record.name();
        String contextName = record.context();
        StringBuilder sb = new StringBuilder();
        sb.append(contextName);
        sb.append('.');
        sb.append(recordName);
        appendPrefix(record, sb);
        String groupName = sb.toString();
        sb.append('.');
        int sbBaseLen = sb.length();
        String type = null;
        GangliaSlope slopeFromMetric = null;
        GangliaSlope calculatedSlope = null;
        Record cachedMetrics = null;
        // reset the buffer to the beginning
        resetBuffer();
        if (!isSupportSparseMetrics()) {
            // for sending dense metrics, update metrics cache
            // and get the updated data
            cachedMetrics = metricsCache.update(record);
            if (cachedMetrics != null && cachedMetrics.metricsEntrySet() != null) {
                for (Map.Entry<String, AbstractMetric> entry : cachedMetrics.metricsEntrySet()) {
                    AbstractMetric metric = entry.getValue();
                    sb.append(metric.name());
                    String name = sb.toString();
                    // visit the metric to identify the Ganglia type and
                    // slope
                    metric.visit(gangliaMetricVisitor);
                    type = gangliaMetricVisitor.getType();
                    slopeFromMetric = gangliaMetricVisitor.getSlope();
                    GangliaConf gConf = getGangliaConfForMetric(name);
                    calculatedSlope = calculateSlope(gConf, slopeFromMetric);
                    // send metric to Ganglia
                    emitMetric(groupName, name, type, metric.value().toString(), gConf, calculatedSlope);
                    // reset the length of the buffer for next iteration
                    sb.setLength(sbBaseLen);
                }
            }
        } else {
            // we support sparse updates
            Collection<AbstractMetric> metrics = (Collection<AbstractMetric>) record.metrics();
            if (metrics.size() > 0) {
                // we got metrics. so send the latest
                for (AbstractMetric metric : record.metrics()) {
                    sb.append(metric.name());
                    String name = sb.toString();
                    // visit the metric to identify the Ganglia type and
                    // slope
                    metric.visit(gangliaMetricVisitor);
                    type = gangliaMetricVisitor.getType();
                    slopeFromMetric = gangliaMetricVisitor.getSlope();
                    GangliaConf gConf = getGangliaConfForMetric(name);
                    calculatedSlope = calculateSlope(gConf, slopeFromMetric);
                    // send metric to Ganglia
                    emitMetric(groupName, name, type, metric.value().toString(), gConf, calculatedSlope);
                    // reset the length of the buffer for next iteration
                    sb.setLength(sbBaseLen);
                }
            }
        }
    } catch (IOException io) {
        throw new MetricsException("Failed to putMetrics", io);
    }
}
Also used: AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), IOException (java.io.IOException), Collection (java.util.Collection), Record (org.apache.hadoop.metrics2.util.MetricsCache.Record), MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), MetricsException (org.apache.hadoop.metrics2.MetricsException), HashMap (java.util.HashMap), Map (java.util.Map)
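
Every sink on this page implements the org.apache.hadoop.metrics2.MetricsSink interface that GangliaSink30 ultimately satisfies. Below is a minimal sketch of a custom sink using the same record-iteration pattern; the class name LoggingSink is hypothetical, and the SubsetConfiguration import comes from commons-configuration2 on Hadoop 3 (plain commons-configuration on Hadoop 2).

import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricsRecord;
import org.apache.hadoop.metrics2.MetricsSink;

public class LoggingSink implements MetricsSink {

    @Override
    public void init(SubsetConfiguration conf) {
        // no sink-specific configuration in this sketch
    }

    @Override
    public void putMetrics(MetricsRecord record) {
        // build "context.recordName metric=value ..." the way the Ganglia sink does
        StringBuilder sb = new StringBuilder();
        sb.append(record.context()).append('.').append(record.name());
        for (AbstractMetric metric : record.metrics()) {
            sb.append(' ').append(metric.name()).append('=').append(metric.value());
        }
        System.out.println(sb);
    }

    @Override
    public void flush() {
        // nothing is buffered, so flush is a no-op
    }
}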

Example 27 with AbstractMetric

use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.

The testUpdate method of class TestMetricsCache:

@SuppressWarnings("deprecation")
@Test
public void testUpdate() {
    MetricsCache cache = new MetricsCache();
    MetricsRecord mr = makeRecord("r", Arrays.asList(makeTag("t", "tv")), Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));
    MetricsCache.Record cr = cache.update(mr);
    verify(mr).name();
    verify(mr).tags();
    verify(mr).metrics();
    assertEquals("same record size", cr.metrics().size(), ((Collection<AbstractMetric>) mr.metrics()).size());
    assertEquals("same metric value", 0, cr.getMetric("m"));
    MetricsRecord mr2 = makeRecord("r", Arrays.asList(makeTag("t", "tv")), Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
    cr = cache.update(mr2);
    assertEquals("contains 3 metric", 3, cr.metrics().size());
    checkMetricValue("updated metric value", cr, "m", 2);
    checkMetricValue("old metric value", cr, "m1", 1);
    checkMetricValue("new metric value", cr, "m2", 42);
    MetricsRecord mr3 = makeRecord("r", // different tag value
    Arrays.asList(makeTag("t", "tv3")), Arrays.asList(makeMetric("m3", 3)));
    // should get a new record
    cr = cache.update(mr3);
    assertEquals("contains 1 metric", 1, cr.metrics().size());
    checkMetricValue("updated metric value", cr, "m3", 3);
    // tags cache should be empty so far
    assertEquals("no tags", 0, cr.tags().size());
    // until now
    cr = cache.update(mr3, true);
    assertEquals("Got 1 tag", 1, cr.tags().size());
    assertEquals("Tag value", "tv3", cr.getTag("t"));
    checkMetricValue("Metric value", cr, "m3", 3);
}
Also used: MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), Test (org.junit.Test)
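
The checkMetricValue helper called throughout this test is not shown on this page. A plausible sketch, assuming it simply compares against MetricsCache.Record.getMetric, the accessor already exercised at the top of the test:

private void checkMetricValue(String description, MetricsCache.Record cr,
                              String key, Number expected) {
    // getMetric returns the cached value for the named metric, or null
    assertEquals(description, expected, cr.getMetric(key));
}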

Example 28 with AbstractMetric

use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.

The makeRecord method of class TestMetricsCache:

private MetricsRecord makeRecord(String name, Collection<MetricsTag> tags, Collection<AbstractMetric> metrics) {
    MetricsRecord mr = mock(MetricsRecord.class);
    when(mr.name()).thenReturn(name);
    when(mr.tags()).thenReturn(tags);
    when(mr.metrics()).thenReturn(metrics);
    return mr;
}
Also used: MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord)
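
makeRecord in turn relies on makeTag and makeMetric, which this page omits. A sketch in the same Mockito style, assuming AbstractMetric is mocked just like MetricsRecord and MetricsTag is constructed from interned metadata via org.apache.hadoop.metrics2.lib.Interns:

private MetricsTag makeTag(String name, String value) {
    // a concrete MetricsTag; the empty string is the tag description
    return new MetricsTag(Interns.info(name, ""), value);
}

private AbstractMetric makeMetric(String name, Number value) {
    // stub only the accessors MetricsCache actually reads
    AbstractMetric metric = mock(AbstractMetric.class);
    when(metric.name()).thenReturn(name);
    when(metric.value()).thenReturn(value);
    return metric;
}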

Example 29 with AbstractMetric

use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.

The putMetrics method of class FileSink:

@Override
public void putMetrics(MetricsRecord record) {
    writer.print(record.timestamp());
    writer.print(" ");
    writer.print(record.context());
    writer.print(".");
    writer.print(record.name());
    String separator = ": ";
    for (MetricsTag tag : record.tags()) {
        writer.print(separator);
        separator = ", ";
        writer.print(tag.name());
        writer.print("=");
        writer.print(tag.value());
    }
    for (AbstractMetric metric : record.metrics()) {
        writer.print(separator);
        separator = ", ";
        writer.print(metric.name());
        writer.print("=");
        writer.print(metric.value());
    }
    writer.println();
}
Also used: AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), MetricsTag (org.apache.hadoop.metrics2.MetricsTag)
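
FileSink is activated through hadoop-metrics2.properties rather than code. A minimal sketch using the standard prefix.sink.instance.option naming; the filename value is illustrative:

# route all metrics sources to a FileSink instance named "file"
*.sink.file.class=org.apache.hadoop.metrics2.sink.FileSink
# output file for the namenode daemon (read in FileSink.init)
namenode.sink.file.filename=namenode-metrics.out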

Example 30 with AbstractMetric

use of org.apache.hadoop.metrics2.AbstractMetric in project hadoop by apache.

The putMetrics method of class KafkaSink:

@Override
public void putMetrics(MetricsRecord record) {
    if (producer == null) {
        throw new MetricsException("Producer in KafkaSink is null!");
    }
    // Create the json object.
    StringBuilder jsonLines = new StringBuilder();
    long timestamp = record.timestamp();
    Instant instant = Instant.ofEpochMilli(timestamp);
    LocalDateTime ldt = LocalDateTime.ofInstant(instant, zoneId);
    String date = ldt.format(dateFormat);
    String time = ldt.format(timeFormat);
    // Collect datapoints and populate the json object.
    jsonLines.append("{\"hostname\": \"" + hostname);
    jsonLines.append("\", \"timestamp\": " + timestamp);
    jsonLines.append(", \"date\": \"" + date);
    jsonLines.append("\",\"time\": \"" + time);
    jsonLines.append("\",\"name\": \"" + record.name() + "\" ");
    for (MetricsTag tag : record.tags()) {
        jsonLines.append(", \"" + tag.name().toString().replaceAll("[\\p{Cc}]", "") + "\": ");
        jsonLines.append(" \"" + tag.value().toString() + "\"");
    }
    for (AbstractMetric metric : record.metrics()) {
        jsonLines.append(", \"" + metric.name().toString().replaceAll("[\\p{Cc}]", "") + "\": ");
        jsonLines.append(" \"" + metric.value().toString() + "\"");
    }
    jsonLines.append("}");
    LOG.debug("kafka message: " + jsonLines.toString());
    // Create the record to be sent from the json.
    ProducerRecord<Integer, byte[]> data = new ProducerRecord<Integer, byte[]>(topic, jsonLines.toString().getBytes(Charset.forName("UTF-8")));
    // Send the data to the Kafka broker. Here is an example of this data:
    // {"hostname": "...", "timestamp": 1436913651516,
    // "date": "2015-6-14","time": "22:40:51","context": "yarn","name":
    // "QueueMetrics, "running_0": "1", "running_60": "0", "running_300": "0",
    // "running_1440": "0", "AppsSubmitted": "1", "AppsRunning": "1",
    // "AppsPending": "0", "AppsCompleted": "0", "AppsKilled": "0",
    // "AppsFailed": "0", "AllocatedMB": "134656", "AllocatedVCores": "132",
    // "AllocatedContainers": "132", "AggregateContainersAllocated": "132",
    // "AggregateContainersReleased": "0", "AvailableMB": "0",
    // "AvailableVCores": "0", "PendingMB": "275456", "PendingVCores": "269",
    // "PendingContainers": "269", "ReservedMB": "0", "ReservedVCores": "0",
    // "ReservedContainers": "0", "ActiveUsers": "1", "ActiveApplications": "1"}
    Future<RecordMetadata> future = producer.send(data);
    jsonLines.setLength(0);
    try {
        future.get();
    } catch (InterruptedException e) {
        throw new MetricsException("Error sending data", e);
    } catch (ExecutionException e) {
        throw new MetricsException("Error sending data", e);
    }
}
Also used: LocalDateTime (java.time.LocalDateTime), Instant (java.time.Instant), AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric), MetricsTag (org.apache.hadoop.metrics2.MetricsTag), RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata), ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord), MetricsException (org.apache.hadoop.metrics2.MetricsException), ExecutionException (java.util.concurrent.ExecutionException)
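
KafkaSink is wired up the same way. A sketch assuming the broker_list and topic option names read in this sink's init method; the broker address and topic value are illustrative:

*.sink.kafka.class=org.apache.hadoop.metrics2.sink.KafkaSink
*.sink.kafka.broker_list=broker-1.example.com:9092
*.sink.kafka.topic=hadoop-metrics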

Aggregations

AbstractMetric (org.apache.hadoop.metrics2.AbstractMetric): 32 uses
MetricsTag (org.apache.hadoop.metrics2.MetricsTag): 19 uses
MetricsRecord (org.apache.hadoop.metrics2.MetricsRecord): 17 uses
Test (org.junit.Test): 12 uses
ArrayList (java.util.ArrayList): 8 uses
HashSet (java.util.HashSet): 8 uses
IOException (java.io.IOException): 7 uses
GraphiteSink (org.apache.hadoop.metrics2.sink.GraphiteSink): 4 uses
MetricsException (org.apache.hadoop.metrics2.MetricsException): 3 uses
Matchers.anyString (org.mockito.Matchers.anyString): 3 uses
DatagramPacket (java.net.DatagramPacket): 2 uses
DatagramSocket (java.net.DatagramSocket): 2 uses
HashMap (java.util.HashMap): 2 uses
MetricsVisitor (org.apache.hadoop.metrics2.MetricsVisitor): 2 uses
MetricsCollectorImpl (org.apache.hadoop.metrics2.impl.MetricsCollectorImpl): 2 uses
StatsDSink (org.apache.hadoop.metrics2.sink.StatsDSink): 2 uses
StatsD (org.apache.hadoop.metrics2.sink.StatsDSink.StatsD): 2 uses
ProducerRecord (org.apache.kafka.clients.producer.ProducerRecord): 2 uses
RecordMetadata (org.apache.kafka.clients.producer.RecordMetadata): 2 uses
Predicate (com.google.common.base.Predicate): 1 use