Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project.
Class StatsDSink, method putMetrics.
@Override
public void putMetrics(MetricsRecord record) {
// Emits every datapoint in the record as one StatsD line:
//   [shortHostname.]serviceName.context.recordName.metricName:value|type
// Hostname, context and service name default to the sink's configured
// values but are overridden by the corresponding record tags when present.
String hn = hostName;
String ctx = record.context();
String sn = serviceName;
for (MetricsTag tag : record.tags()) {
if (tag.value() == null) {
// A tag without a value can never override anything; in the original
// per-branch null checks this fell through identically.
continue;
}
String tagName = tag.info().name();
if (tagName.equals(MsInfo.Hostname.name())) {
hn = tag.value();
} else if (tagName.equals(MsInfo.Context.name())) {
ctx = tag.value();
} else if (tagName.equals(MsInfo.ProcessName.name())) {
sn = tag.value();
}
}
// Build the common prefix once per record instead of per metric.
StringBuilder buf = new StringBuilder();
if (!skipHostname && hn != null) {
// Use only the short hostname (everything before the first dot).
int idx = hn.indexOf('.');
if (idx == -1) {
buf.append(hn).append(PERIOD);
} else {
buf.append(hn, 0, idx).append(PERIOD);
}
}
buf.append(sn).append(PERIOD);
buf.append(ctx).append(PERIOD);
// Dots in the record name would create extra StatsD hierarchy levels;
// replace(char, char) is equivalent to the old replaceAll("\\.", "-")
// but avoids compiling a regex on every record.
buf.append(record.name().replace('.', '-')).append(PERIOD);
String prefix = buf.toString();
// Collect datapoints.
for (AbstractMetric metric : record.metrics()) {
String type = null;
// Enum comparison with == is safe and null-tolerant.
if (metric.type() == MetricType.COUNTER) {
type = "c";
} else if (metric.type() == MetricType.GAUGE) {
type = "g";
}
// NOTE(review): metrics of any other type are still emitted with the
// literal type "null" — behavior preserved from the original.
StringBuilder line = new StringBuilder(prefix);
line.append(metric.name().replace(' ', '_'))
.append(":")
.append(metric.value())
.append("|")
.append(type);
writeMetric(line.toString());
}
}
Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project.
Class GangliaSink30, method putMetrics.
@Override
public void putMetrics(MetricsRecord record) {
// Publishes the record's metrics to Ganglia. Two modes:
//  - dense (default): merge the record into the metrics cache and emit the
//    full cached snapshot, so values absent from this record are re-sent;
//  - sparse: emit only the metrics present in this record.
try {
String recordName = record.name();
String contextName = record.context();
// Group name is "context.recordName" plus any configured tag suffixes.
StringBuilder sb = new StringBuilder();
sb.append(contextName);
sb.append('.');
sb.append(recordName);
appendPrefix(record, sb);
String groupName = sb.toString();
sb.append('.');
// Remember the prefix length so the buffer can be reused per metric.
int sbBaseLen = sb.length();
// reset the buffer to the beginning
resetBuffer();
if (!isSupportSparseMetrics()) {
// for sending dense metrics, update metrics cache
// and get the updated data
Record cachedMetrics = metricsCache.update(record);
if (cachedMetrics != null && cachedMetrics.metricsEntrySet() != null) {
for (Map.Entry<String, AbstractMetric> entry : cachedMetrics.metricsEntrySet()) {
emitGangliaMetric(entry.getValue(), sb, sbBaseLen, groupName);
}
}
} else {
// sparse updates: send only the metrics carried by this record
// (iterating an empty collection is a no-op, so no size() guard or
// Collection cast is needed)
for (AbstractMetric metric : record.metrics()) {
emitGangliaMetric(metric, sb, sbBaseLen, groupName);
}
}
} catch (IOException io) {
throw new MetricsException("Failed to putMetrics", io);
}
}

/**
 * Emits a single metric to Ganglia. Appends the metric name to {@code sb}
 * (which already holds the "group." prefix), resolves the Ganglia type and
 * slope via the visitor, sends the value, then truncates {@code sb} back to
 * {@code sbBaseLen} so the buffer can be reused for the next metric.
 */
private void emitGangliaMetric(AbstractMetric metric, StringBuilder sb,
    int sbBaseLen, String groupName) throws IOException {
sb.append(metric.name());
String name = sb.toString();
// visit the metric to identify the Ganglia type and slope
metric.visit(gangliaMetricVisitor);
String type = gangliaMetricVisitor.getType();
GangliaSlope slopeFromMetric = gangliaMetricVisitor.getSlope();
GangliaConf gConf = getGangliaConfForMetric(name);
GangliaSlope calculatedSlope = calculateSlope(gConf, slopeFromMetric);
// send metric to Ganglia
emitMetric(groupName, name, type, metric.value().toString(), gConf, calculatedSlope);
// reset the length of the buffer for next iteration
sb.setLength(sbBaseLen);
}
Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project.
Class GangliaSink30, method appendPrefix.
@InterfaceAudience.Private
public void appendPrefix(MetricsRecord record, StringBuilder sb) {
// Appends ".tagName=tagValue" for each tag selected for this record's
// context. Contexts absent from useTagsMap contribute nothing.
String ctx = record.context();
if (!useTagsMap.containsKey(ctx)) {
return;
}
// A null entry in the map means "use every tag" for this context.
Set<String> wanted = useTagsMap.get(ctx);
for (MetricsTag tag : record.tags()) {
boolean selected = wanted == null || wanted.contains(tag.name());
boolean appendable = tag.info() != MsInfo.Context
&& tag.info() != MsInfo.Hostname
&& tag.value() != null;
if (selected && appendable) {
sb.append('.').append(tag.name()).append('=').append(tag.value());
}
}
}
Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project.
Class TestMetricsCache, method testUpdate.
@SuppressWarnings("deprecation")
@Test
public void testUpdate() {
MetricsCache cache = new MetricsCache();

// A fresh record creates a new cache entry holding its metrics.
MetricsRecord record1 = makeRecord("r",
Arrays.asList(makeTag("t", "tv")),
Arrays.asList(makeMetric("m", 0), makeMetric("m1", 1)));
MetricsCache.Record entry = cache.update(record1);
verify(record1).name();
verify(record1).tags();
verify(record1).metrics();
assertEquals("same record size", entry.metrics().size(),
((Collection<AbstractMetric>) record1.metrics()).size());
assertEquals("same metric value", 0, entry.getMetric("m"));

// A record with the same name and tags merges into the existing entry:
// updated, untouched and brand-new metrics all coexist.
MetricsRecord record2 = makeRecord("r",
Arrays.asList(makeTag("t", "tv")),
Arrays.asList(makeMetric("m", 2), makeMetric("m2", 42)));
entry = cache.update(record2);
assertEquals("contains 3 metric", 3, entry.metrics().size());
checkMetricValue("updated metric value", entry, "m", 2);
checkMetricValue("old metric value", entry, "m1", 1);
checkMetricValue("new metric value", entry, "m2", 42);

// A different tag value keys a brand-new entry.
MetricsRecord record3 = makeRecord("r",
Arrays.asList(makeTag("t", "tv3")),
Arrays.asList(makeMetric("m3", 3)));
entry = cache.update(record3);
assertEquals("contains 1 metric", 1, entry.metrics().size());
checkMetricValue("updated metric value", entry, "m3", 3);
// Tags are cached only when the update explicitly asks for it.
assertEquals("no tags", 0, entry.tags().size());
entry = cache.update(record3, true);
assertEquals("Got 1 tag", 1, entry.tags().size());
assertEquals("Tag value", "tv3", entry.getTag("t"));
checkMetricValue("Metric value", entry, "m3", 3);
}
Use of org.apache.hadoop.metrics2.MetricsRecord in the Apache Hadoop project.
Class TestMetricsCache, method testGet.
@SuppressWarnings("deprecation")
@Test
public void testGet() {
MetricsCache cache = new MetricsCache();

// Nothing cached yet: a lookup for an unknown record must miss.
assertNull("empty", cache.get("r", Arrays.asList(makeTag("t", "t"))));

// After an update, the same name and tag set must hit the cached entry.
MetricsRecord record = makeRecord("r",
Arrays.asList(makeTag("t", "t")),
Arrays.asList(makeMetric("m", 1)));
cache.update(record);
MetricsCache.Record hit = cache.get("r", record.tags());
LOG.debug("tags=" + record.tags() + " cr=" + hit);
assertNotNull("Got record", hit);
assertEquals("contains 1 metric", 1, hit.metrics().size());
checkMetricValue("new metric value", hit, "m", 1);
}
Aggregations