Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
Class MetricsProcessorServiceTestBase, method getMetricValuesAddToExpected:
/**
 * Returns the expected {@link MetricValues} of the given {@link MetricType} and adds it to the
 * {@code expected} metrics map. If the {@link MetricValues} is of type {@code MetricType.COUNTER} and is already
 * present in {@code expected}, its existing value is incremented.
 *
 * @param expectedMetricPrefix the prefix added to metric names by {@link MetricStore}
 */
protected MetricValues getMetricValuesAddToExpected(int i, long startTimeSecs, Map<String, String> metricsContext,
                                                    Map<String, Long> expected, String expectedMetricPrefix,
                                                    MetricType metricType) throws TopicNotFoundException, IOException {
  MetricValues metric;
  if (MetricType.GAUGE.equals(metricType)) {
    String metricName = GAUGE_METRIC_NAME_PREFIX + i;
    metric = new MetricValues(metricsContext, metricName, startTimeSecs, i, metricType);
    expected.put(expectedMetricPrefix + metricName, (long) i);
  } else {
    metric = new MetricValues(metricsContext, COUNTER_METRIC_NAME, i, 1, metricType);
    String expectedCounterMetricName = expectedMetricPrefix + COUNTER_METRIC_NAME;
    Long currentValue = expected.get(expectedCounterMetricName);
    if (currentValue == null) {
      expected.put(expectedCounterMetricName, 1L);
    } else {
      expected.put(expectedCounterMetricName, currentValue + 1);
    }
  }
  recordWriter.encode(metric, encoder);
  return metric;
}
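The branch on MetricType drives how the expected map is maintained: each gauge gets its own entry keyed by the index, while counter emissions accumulate under a single name. The standalone sketch below reproduces just that bookkeeping with plain Java collections; the prefix and counter-name constants here are placeholders, not the values used by the test base.

  import java.util.HashMap;
  import java.util.Map;

  public final class ExpectedMetricsSketch {

    // Placeholder values; the real constants live in MetricsProcessorServiceTestBase.
    private static final String GAUGE_METRIC_NAME_PREFIX = "gauge.metric.";
    private static final String COUNTER_METRIC_NAME = "counter.metric";

    static void addToExpected(int i, Map<String, Long> expected, String prefix, boolean isGauge) {
      if (isGauge) {
        // Each gauge gets its own metric name, keyed by the index.
        expected.put(prefix + GAUGE_METRIC_NAME_PREFIX + i, (long) i);
      } else {
        // Map.merge collapses the null-check/increment branch of the original helper.
        expected.merge(prefix + COUNTER_METRIC_NAME, 1L, Long::sum);
      }
    }

    public static void main(String[] args) {
      Map<String, Long> expected = new HashMap<>();
      addToExpected(1, expected, "system.", false);
      addToExpected(2, expected, "system.", false);
      addToExpected(3, expected, "system.", true);
      // Prints {system.counter.metric=2, system.gauge.metric.3=3} (iteration order may vary).
      System.out.println(expected);
    }
  }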
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
Class DefaultMetricStore, method add:
@Override
public void add(Collection<? extends MetricValues> metricValues) throws Exception {
  List<CubeFact> facts = Lists.newArrayListWithCapacity(metricValues.size());
  for (MetricValues metricValue : metricValues) {
    String scope = metricValue.getTags().get(Constants.Metrics.Tag.SCOPE);
    List<Measurement> metrics = Lists.newArrayList();
    // todo improve this logic?
    for (MetricValue metric : metricValue.getMetrics()) {
      String measureName = (scope == null ? "system." : scope + ".") + metric.getName();
      MeasureType type = metric.getType() == MetricType.COUNTER ? MeasureType.COUNTER : MeasureType.GAUGE;
      metrics.add(new Measurement(measureName, type, metric.getValue()));
    }
    CubeFact fact = new CubeFact(metricValue.getTimestamp())
      .addDimensionValues(metricValue.getTags())
      .addMeasurements(metrics);
    facts.add(fact);
  }
  cube.get().add(facts);
}
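The measure name is derived from the SCOPE tag: when the tag is absent the metric is filed under the "system." prefix, otherwise under "<scope>.". A minimal, dependency-free sketch of just that rule (the example names are illustrative):

  public final class MeasureNameSketch {

    // Mirrors the naming rule in DefaultMetricStore.add: no scope tag means "system.".
    static String toMeasureName(String scopeTag, String metricName) {
      return (scopeTag == null ? "system." : scopeTag + ".") + metricName;
    }

    public static void main(String[] args) {
      System.out.println(toMeasureName(null, "dataset.store.ops"));   // system.dataset.store.ops
      System.out.println(toMeasureName("user", "records.processed")); // user.records.processed
    }
  }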
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
Class MetricsMessageCallback, method onReceived:
@Override
public long onReceived(Iterator<FetchedMessage> messages) {
  // Decode the metrics records.
  ByteBufferInputStream is = new ByteBufferInputStream(null);
  List<MetricValues> records = Lists.newArrayList();
  long nextOffset = 0L;
  while (messages.hasNext()) {
    FetchedMessage input = messages.next();
    nextOffset = input.getNextOffset();
    try {
      MetricValues metricValues = recordReader.read(new BinaryDecoder(is.reset(input.getPayload())), recordSchema);
      records.add(metricValues);
    } catch (IOException e) {
      LOG.warn("Failed to decode message to MetricValue. Skipped. {}", e.getMessage());
    }
  }
  if (records.isEmpty()) {
    LOG.info("No records to process.");
    return nextOffset;
  }
  long now = System.currentTimeMillis();
  try {
    addProcessingStats(records, now);
    metricStore.add(records);
  } catch (Exception e) {
    // SimpleKafkaConsumer will log the error, and continue on past these messages
    throw new RuntimeException("Failed to add metrics data to a store", e);
  }
  recordsProcessed += records.size();
  // avoid logging more than once a minute
  if (now > lastLoggedMillis + TimeUnit.MINUTES.toMillis(1)) {
    lastLoggedMillis = now;
    LOG.debug("{} metrics records processed. Last record time: {}.",
              recordsProcessed, records.get(records.size() - 1).getTimestamp());
  }
  return nextOffset;
}
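After decoding, the whole batch is written with a single metricStore.add(records) call. The sketch below shows what such a batch might look like when built by hand, using the MetricValues(tags, timestamp, metrics) constructor seen later on this page. The tag keys and metric names are made up for illustration, and metricStore is assumed to be an already-wired co.cask.cdap.api.metrics.MetricStore.

  import java.util.List;
  import java.util.concurrent.TimeUnit;

  import co.cask.cdap.api.metrics.MetricStore;
  import co.cask.cdap.api.metrics.MetricType;
  import co.cask.cdap.api.metrics.MetricValue;
  import co.cask.cdap.api.metrics.MetricValues;
  import com.google.common.collect.ImmutableList;
  import com.google.common.collect.ImmutableMap;

  public final class MetricStoreWriteSketch {

    // Writes one counter and one gauge in a single batch, mirroring the callback's
    // metricStore.add(records) call. Tag keys and metric names are illustrative only.
    static void writeBatch(MetricStore metricStore) throws Exception {
      long nowSecs = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
      List<MetricValues> records = ImmutableList.of(
        new MetricValues(
          ImmutableMap.of("ns", "default", "app", "PurchaseApp"),
          nowSecs,
          ImmutableList.of(
            new MetricValue("records.in", MetricType.COUNTER, 1),
            new MetricValue("queue.pending", MetricType.GAUGE, 42))));
      metricStore.add(records);
    }
  }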
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
Class AggregatedMetricsCollectionService, method getMetrics:
private Iterator<MetricValues> getMetrics(final long timestamp) {
  // NOTE: emitters.asMap() does not reset the access time in the cache, so it is the preferred way
  // to access the cache entries, since we access and emit metrics every second.
  final Iterator<Map.Entry<Map<String, String>, LoadingCache<String, AggregatedMetricsEmitter>>> iterator =
    emitters.asMap().entrySet().iterator();
  return new AbstractIterator<MetricValues>() {
    @Override
    protected MetricValues computeNext() {
      while (iterator.hasNext()) {
        Map.Entry<Map<String, String>, LoadingCache<String, AggregatedMetricsEmitter>> entry = iterator.next();
        Map<String, AggregatedMetricsEmitter> metricEmitters = entry.getValue().asMap();
        // +1 because we add an extra metric about how many metric values were emitted in this context (see below).
        List<MetricValue> metricValues = Lists.newArrayListWithCapacity(metricEmitters.size() + 1);
        for (Map.Entry<String, AggregatedMetricsEmitter> emitterEntry : metricEmitters.entrySet()) {
          MetricValue metricValue = emitterEntry.getValue().emit();
          // Skip increments by 0.
          if (metricValue.getType() == MetricType.COUNTER && metricValue.getValue() == 0) {
            continue;
          }
          metricValues.add(metricValue);
        }
        if (metricValues.isEmpty()) {
          // Skip if there are no metric values to send.
          continue;
        }
        // Number of emitted metric values, including this count metric itself.
        metricValues.add(new MetricValue("metrics.emitted.count", MetricType.COUNTER, metricValues.size() + 1));
        LOG.trace("Emit metric {}", metricValues);
        return new MetricValues(entry.getKey(), timestamp, metricValues);
      }
      return endOfData();
    }
  };
}
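The lazy iterator is built with Guava's AbstractIterator: computeNext() either produces the next MetricValues, skips contexts with nothing to emit via continue, or ends the stream with endOfData(). Below is a self-contained sketch of that pattern, with strings standing in for the emitter entries.

  import java.util.Iterator;
  import java.util.Map;

  import com.google.common.collect.AbstractIterator;
  import com.google.common.collect.ImmutableMap;

  public final class NonEmptyValuesIterator {

    // Returns a lazy iterator over the non-empty values of the map, in the same
    // compute/skip/endOfData shape as getMetrics above.
    static Iterator<String> nonEmptyValues(Map<String, String> source) {
      final Iterator<Map.Entry<String, String>> entries = source.entrySet().iterator();
      return new AbstractIterator<String>() {
        @Override
        protected String computeNext() {
          while (entries.hasNext()) {
            Map.Entry<String, String> entry = entries.next();
            if (entry.getValue().isEmpty()) {
              continue;            // skip, just as zero-value counters are skipped above
            }
            return entry.getValue();
          }
          return endOfData();      // no more entries to emit
        }
      };
    }

    public static void main(String[] args) {
      Iterator<String> it = nonEmptyValues(ImmutableMap.of("a", "1", "b", "", "c", "3"));
      while (it.hasNext()) {
        System.out.println(it.next()); // prints 1 then 3
      }
    }
  }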
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
Class LocalMetricsCollectionService, method publish:
@Override
protected void publish(Iterator<MetricValues> metrics) throws Exception {
  while (metrics.hasNext()) {
    MetricValues metric = metrics.next();
    metricStore.add(metric);
  }
}
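For comparison, here is a hypothetical batched variant: since MetricStore also accepts a whole collection (see DefaultMetricStore.add above), the per-metric loop could be collapsed into a single store write. This is only a sketch of that alternative, not how LocalMetricsCollectionService is actually implemented.

  import java.util.Iterator;
  import java.util.List;

  import co.cask.cdap.api.metrics.MetricStore;
  import co.cask.cdap.api.metrics.MetricValues;
  import com.google.common.collect.Lists;

  public final class BatchedPublishSketch {

    // Drains the iterator into a list and performs one store write instead of many.
    static void publish(MetricStore metricStore, Iterator<MetricValues> metrics) throws Exception {
      List<MetricValues> batch = Lists.newArrayList(metrics);
      if (!batch.isEmpty()) {
        metricStore.add(batch);
      }
    }
  }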