Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
The class MetricsMessageCallback, method onReceived:
@Override
public long onReceived(Iterator<FetchedMessage> messages) {
  // Decode the metrics records.
  ByteBufferInputStream is = new ByteBufferInputStream(null);
  List<MetricValues> records = Lists.newArrayList();
  long nextOffset = 0L;
  while (messages.hasNext()) {
    FetchedMessage input = messages.next();
    nextOffset = input.getNextOffset();
    try {
      MetricValues metricValues =
        recordReader.read(new BinaryDecoder(is.reset(input.getPayload())), recordSchema);
      records.add(metricValues);
    } catch (IOException e) {
      LOG.warn("Failed to decode message to MetricValues. Skipped. {}", e.getMessage());
    }
  }
  if (records.isEmpty()) {
    LOG.info("No records to process.");
    return nextOffset;
  }
  long now = System.currentTimeMillis();
  try {
    addProcessingStats(records, now);
    metricStore.add(records);
  } catch (Exception e) {
    // SimpleKafkaConsumer will log the error and continue past these messages.
    throw new RuntimeException("Failed to add metrics data to a store", e);
  }
  recordsProcessed += records.size();
  // Avoid logging more than once a minute.
  if (now > lastLoggedMillis + TimeUnit.MINUTES.toMillis(1)) {
    lastLoggedMillis = now;
    LOG.debug("{} metrics records processed. Last record time: {}.",
              recordsProcessed, records.get(records.size() - 1).getTimestamp());
  }
  return nextOffset;
}
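For context, the callback above has the shape of Apache Twill's KafkaConsumer.MessageCallback, where the value returned from onReceived tells the consumer which offset to resume from. A minimal sketch of registering such a callback with the Twill Kafka client follows; the topic name, partition, and starting offset are illustrative assumptions, not values from this source.

import org.apache.twill.common.Cancellable;
import org.apache.twill.kafka.client.KafkaClientService;
import org.apache.twill.kafka.client.KafkaConsumer;

public final class MetricsSubscriptionSketch {
  // Registers the callback on partition 0 of a hypothetical "metrics.topic",
  // starting from offset 0; the returned Cancellable stops consumption.
  public static Cancellable subscribe(KafkaClientService kafkaClient,
                                      KafkaConsumer.MessageCallback callback) {
    return kafkaClient.getConsumer()
      .prepare()
      .add("metrics.topic", 0, 0L)
      .consume(callback);
  }
}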
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
The class MetricsProcessorServiceTest, method addKafkaMetrics:
private void addKafkaMetrics(int metricIndex, long startTimeSecs, Map<String, String> metricsContext,
                             Map<String, Long> expected, KafkaPublisher.Preparer preparer,
                             MetricType metricType) throws IOException, TopicNotFoundException {
  MetricValues metric = getMetricValuesAddToExpected(metricIndex, startTimeSecs, metricsContext,
                                                     expected, SYSTEM_METRIC_PREFIX, metricType);
  // Partition by the metrics context.
  preparer.add(ByteBuffer.wrap(encoderOutputStream.toByteArray()), metric.getTags().hashCode());
  encoderOutputStream.reset();
  LOG.info("Published metric: {}", metric);
}
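The helper assumes the metric has already been serialized into encoderOutputStream before preparer.add is called. A sketch of what that encoding step might look like, assuming CDAP's reflection-based datum writer and binary encoder; the helper itself is hypothetical and the exact writer type is an assumption.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import co.cask.cdap.api.metrics.MetricValues;
import co.cask.cdap.common.io.BinaryEncoder;
import co.cask.cdap.internal.io.DatumWriter;

// Hypothetical helper: writes one MetricValues record into the shared
// ByteArrayOutputStream whose bytes are then wrapped and handed to Kafka.
static void encodeMetric(DatumWriter<MetricValues> recordWriter,
                         ByteArrayOutputStream encoderOutputStream,
                         MetricValues metric) throws IOException {
  recordWriter.encode(metric, new BinaryEncoder(encoderOutputStream));
}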
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
The class StreamSizeSchedulerTest, method createMetricsPublisher:
@Override
protected StreamMetricsPublisher createMetricsPublisher(final StreamId streamId) {
  final NotificationFeedId feed = new NotificationFeedId(
    streamId.getNamespace(), Constants.Notification.Stream.STREAM_FEED_CATEGORY,
    streamId.getStream() + "Size");
  return new StreamMetricsPublisher() {
    long totalSize;

    @Override
    public void increment(long size) throws Exception {
      metricStore.add(new MetricValues(
        ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, streamId.getNamespace(),
                        Constants.Metrics.Tag.STREAM, streamId.getStream()),
        "collect.bytes", TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis()),
        size, MetricType.COUNTER));
      totalSize += size;
      notificationService.publish(feed, new StreamSizeNotification(System.currentTimeMillis(), totalSize));
    }
  };
}
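An illustrative use of the publisher above, assuming a test that ingests a few events into the stream identified by streamId: each call records a point on the "collect.bytes" counter and publishes the running total on the notification feed.

StreamMetricsPublisher publisher = createMetricsPublisher(streamId);
publisher.increment(1024L); // 1 KB ingested; counter +1024, notification carries total 1024
publisher.increment(2048L); // 2 KB more; counter +2048, notification carries total 3072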
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
The class MetricsHandlerTestRun, method testAutoResolutions:
@Test
public void testAutoResolutions() throws Exception {
  long start = 1;
  Map<String, String> sliceBy = getFlowletContext("resolutions", "WordCount1", "WordCounter", "run1", "splitter");
  // 1 second
  metricStore.add(new MetricValues(sliceBy, "reads", start, 1, MetricType.COUNTER));
  // 30 seconds
  metricStore.add(new MetricValues(sliceBy, "reads", start + 30, 1, MetricType.COUNTER));
  // 1 minute
  metricStore.add(new MetricValues(sliceBy, "reads", start + 60, 1, MetricType.COUNTER));
  // 10 minutes
  metricStore.add(new MetricValues(sliceBy, "reads", start + 600, 1, MetricType.COUNTER));
  // 1 hour
  metricStore.add(new MetricValues(sliceBy, "reads", start + 3600, 1, MetricType.COUNTER));
  // 10 hours
  metricStore.add(new MetricValues(sliceBy, "reads", start + 36000, 1, MetricType.COUNTER));

  // Resolves to second resolution.
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
    + "&metric=system.reads&resolution=auto&start=" + start + "&end=" + (start + 600), 4, 4);
  // Resolves to minute resolution.
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
    + "&metric=system.reads&resolution=auto&start=" + (start - 1) + "&end=" + (start + 600), 3, 4);
  // Resolves to minute resolution.
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
    + "&metric=system.reads&resolution=auto&start=" + (start - 1) + "&end=" + (start + 3600), 4, 5);
  // Resolves to hour resolution.
  verifyRangeQueryResult("/v3/metrics/query?" + getTags("resolutions", "WordCount1", "WordCounter", "splitter")
    + "&metric=system.reads&resolution=auto&start=" + (start - 1) + "&end=" + (start + 36000), 3, 6);

  // Delete the metrics that were added for testing auto resolutions.
  MetricDeleteQuery deleteQuery = new MetricDeleteQuery(start, start + 36000, sliceBy);
  metricStore.delete(deleteQuery);
}
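The thresholds that resolution=auto appears to use can be read off the expected counts above: a range of up to 600 seconds resolves to 1-second buckets, up to 36000 seconds to 1-minute buckets, and anything longer to 1-hour buckets. A sketch of that selection logic, inferred from the test's expectations rather than taken from CDAP's query parser:

// Inferred from the test above; not CDAP's actual implementation.
static int chooseAutoResolutionSeconds(long startSecs, long endSecs) {
  long range = endSecs - startSecs;
  if (range <= 600) {          // up to 10 minutes: 1-second resolution
    return 1;
  } else if (range <= 36000) { // up to 10 hours: 1-minute resolution
    return 60;
  } else {                     // longer ranges: 1-hour resolution
    return 3600;
  }
}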
Use of co.cask.cdap.api.metrics.MetricValues in project cdap by caskdata.
The class MessagingMetricsCollectionServiceTest, method assertMetricsFromMessaging:
private void assertMetricsFromMessaging(final Schema schema, ReflectionDatumReader recordReader,
                                        Table<String, String, Long> expected)
  throws InterruptedException, TopicNotFoundException, IOException {
  // Consume the published metrics from the messaging service.
  final Map<String, MetricValues> metrics = Maps.newHashMap();
  ByteBufferInputStream is = new ByteBufferInputStream(null);
  for (int i = 0; i < PARTITION_SIZE; i++) {
    TopicId topicId = NamespaceId.SYSTEM.topic(TOPIC_PREFIX + i);
    try (CloseableIterator<RawMessage> iterator = messagingService.prepareFetch(topicId).fetch()) {
      while (iterator.hasNext()) {
        RawMessage message = iterator.next();
        MetricValues metricsRecord = (MetricValues) recordReader.read(
          new BinaryDecoder(is.reset(ByteBuffer.wrap(message.getPayload()))), schema);
        StringBuilder flattenContext = new StringBuilder();
        // Sort the tags so the flattened context is deterministic for verification.
        Map<String, String> tags = Maps.newTreeMap();
        tags.putAll(metricsRecord.getTags());
        for (Map.Entry<String, String> tag : tags.entrySet()) {
          flattenContext.append(tag.getKey()).append(".").append(tag.getValue()).append(".");
        }
        // Remove the trailing ".".
        if (flattenContext.length() > 0) {
          flattenContext.deleteCharAt(flattenContext.length() - 1);
        }
        metrics.put(flattenContext.toString(), metricsRecord);
      }
    } catch (IOException e) {
      LOG.info("Failed to decode message to MetricValues. Skipped. {}", e.getMessage());
    }
  }
  Assert.assertEquals(expected.rowKeySet().size(), metrics.size());
  checkReceivedMetrics(expected, metrics);
}
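For completeness, a plausible shape for the checkReceivedMetrics helper: it would compare each (flattened context, metric name) cell of the expected table against the consumed records. This is a sketch of the assertion under that assumption, not the test's actual implementation.

import java.util.Map;
import co.cask.cdap.api.metrics.MetricValue;
import co.cask.cdap.api.metrics.MetricValues;
import com.google.common.collect.Table;
import org.junit.Assert;

// Hypothetical verification: every expected (flattened context, metric name)
// cell must appear among the consumed records with the expected value.
static void checkReceivedMetrics(Table<String, String, Long> expected,
                                 Map<String, MetricValues> metrics) {
  for (Table.Cell<String, String, Long> cell : expected.cellSet()) {
    MetricValues record = metrics.get(cell.getRowKey());
    Assert.assertNotNull("No record for context " + cell.getRowKey(), record);
    boolean found = false;
    for (MetricValue value : record.getMetrics()) {
      if (value.getName().equals(cell.getColumnKey())) {
        Assert.assertEquals((long) cell.getValue(), value.getValue());
        found = true;
      }
    }
    Assert.assertTrue("Missing metric " + cell.getColumnKey() + " for context " + cell.getRowKey(), found);
  }
}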