Use of org.apache.heron.spi.metricsmgr.metrics.MetricsRecord in project heron by twitter.
The class PrometheusSinkTests, method testResponse.
@Test
public void testResponse() throws IOException {
  PrometheusTestSink sink = new PrometheusTestSink();
  sink.init(defaultConf, context);
  for (MetricsRecord r : records) {
    sink.processRecord(r);
  }
  final String topology = "testTopology";
  final List<String> expectedLines = Arrays.asList(
      createMetric(topology, "component", "instance_1", "metric_1", "1.0"),
      createMetric(topology, "component", "instance_1", "metric_2", "2.0"),
      createMetric(topology, "component", "instance_1", "metric_1", "1.0"),
      createMetric(topology, "component", "instance_1", "metric_2", "2.0"));
  final Set<String> generatedLines =
      new HashSet<>(Arrays.asList(new String(sink.generateResponse()).split("\n")));
  assertEquals(expectedLines.size(), generatedLines.size());
  expectedLines.forEach((String line) -> {
    assertTrue(generatedLines.contains(line));
  });
}
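The records fixture and the createMetric helper live elsewhere in PrometheusSinkTests and are not shown on this page. A minimal sketch of what createMetric could look like, assuming the sink emits one Prometheus-style exposition line per metric labelled with topology, component, and instance id; the label names and line layout are assumptions, and the real sink may also append a timestamp:

// Hypothetical helper: builds the exposition line the test expects for a single metric.
// The label names and any trailing timestamp are assumptions, not the sink's confirmed format.
private String createMetric(String topology, String component, String instance,
                            String metric, String value) {
  return String.format("%s{topology=\"%s\",component=\"%s\",instance_id=\"%s\"} %s",
      metric, topology, component, instance, value);
}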
Use of org.apache.heron.spi.metricsmgr.metrics.MetricsRecord in project heron by twitter.
The class PrometheusSinkTests, method testApacheStormKafkaMetrics.
@Test
public void testApacheStormKafkaMetrics() throws IOException {
  Iterable<MetricsInfo> infos = Arrays.asList(
      new MetricsInfo("kafkaOffset/event_data/partition_0/spoutLag", "1.0"),
      new MetricsInfo("kafkaOffset/event_data/partition_10/spoutLag", "1.0"),
      new MetricsInfo("kafkaOffset/event_data/partition_0/earliestTimeOffset", "1.0"),
      new MetricsInfo("kafkaOffset/event_data/totalRecordsInPartitions", "1.0"),
      new MetricsInfo("kafkaOffset/event_data/totalSpoutLag", "1.0"),
      new MetricsInfo("kafkaOffset/event_data/partition_2/spoutLag", "1.0"));
  records = Arrays.asList(newRecord(
      "shared-aurora-036:31/spout-release-1/container_1_spout-release-1_31",
      infos, Collections.emptyList()));
  PrometheusTestSink sink = new PrometheusTestSink();
  sink.init(defaultConf, context);
  for (MetricsRecord r : records) {
    sink.processRecord(r);
  }
  final String topology = "testTopology";
  final List<String> expectedLines = Arrays.asList(
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_partition_spout_lag", "event_data", "0", "1.0"),
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_partition_spout_lag", "event_data", "10", "1.0"),
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_partition_earliest_time_offset", "event_data", "0", "1.0"),
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_total_records_in_partitions", "event_data", null, "1.0"),
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_total_spout_lag", "event_data", null, "1.0"),
      createOffsetMetric(topology, "spout-release-1", "container_1_spout-release-1_31", "kafka_offset_partition_spout_lag", "event_data", "2", "1.0"));
  final Set<String> generatedLines =
      new HashSet<>(Arrays.asList(new String(sink.generateResponse()).split("\n")));
  assertEquals(expectedLines.size(), generatedLines.size());
  expectedLines.forEach((String line) -> {
    assertTrue(generatedLines.contains(line));
  });
}
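newRecord and createOffsetMetric are likewise helpers of the test class that this page does not show. A sketch of plausible implementations, assuming newRecord simply wraps the MetricsRecord constructor and createOffsetMetric renders the renamed Kafka offset metric with the topic and, when present, the partition as labels; the "topic" and "partition" label names are assumptions:

// Hypothetical: wrap the MetricsRecord constructor; source is "host:port/component/instance".
private MetricsRecord newRecord(String source, Iterable<MetricsInfo> metrics,
                                Iterable<ExceptionInfo> exceptions) {
  return new MetricsRecord(source, metrics, exceptions);
}

// Hypothetical: "kafkaOffset/<topic>/partition_<n>/spoutLag" becomes
// kafka_offset_partition_spout_lag{...,topic="<topic>",partition="<n>"}; totals carry no partition.
private String createOffsetMetric(String topology, String component, String instance,
                                  String metric, String topic, String partition, String value) {
  StringBuilder labels = new StringBuilder()
      .append("topology=\"").append(topology).append('"')
      .append(",component=\"").append(component).append('"')
      .append(",instance_id=\"").append(instance).append('"')
      .append(",topic=\"").append(topic).append('"');
  if (partition != null) {
    labels.append(",partition=\"").append(partition).append('"');
  }
  return String.format("%s{%s} %s", metric, labels, value);
}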
Use of org.apache.heron.spi.metricsmgr.metrics.MetricsRecord in project heron by twitter.
The class MetricsManagerServer, method handlePublisherPublishMessage.
private void handlePublisherPublishMessage(Metrics.MetricPublisher request,
                                           Metrics.MetricPublisherPublishMessage message) {
  if (message.getMetricsCount() <= 0 && message.getExceptionsCount() <= 0) {
    LOG.log(Level.SEVERE,
        "Publish message has no metrics nor exceptions for message from hostname: {0},"
            + " component_name: {1}, port: {2}, instance_id: {3}, instance_index: {4}",
        new Object[] {request.getHostname(), request.getComponentName(), request.getPort(),
            request.getInstanceId(), request.getInstanceIndex()});
    return;
  }
  // Convert the message to a MetricsRecord
  String source = MetricsUtil.createSource(request.getHostname(), request.getPort(),
      request.getComponentName(), request.getInstanceId());
  List<MetricsInfo> metricsInfos = new ArrayList<MetricsInfo>(message.getMetricsCount());
  for (Metrics.MetricDatum metricDatum : message.getMetricsList()) {
    MetricsInfo info = new MetricsInfo(metricDatum.getName(), metricDatum.getValue());
    metricsInfos.add(info);
  }
  List<ExceptionInfo> exceptionInfos = new ArrayList<ExceptionInfo>(message.getExceptionsCount());
  for (Metrics.ExceptionData exceptionData : message.getExceptionsList()) {
    ExceptionInfo exceptionInfo = new ExceptionInfo(exceptionData.getStacktrace(),
        exceptionData.getLasttime(), exceptionData.getFirsttime(),
        exceptionData.getCount(), exceptionData.getLogging());
    exceptionInfos.add(exceptionInfo);
  }
  LOG.info(String.format("%d MetricsInfo and %d ExceptionInfo to push",
      metricsInfos.size(), exceptionInfos.size()));
  // Update the server-side metrics
  serverMetricsCounters.scope(SERVER_METRICS_RECEIVED).incrBy(metricsInfos.size());
  serverMetricsCounters.scope(SERVER_EXCEPTIONS_RECEIVED).incrBy(exceptionInfos.size());
  MetricsRecord record = new MetricsRecord(source, metricsInfos, exceptionInfos);
  // Push the MetricsRecord to each Communicator, which wakes up the ExecutorLooper bound to its IMetricsSink
  synchronized (metricsSinkCommunicators) {
    Iterator<String> itr = metricsSinkCommunicators.keySet().iterator();
    while (itr.hasNext()) {
      String key = itr.next();
      Communicator<MetricsRecord> c = metricsSinkCommunicators.get(key);
      c.offer(record);
      serverMetricsCounters.scope(SERVER_COMMUNICATOR_OFFER).incr();
      serverMetricsCounters.scope(SERVER_COMMUNICATOR_SIZE + "-" + key).incrBy(c.size());
    }
  }
}
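For context, the consuming side of each Communicator is a sink executor that drains records and hands them to its IMetricsSink. A simplified, illustrative sketch only; Heron's real SinkExecutor runs this inside a looper task, and the poll() call on Communicator is an assumption based on the offer()/size() calls above:

// Illustrative drain loop, not Heron's actual executor code.
void drainOnce(Communicator<MetricsRecord> communicator, IMetricsSink sink) {
  while (communicator.size() > 0) {
    MetricsRecord record = communicator.poll();
    if (record == null) {
      break;  // nothing left to consume
    }
    sink.processRecord(record);
  }
}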
Use of org.apache.heron.spi.metricsmgr.metrics.MetricsRecord in project heron by twitter.
The class WebSinkTest, method before.
@Before
public void before() throws IOException {
  defaultConf = new HashMap<>();
  defaultConf.put("port", "9999");
  defaultConf.put("path", "test");
  defaultConf.put("flat-metrics", "true");
  defaultConf.put("include-topology-name", "false");
  context = Mockito.mock(SinkContext.class);
  Mockito.when(context.getTopologyName()).thenReturn("testTopology");
  Mockito.when(context.getSinkId()).thenReturn("testId");
  Iterable<MetricsInfo> infos = Arrays.asList(
      new MetricsInfo("metric_1", "1.0"),
      new MetricsInfo("metric_2", "2.0"));
  records = Arrays.asList(
      new MetricsRecord("machine/stuff/record_1", infos, Collections.<ExceptionInfo>emptyList()),
      new MetricsRecord("record_2", infos, Collections.<ExceptionInfo>emptyList()));
}
Use of org.apache.heron.spi.metricsmgr.metrics.MetricsRecord in project heron by twitter.
The class WebSinkTest, method testFlatMetrics.
/**
* Testing flat map with metrics
*/
@Test
public void testFlatMetrics() {
  Map<String, Object> conf = new HashMap<>(defaultConf);
  WebTestSink sink = new WebTestSink();
  sink.init(conf, context);
  for (MetricsRecord r : records) {
    sink.processRecord(r);
  }
  Map<String, Object> results = sink.getMetrics();
  Assert.assertEquals(4, results.size());
  Assert.assertEquals(results.get("/stuff/record_1/metric_1"), 1.0d);
  Assert.assertEquals(results.get("/stuff/record_1/metric_2"), 2.0d);
  Assert.assertEquals(results.get("/record_2/metric_1"), 1.0d);
  Assert.assertEquals(results.get("/record_2/metric_2"), 2.0d);
}
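With flat-metrics enabled and include-topology-name disabled, the asserted keys suggest the sink drops the leading host token of the record source and joins the remaining segments with the metric name. An illustrative sketch of that key construction, not the WebSink's actual code:

// "machine/stuff/record_1" + "metric_1" -> "/stuff/record_1/metric_1"
// "record_2"               + "metric_1" -> "/record_2/metric_1"
static String flatKey(String source, String metricName) {
  String[] parts = source.split("/");
  StringBuilder key = new StringBuilder();
  int start = parts.length > 1 ? 1 : 0;  // drop the host token only if one is present
  for (int i = start; i < parts.length; i++) {
    key.append('/').append(parts[i]);
  }
  return key.append('/').append(metricName).toString();
}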