Use of org.apache.samza.metrics.ReadableMetricsRegistry in project samza by apache.
In class TestKafkaSystemConsumerMetrics, method testKafkaSystemConsumerMetrics:
@Test
public void testKafkaSystemConsumerMetrics() {
  String systemName = "system";
  TopicPartition tp1 = new TopicPartition("topic1", 1);
  TopicPartition tp2 = new TopicPartition("topic2", 2);
  String clientName = "clientName";

  // record expected values for further comparison
  Map<String, String> expectedValues = new HashMap<>();

  ReadableMetricsRegistry registry = new MetricsRegistryMap();
  KafkaSystemConsumerMetrics metrics = new KafkaSystemConsumerMetrics(systemName, registry);

  // initialize the metrics for the partitions
  metrics.registerTopicPartition(tp1);
  metrics.registerTopicPartition(tp2);

  // initialize the metrics for the host:port
  metrics.registerClientProxy(clientName);

  metrics.setOffsets(tp1, 1001);
  metrics.setOffsets(tp2, 1002);
  expectedValues.put(metrics.offsets().get(tp1).getName(), "1001");
  expectedValues.put(metrics.offsets().get(tp2).getName(), "1002");

  metrics.incBytesReads(tp1, 10);
  metrics.incBytesReads(tp1, 5); // total 15
  expectedValues.put(metrics.bytesRead().get(tp1).getName(), "15");

  metrics.incReads(tp1);
  metrics.incReads(tp1); // total 2
  expectedValues.put(metrics.reads().get(tp1).getName(), "2");

  metrics.setHighWatermarkValue(tp2, 1000);
  metrics.setHighWatermarkValue(tp2, 1001); // final value 1001
  expectedValues.put(metrics.highWatermark().get(tp2).getName(), "1001");

  metrics.setLagValue(tp1, 200);
  metrics.setLagValue(tp1, 201); // final value 201
  expectedValues.put(metrics.lag().get(tp1).getName(), "201");

  // broker-bytes-read
  metrics.incClientBytesReads(clientName, 100);
  metrics.incClientBytesReads(clientName, 110); // total 210
  expectedValues.put(metrics.clientBytesRead().get(clientName).getName(), "210");

  // messages-read
  metrics.incClientReads(clientName);
  metrics.incClientReads(clientName); // total 2
  expectedValues.put(metrics.clientReads().get(clientName).getName(), "2");

  // "topic-partitions"
  metrics.setNumTopicPartitions(clientName, 2);
  metrics.setNumTopicPartitions(clientName, 3); // final value 3
  expectedValues.put(metrics.topicPartitions().get(clientName).getName(), "3");

  String groupName = metrics.group();
  Assert.assertEquals(groupName, KafkaSystemConsumerMetrics.class.getName());
  Assert.assertEquals(metrics.systemName(), systemName);

  Map<String, Metric> metricMap = registry.getGroup(groupName);
  validate(metricMap, expectedValues);
}
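The validate helper is referenced but not shown in this snippet. A minimal sketch of what such a helper could look like, reusing the MetricsVisitor pattern from the MetricsSnapshotReporter example below; the method name, assertion messages, and string conversion are assumptions, not the project's actual implementation:

private void validate(Map<String, Metric> metricMap, Map<String, String> expectedValues) {
  expectedValues.forEach((metricName, expectedValue) -> {
    Metric metric = metricMap.get(metricName);
    Assert.assertNotNull("Missing metric: " + metricName, metric);
    // extract the underlying value through the visitor API and compare its string form
    metric.visit(new MetricsVisitor() {
      @Override
      public void counter(Counter counter) {
        Assert.assertEquals(expectedValue, String.valueOf(counter.getCount()));
      }

      @Override
      public <T> void gauge(Gauge<T> gauge) {
        Assert.assertEquals(expectedValue, String.valueOf(gauge.getValue()));
      }

      @Override
      public void timer(Timer timer) {
        Assert.assertEquals(expectedValue, String.valueOf(timer.getSnapshot().getAverage()));
      }
    });
  });
}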
Use of org.apache.samza.metrics.ReadableMetricsRegistry in project samza by apache.
In class MetricsSnapshotReporter, method innerRun:
public void innerRun() {
  LOG.debug("Begin flushing metrics.");
  for (MetricsRegistryWithSource metricsRegistryWithSource : this.registries) {
    String source = metricsRegistryWithSource.getSource();
    ReadableMetricsRegistry registry = metricsRegistryWithSource.getRegistry();
    LOG.debug("Flushing metrics for {}.", source);
    Map<String, Map<String, Object>> metricsMsg = new HashMap<>();

    // metrics
    registry.getGroups().forEach(group -> {
      Map<String, Object> groupMsg = new HashMap<>();
      registry.getGroup(group).forEach((name, metric) -> {
        if (!shouldIgnore(group, name)) {
          metric.visit(new MetricsVisitor() {
            @Override
            public void counter(Counter counter) {
              groupMsg.put(name, counter.getCount());
            }

            @Override
            public <T> void gauge(Gauge<T> gauge) {
              groupMsg.put(name, gauge.getValue());
            }

            @Override
            public void timer(Timer timer) {
              groupMsg.put(name, timer.getSnapshot().getAverage());
            }
          });
        }
      });

      // don't emit empty groups
      if (!groupMsg.isEmpty()) {
        metricsMsg.put(group, groupMsg);
      }
    });

    // publish to Kafka only if the metricsMsg carries any metrics
    if (!metricsMsg.isEmpty()) {
      MetricsHeader header =
          new MetricsHeader(this.jobName, this.jobId, this.containerName, this.executionEnvContainerId,
              Optional.of(this.samzaEpochId), source, this.version, this.samzaVersion, this.host,
              this.clock.currentTimeMillis(), this.resetTime);
      Metrics metrics = new Metrics(metricsMsg);
      LOG.debug("Flushing metrics for {} to {} with header and map: header={}, map={}.",
          source, out, header.getAsMap(), metrics.getAsMap());
      MetricsSnapshot metricsSnapshot = new MetricsSnapshot(header, metrics);
      Object maybeSerialized = (this.serializer != null) ? this.serializer.toBytes(metricsSnapshot) : metricsSnapshot;
      try {
        this.producer.send(source, new OutgoingMessageEnvelope(this.out, this.host, null, maybeSerialized));
        // Always flush, since we don't want metrics to get batched up.
        this.producer.flush(source);
      } catch (Exception e) {
        LOG.error(String.format("Exception when flushing metrics for source %s", source), e);
      }
    }
  }
  LOG.debug("Finished flushing metrics.");
}
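The flattening above relies only on the read side of ReadableMetricsRegistry: getGroups(), getGroup(), and Metric.visit(). A minimal, self-contained sketch of the same traversal against a locally populated MetricsRegistryMap (the group and metric names here are made up for illustration):

MetricsRegistryMap registry = new MetricsRegistryMap();
registry.newCounter("example-group", "num-events").inc();
registry.newGauge("example-group", "queue-size", 42);

Map<String, Map<String, Object>> flattened = new HashMap<>();
registry.getGroups().forEach(group -> {
  Map<String, Object> groupMsg = new HashMap<>();
  registry.getGroup(group).forEach((name, metric) -> metric.visit(new MetricsVisitor() {
    @Override
    public void counter(Counter counter) {
      groupMsg.put(name, counter.getCount());
    }

    @Override
    public <T> void gauge(Gauge<T> gauge) {
      groupMsg.put(name, gauge.getValue());
    }

    @Override
    public void timer(Timer timer) {
      groupMsg.put(name, timer.getSnapshot().getAverage());
    }
  }));
  flattened.put(group, groupMsg);
});
// flattened now maps "example-group" to {num-events=1, queue-size=42}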
Use of org.apache.samza.metrics.ReadableMetricsRegistry in project samza by apache.
In class TestOperatorImpl, method testOnMessageUpdatesMetrics:
@Test
public void testOnMessageUpdatesMetrics() {
  ReadableMetricsRegistry mockMetricsRegistry = mock(ReadableMetricsRegistry.class);
  when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(mockMetricsRegistry);
  Counter mockCounter = mock(Counter.class);
  Timer mockTimer = mock(Timer.class);
  when(mockMetricsRegistry.newCounter(anyString(), anyString())).thenReturn(mockCounter);
  when(mockMetricsRegistry.newTimer(anyString(), anyString())).thenReturn(mockTimer);

  Object mockTestOpImplOutput = mock(Object.class);
  OperatorImpl<Object, Object> opImpl = new TestOpImpl(mockTestOpImplOutput);
  opImpl.init(this.internalTaskContext);

  // send a message to this operator
  MessageCollector mockCollector = mock(MessageCollector.class);
  TaskCoordinator mockCoordinator = mock(TaskCoordinator.class);
  opImpl.onMessage(mock(Object.class), mockCollector, mockCoordinator);

  // verify that it updates message count and timer metrics
  verify(mockCounter, times(1)).inc();
  verify(mockTimer, times(1)).update(anyLong());
}
Use of org.apache.samza.metrics.ReadableMetricsRegistry in project samza by apache.
In class TestOperatorImpl, method testOnTimerUpdatesMetrics:
@Test
public void testOnTimerUpdatesMetrics() {
  ReadableMetricsRegistry mockMetricsRegistry = mock(ReadableMetricsRegistry.class);
  when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(mockMetricsRegistry);
  Counter mockMessageCounter = mock(Counter.class);
  Timer mockTimer = mock(Timer.class);
  when(mockMetricsRegistry.newCounter(anyString(), anyString())).thenReturn(mockMessageCounter);
  when(mockMetricsRegistry.newTimer(anyString(), anyString())).thenReturn(mockTimer);

  Object mockTestOpImplOutput = mock(Object.class);
  OperatorImpl<Object, Object> opImpl = new TestOpImpl(mockTestOpImplOutput);
  opImpl.init(this.internalTaskContext);

  // fire the timer callback on this operator
  MessageCollector mockCollector = mock(MessageCollector.class);
  TaskCoordinator mockCoordinator = mock(TaskCoordinator.class);
  opImpl.onTimer(mockCollector, mockCoordinator);

  // verify that it updates the timer metric but not the message counter
  verify(mockMessageCounter, times(0)).inc();
  verify(mockTimer, times(1)).update(anyLong());
}
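The two TestOperatorImpl tests above duplicate the same mock wiring; one possible refactor is a shared helper along these lines (the helper name and field placement are assumptions, not part of the actual test class):

private Counter mockCounter;
private Timer mockTimer;

private OperatorImpl<Object, Object> setUpOpImplWithMockedMetrics(Object opImplOutput) {
  ReadableMetricsRegistry mockMetricsRegistry = mock(ReadableMetricsRegistry.class);
  when(this.context.getContainerContext().getContainerMetricsRegistry()).thenReturn(mockMetricsRegistry);
  mockCounter = mock(Counter.class);
  mockTimer = mock(Timer.class);
  when(mockMetricsRegistry.newCounter(anyString(), anyString())).thenReturn(mockCounter);
  when(mockMetricsRegistry.newTimer(anyString(), anyString())).thenReturn(mockTimer);
  // initialize the operator so it registers its metrics against the mocked registry
  OperatorImpl<Object, Object> opImpl = new TestOpImpl(opImplOutput);
  opImpl.init(this.internalTaskContext);
  return opImpl;
}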
Use of org.apache.samza.metrics.ReadableMetricsRegistry in project samza by apache.
In class TestYarnContainerHeartbeatServlet, method setup:
@Before
public void setup() throws Exception {
  container = mock(YarnContainer.class);
  ReadableMetricsRegistry registry = new MetricsRegistryMap();
  yarnAppState =
      new YarnAppState(-1, ConverterUtils.toContainerId("container_1350670447861_0003_01_000001"), "testHost", 1, 1);
  webApp = new HttpServer("/", 0, "", new ServletHolder(new DefaultServlet()));
  webApp.addServlet("/", new YarnContainerHeartbeatServlet(yarnAppState, registry));
  webApp.start();
  mapper = new ObjectMapper();
}
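A matching teardown, sketched under the assumption that HttpServer exposes a stop() method, keeps the embedded web server from leaking across tests:

@After
public void teardown() throws Exception {
  // stop the embedded web server started in setup(); assumes HttpServer.stop() exists
  webApp.stop();
}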