Use of org.apache.kafka.common.metrics.KafkaMetricsContext in project kafka by apache.
The class KafkaStreams, method getMetrics.
private static Metrics getMetrics(final StreamsConfig config, final Time time, final String clientId) {
    final MetricConfig metricConfig = new MetricConfig()
        .samples(config.getInt(StreamsConfig.METRICS_NUM_SAMPLES_CONFIG))
        .recordLevel(Sensor.RecordingLevel.forName(config.getString(StreamsConfig.METRICS_RECORDING_LEVEL_CONFIG)))
        .timeWindow(config.getLong(StreamsConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS);
    final List<MetricsReporter> reporters = config.getConfiguredInstances(
        StreamsConfig.METRIC_REPORTER_CLASSES_CONFIG,
        MetricsReporter.class,
        Collections.singletonMap(StreamsConfig.CLIENT_ID_CONFIG, clientId));
    final JmxReporter jmxReporter = new JmxReporter();
    jmxReporter.configure(config.originals());
    reporters.add(jmxReporter);
    final MetricsContext metricsContext = new KafkaMetricsContext(
        JMX_PREFIX,
        config.originalsWithPrefix(CommonClientConfigs.METRICS_CONTEXT_PREFIX));
    return new Metrics(metricConfig, reporters, time, metricsContext);
}
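For reference, a minimal standalone sketch of the same wiring; it is not taken from the Kafka codebase. The sample count, window size, the "kafka.streams" prefix, and the application.id label are hard-coded stand-ins for the values that getMetrics reads from StreamsConfig and for the JMX_PREFIX constant.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetricsContext;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.MetricsContext;
import org.apache.kafka.common.metrics.MetricsReporter;
import org.apache.kafka.common.utils.Time;

public class StreamsMetricsSketch {
    public static void main(String[] args) {
        // Hard-coded stand-ins for the metrics.num.samples and metrics.sample.window.ms config values.
        MetricConfig metricConfig = new MetricConfig()
            .samples(2)
            .timeWindow(30_000L, TimeUnit.MILLISECONDS);

        // JmxReporter exposes every registered metric as an MBean; production code passes config.originals() here.
        JmxReporter jmxReporter = new JmxReporter();
        jmxReporter.configure(Collections.emptyMap());
        List<MetricsReporter> reporters = new ArrayList<>();
        reporters.add(jmxReporter);

        // The first argument plays the role of JMX_PREFIX; the label map would normally come from
        // config.originalsWithPrefix("metrics.context.").
        MetricsContext metricsContext = new KafkaMetricsContext(
            "kafka.streams", Collections.singletonMap("application.id", "demo-app"));

        // The Metrics constructor hands the context to each reporter via contextChange().
        try (Metrics metrics = new Metrics(metricConfig, reporters, Time.SYSTEM, metricsContext)) {
            // Metrics registered here appear under the "kafka.streams" JMX domain.
        }
    }
}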
Use of org.apache.kafka.common.metrics.KafkaMetricsContext in project kafka by apache.
The class KafkaAdminClient, method createInternal.
static KafkaAdminClient createInternal(AdminClientConfig config, TimeoutProcessorFactory timeoutProcessorFactory, HostResolver hostResolver) {
    Metrics metrics = null;
    NetworkClient networkClient = null;
    Time time = Time.SYSTEM;
    String clientId = generateClientId(config);
    ChannelBuilder channelBuilder = null;
    Selector selector = null;
    ApiVersions apiVersions = new ApiVersions();
    LogContext logContext = createLogContext(clientId);
    try {
        // Since we only request node information, it's safe to pass true for allowAutoTopicCreation (and it
        // simplifies communication with older brokers)
        AdminMetadataManager metadataManager = new AdminMetadataManager(logContext,
            config.getLong(AdminClientConfig.RETRY_BACKOFF_MS_CONFIG),
            config.getLong(AdminClientConfig.METADATA_MAX_AGE_CONFIG));
        List<InetSocketAddress> addresses = ClientUtils.parseAndValidateAddresses(
            config.getList(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG),
            config.getString(AdminClientConfig.CLIENT_DNS_LOOKUP_CONFIG));
        metadataManager.update(Cluster.bootstrap(addresses), time.milliseconds());
        List<MetricsReporter> reporters = config.getConfiguredInstances(
            AdminClientConfig.METRIC_REPORTER_CLASSES_CONFIG,
            MetricsReporter.class,
            Collections.singletonMap(AdminClientConfig.CLIENT_ID_CONFIG, clientId));
        Map<String, String> metricTags = Collections.singletonMap("client-id", clientId);
        MetricConfig metricConfig = new MetricConfig()
            .samples(config.getInt(AdminClientConfig.METRICS_NUM_SAMPLES_CONFIG))
            .timeWindow(config.getLong(AdminClientConfig.METRICS_SAMPLE_WINDOW_MS_CONFIG), TimeUnit.MILLISECONDS)
            .recordLevel(Sensor.RecordingLevel.forName(config.getString(AdminClientConfig.METRICS_RECORDING_LEVEL_CONFIG)))
            .tags(metricTags);
        JmxReporter jmxReporter = new JmxReporter();
        jmxReporter.configure(config.originals());
        reporters.add(jmxReporter);
        MetricsContext metricsContext = new KafkaMetricsContext(JMX_PREFIX,
            config.originalsWithPrefix(CommonClientConfigs.METRICS_CONTEXT_PREFIX));
        metrics = new Metrics(metricConfig, reporters, time, metricsContext);
        String metricGrpPrefix = "admin-client";
        channelBuilder = ClientUtils.createChannelBuilder(config, time, logContext);
        selector = new Selector(config.getLong(AdminClientConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG),
            metrics, time, metricGrpPrefix, channelBuilder, logContext);
        networkClient = new NetworkClient(metadataManager.updater(),
            null,
            selector,
            clientId,
            1,
            config.getLong(AdminClientConfig.RECONNECT_BACKOFF_MS_CONFIG),
            config.getLong(AdminClientConfig.RECONNECT_BACKOFF_MAX_MS_CONFIG),
            config.getInt(AdminClientConfig.SEND_BUFFER_CONFIG),
            config.getInt(AdminClientConfig.RECEIVE_BUFFER_CONFIG),
            (int) TimeUnit.HOURS.toMillis(1),
            config.getLong(AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MS_CONFIG),
            config.getLong(AdminClientConfig.SOCKET_CONNECTION_SETUP_TIMEOUT_MAX_MS_CONFIG),
            time,
            true,
            apiVersions,
            null,
            logContext,
            (hostResolver == null) ? new DefaultHostResolver() : hostResolver);
        return new KafkaAdminClient(config, clientId, time, metadataManager, metrics, networkClient,
            timeoutProcessorFactory, logContext);
    } catch (Throwable exc) {
        closeQuietly(metrics, "Metrics");
        closeQuietly(networkClient, "NetworkClient");
        closeQuietly(selector, "Selector");
        closeQuietly(channelBuilder, "ChannelBuilder");
        throw new KafkaException("Failed to create new KafkaAdminClient", exc);
    }
}
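The context labels passed to KafkaMetricsContext come from every client property that starts with CommonClientConfigs.METRICS_CONTEXT_PREFIX ("metrics.context.", per KIP-606). Below is a short sketch, not from the Kafka codebase, of how such a property ends up as a label; the cluster.alias key is an illustrative value, and "kafka.admin.client" stands in for the admin client's JMX_PREFIX constant.

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.common.metrics.KafkaMetricsContext;

public class AdminMetricsContextSketch {
    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<>();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // Any client property prefixed with "metrics.context." becomes a context label.
        props.put("metrics.context.cluster.alias", "analytics");

        AdminClientConfig config = new AdminClientConfig(props);

        // originalsWithPrefix() strips the prefix, leaving e.g. cluster.alias -> analytics.
        Map<String, Object> contextLabels =
            config.originalsWithPrefix(CommonClientConfigs.METRICS_CONTEXT_PREFIX);

        KafkaMetricsContext context = new KafkaMetricsContext("kafka.admin.client", contextLabels);

        // Prints the labels, including the _namespace label that KafkaMetricsContext adds itself.
        System.out.println(context.contextLabels());
    }
}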
Use of org.apache.kafka.common.metrics.KafkaMetricsContext in project kafka by apache.
The class StreamThreadTest, method shouldCreateMetricsAtStartup.
@Test
public void shouldCreateMetricsAtStartup() {
    final StreamThread thread = createStreamThread(CLIENT_ID, config, false);
    final String defaultGroupName = "stream-thread-metrics";
    final Map<String, String> defaultTags = Collections.singletonMap("thread-id", thread.getName());
    final String descriptionIsNotVerified = "";
    assertNotNull(metrics.metrics().get(metrics.metricName("commit-latency-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("commit-latency-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("commit-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("commit-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("commit-ratio", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-latency-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-latency-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-ratio", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-records-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("poll-records-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-latency-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-latency-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-ratio", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-records-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("process-records-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-latency-avg", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-latency-max", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("punctuate-ratio", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("task-created-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("task-created-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("task-closed-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNotNull(metrics.metrics().get(metrics.metricName("task-closed-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNull(metrics.metrics().get(metrics.metricName("skipped-records-rate", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    assertNull(metrics.metrics().get(metrics.metricName("skipped-records-total", defaultGroupName, descriptionIsNotVerified, defaultTags)));
    final String taskGroupName = "stream-task-metrics";
    final Map<String, String> taskTags = mkMap(mkEntry("task-id", "all"), mkEntry("thread-id", thread.getName()));
    assertNull(metrics.metrics().get(metrics.metricName("commit-latency-avg", taskGroupName, descriptionIsNotVerified, taskTags)));
    assertNull(metrics.metrics().get(metrics.metricName("commit-latency-max", taskGroupName, descriptionIsNotVerified, taskTags)));
    assertNull(metrics.metrics().get(metrics.metricName("commit-rate", taskGroupName, descriptionIsNotVerified, taskTags)));
    final JmxReporter reporter = new JmxReporter();
    final MetricsContext metricsContext = new KafkaMetricsContext("kafka.streams");
    reporter.contextChange(metricsContext);
    metrics.addReporter(reporter);
    assertEquals(CLIENT_ID + "-StreamThread-1", thread.getName());
    assertTrue(reporter.containsMbean(String.format("kafka.streams:type=%s,%s=%s", defaultGroupName, "thread-id", thread.getName())));
    assertFalse(reporter.containsMbean(String.format("kafka.streams:type=stream-task-metrics,%s=%s,task-id=all", "thread-id", thread.getName())));
}
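The tail of this test shows the pattern the page is about: KafkaMetricsContext("kafka.streams") supplies the namespace, JmxReporter.contextChange turns it into the JMX domain, and containsMbean can then be checked against "kafka.streams:type=<group>,<tags>". Below is a standalone sketch of that mechanism with a hypothetical metric and tag value instead of the test's StreamThread fixture.

import java.util.Collections;
import java.util.Map;

import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetricsContext;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;

public class JmxNamespaceSketch {
    public static void main(String[] args) {
        try (Metrics metrics = new Metrics()) {
            // Register one metric under a thread-level group/tag, mirroring the thread metrics above.
            Map<String, String> tags = Collections.singletonMap("thread-id", "demo-thread-1");
            Sensor sensor = metrics.sensor("demo");
            sensor.add(metrics.metricName("commit-latency-avg", "stream-thread-metrics", "", tags), new Avg());

            // The context namespace becomes the JMX domain once the reporter is attached.
            JmxReporter reporter = new JmxReporter();
            reporter.contextChange(new KafkaMetricsContext("kafka.streams"));
            metrics.addReporter(reporter);

            // Expected to print true.
            System.out.println(reporter.containsMbean(
                "kafka.streams:type=stream-thread-metrics,thread-id=demo-thread-1"));
        }
    }
}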
Use of org.apache.kafka.common.metrics.KafkaMetricsContext in project kafka by apache.
The class MeteredTimestampedKeyValueStoreTest, method testMetrics.
@Test
public void testMetrics() {
    init();
    final JmxReporter reporter = new JmxReporter();
    final MetricsContext metricsContext = new KafkaMetricsContext("kafka.streams");
    reporter.contextChange(metricsContext);
    metrics.addReporter(reporter);
    assertTrue(reporter.containsMbean(String.format(
        "kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
        STORE_LEVEL_GROUP, THREAD_ID_TAG_KEY, threadId, taskId.toString(), STORE_TYPE, STORE_NAME)));
}
Use of org.apache.kafka.common.metrics.KafkaMetricsContext in project kafka by apache.
The class MeteredKeyValueStoreTest, method testMetrics.
@Test
public void testMetrics() {
    init();
    final JmxReporter reporter = new JmxReporter();
    final MetricsContext metricsContext = new KafkaMetricsContext("kafka.streams");
    reporter.contextChange(metricsContext);
    metrics.addReporter(reporter);
    assertTrue(reporter.containsMbean(String.format(
        "kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
        STORE_LEVEL_GROUP, THREAD_ID_TAG_KEY, threadId, taskId.toString(), STORE_TYPE, STORE_NAME)));
}
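Both store tests assert an MBean whose name combines the context namespace with task-id and <store-type>-state-id tags; the constants they use (STORE_LEVEL_GROUP, STORE_TYPE, THREAD_ID_TAG_KEY, STORE_NAME, taskId, threadId) are defined outside this excerpt. The sketch below reproduces that naming with illustrative values ("stream-state-metrics", "rocksdb", "0_0", "my-store"); treat them as assumptions rather than the tests' actual fixture values.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.KafkaMetricsContext;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Max;

public class StoreLevelMbeanSketch {
    public static void main(String[] args) {
        try (Metrics metrics = new Metrics()) {
            // Illustrative store-level tags; the real tests derive these from their fixtures.
            Map<String, String> storeTags = new LinkedHashMap<>();
            storeTags.put("thread-id", "demo-thread-1");
            storeTags.put("task-id", "0_0");
            storeTags.put("rocksdb-state-id", "my-store");

            Sensor sensor = metrics.sensor("put-latency");
            sensor.add(metrics.metricName("put-latency-max", "stream-state-metrics", "", storeTags), new Max());

            JmxReporter reporter = new JmxReporter();
            reporter.contextChange(new KafkaMetricsContext("kafka.streams"));
            metrics.addReporter(reporter);

            // Tag order in the MBean name follows the insertion order of the tag map; expected to print true.
            System.out.println(reporter.containsMbean(
                "kafka.streams:type=stream-state-metrics,thread-id=demo-thread-1,task-id=0_0,rocksdb-state-id=my-store"));
        }
    }
}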