Skip to content

Commit

Permalink
Kafka meter binder without JMX (#1835)
Browse files Browse the repository at this point in the history
Provides a replacement for the `KafkaConsumerMetrics` binder (uses JMX) - `KafkaClientMetrics` and `KafkaStreamsMetrics` binders based on metrics provided by the Kafka client API. This allows all metrics provided by the client to be registered as Micrometer metrics, with the caveat that metrics with the same name but fewer tags will not be registered. For some metrics, Kafka provides a total metric and one broken down by things like topic and partition. Micrometer will opt to register the most specific ones.

Resolves #1095
Resolves #1096
  • Loading branch information
jeqo authored Mar 10, 2020
1 parent 6255fb3 commit abc0b30
Show file tree
Hide file tree
Showing 14 changed files with 1,050 additions and 116 deletions.
7 changes: 5 additions & 2 deletions dependencies.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,8 @@ def VERSIONS = [
'net.sf.ehcache:ehcache:latest.release',
'org.apache.httpcomponents:httpasyncclient:latest.release',
'org.apache.httpcomponents:httpclient:latest.release',
// Pin version temporarily to restore builds. See gh-1756
'org.apache.kafka:kafka-clients:2.3.1',
'org.apache.kafka:kafka-clients:latest.release',
'org.apache.kafka:kafka-streams:latest.release',
'org.apache.logging.log4j:log4j-core:2.+',
'org.apache.tomcat.embed:tomcat-embed-core:8.+',
'org.aspectj:aspectjweaver:1.8.+',
Expand Down Expand Up @@ -69,6 +69,9 @@ def VERSIONS = [
'org.springframework:spring-test:4.+',
'ru.lanwen.wiremock:wiremock-junit5:latest.release',
'software.amazon.awssdk:cloudwatch:latest.release',
'org.testcontainers:testcontainers:latest.release',
'org.testcontainers:junit-jupiter:latest.release',
'org.testcontainers:kafka:latest.release'
]

subprojects {
Expand Down
8 changes: 8 additions & 0 deletions micrometer-core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,9 @@ dependencies {

optionalApi 'org.jooq:jooq'

optionalApi 'org.apache.kafka:kafka-clients'
optionalApi 'org.apache.kafka:kafka-streams'

testImplementation 'io.projectreactor:reactor-test'

// JUnit 5
Expand Down Expand Up @@ -86,4 +89,9 @@ dependencies {
testImplementation 'com.github.tomakehurst:wiremock-jre8'

testImplementation 'de.flapdoodle.embed:de.flapdoodle.embed.mongo'

// Kafka binder IT dependencies
testImplementation 'org.testcontainers:testcontainers'
testImplementation 'org.testcontainers:junit-jupiter'
testImplementation 'org.testcontainers:kafka'
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
/**
* Copyright 2020 Pivotal Software, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.core.instrument.binder.kafka;

import io.micrometer.core.annotation.Incubating;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.MeterBinder;
import io.micrometer.core.lang.NonNullApi;
import io.micrometer.core.lang.NonNullFields;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.common.Metric;

/**
 * {@link MeterBinder} for Kafka clients (producer, consumer and admin client).
 * <p>
 * Built on top of the {@code metrics()} method that every Kafka client exposes, returning a
 * {@link Metric} map.
 * <p>
 * Meter names have the following convention: {@code kafka.(metric_group).(metric_name)}
 *
 * @author Jorge Quilcate
 * @see <a href="https://docs.confluent.io/current/kafka/monitoring.html">Kafka monitoring
 * documentation</a>
 * @since 1.4.0
 */
@Incubating(since = "1.4.0")
@NonNullApi
@NonNullFields
public class KafkaClientMetrics extends KafkaMetrics implements MeterBinder {

    /**
     * Instruments a Kafka {@link Producer}.
     *
     * @param kafkaProducer producer instance to be instrumented
     * @param tags additional tags
     */
    public KafkaClientMetrics(Producer<?, ?> kafkaProducer, Iterable<Tag> tags) {
        super(() -> kafkaProducer.metrics(), tags);
    }

    /**
     * Instruments a Kafka {@link Producer}.
     *
     * @param kafkaProducer producer instance to be instrumented
     */
    public KafkaClientMetrics(Producer<?, ?> kafkaProducer) {
        super(() -> kafkaProducer.metrics());
    }

    /**
     * Instruments a Kafka {@link Consumer}.
     *
     * @param kafkaConsumer consumer instance to be instrumented
     * @param tags additional tags
     */
    public KafkaClientMetrics(Consumer<?, ?> kafkaConsumer, Iterable<Tag> tags) {
        super(() -> kafkaConsumer.metrics(), tags);
    }

    /**
     * Instruments a Kafka {@link Consumer}.
     *
     * @param kafkaConsumer consumer instance to be instrumented
     */
    public KafkaClientMetrics(Consumer<?, ?> kafkaConsumer) {
        super(() -> kafkaConsumer.metrics());
    }

    /**
     * Instruments a Kafka {@link AdminClient}.
     *
     * @param adminClient instance to be instrumented
     * @param tags additional tags
     */
    public KafkaClientMetrics(AdminClient adminClient, Iterable<Tag> tags) {
        super(() -> adminClient.metrics(), tags);
    }

    /**
     * Instruments a Kafka {@link AdminClient}.
     *
     * @param adminClient instance to be instrumented
     */
    public KafkaClientMetrics(AdminClient adminClient) {
        super(() -> adminClient.metrics());
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
@Incubating(since = "1.1.0")
@NonNullApi
@NonNullFields
@Deprecated
public class KafkaConsumerMetrics implements MeterBinder, AutoCloseable {
private static final String JMX_DOMAIN = "kafka.consumer";
private static final String METRIC_NAME_PREFIX = "kafka.consumer.";
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,198 @@
/**
* Copyright 2020 Pivotal Software, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micrometer.core.instrument.binder.kafka;

import io.micrometer.core.annotation.Incubating;
import io.micrometer.core.instrument.FunctionCounter;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.Meter;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Tag;
import io.micrometer.core.instrument.binder.MeterBinder;
import io.micrometer.core.lang.NonNullApi;
import io.micrometer.core.lang.NonNullFields;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.function.ToDoubleFunction;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;

import static java.util.Collections.emptyList;

/**
 * Kafka metrics binder.
 * <p>
 * It is based on the {@code metrics()} method returning a {@link Metric} map exposed by clients
 * and the streams interface.
 * <p>
 * Meter names have the following convention: {@code kafka.(metric_group).(metric_name)}
 *
 * @author Jorge Quilcate
 * @see <a href="https://docs.confluent.io/current/kafka/monitoring.html">Kafka monitoring
 * documentation</a>
 * @since 1.4.0
 */
@Incubating(since = "1.4.0")
@NonNullApi
@NonNullFields
class KafkaMetrics implements MeterBinder {
    static final String METRIC_NAME_PREFIX = "kafka.";
    static final String METRIC_GROUP_APP_INFO = "app-info";
    static final String METRIC_GROUP_METRICS_COUNT = "kafka-metrics-count";
    static final String VERSION_METRIC_NAME = "version";
    static final String START_TIME_METRIC_NAME = "start-time-ms";

    // Supplies the current snapshot of Kafka client metrics; re-queried on each value read
    // so that metrics added by the client after binding are picked up.
    private final Supplier<Map<MetricName, ? extends Metric>> metricsSupplier;

    // User-provided tags applied to every registered meter.
    private final Iterable<Tag> extraTags;

    // Number of tags added on top of the Kafka-provided ones: user extra tags plus the
    // kafka-version tag that metricTags() always prepends.
    private final int extraTagsSize;

    /**
     * Keeps track of current set of metrics. When this values change, metrics are bound again.
     */
    private volatile Set<MetricName> currentMeters = new HashSet<>();

    // Kafka client version, read from the "app-info" group in bindTo(); "unknown" until then.
    private String kafkaVersion = "unknown";

    KafkaMetrics(Supplier<Map<MetricName, ? extends Metric>> metricsSupplier) {
        this(metricsSupplier, emptyList());
    }

    KafkaMetrics(Supplier<Map<MetricName, ? extends Metric>> metricsSupplier, Iterable<Tag> extraTags) {
        this.metricsSupplier = metricsSupplier;
        this.extraTags = extraTags;
        int i = 0;
        for (Tag ignored : extraTags) i++;
        this.extraTagsSize = i + 1; // 1 = kafka-version tag added by metricTags()
    }

    @Override public void bindTo(MeterRegistry registry) {
        Map<MetricName, ? extends Metric> metrics = metricsSupplier.get();
        // Collect static metrics and tags from the "app-info" group first: the kafka-version
        // value is attached as a tag to every meter registered afterwards.
        Metric startTimeMetric = null;
        for (Map.Entry<MetricName, ? extends Metric> entry : metrics.entrySet()) {
            MetricName name = entry.getKey();
            if (METRIC_GROUP_APP_INFO.equals(name.group())) {
                if (VERSION_METRIC_NAME.equals(name.name()))
                    kafkaVersion = (String) entry.getValue().metricValue();
                else if (START_TIME_METRIC_NAME.equals(name.name()))
                    startTimeMetric = entry.getValue();
            }
        }
        if (startTimeMetric != null) bindMeter(registry, startTimeMetric);
        // Collect dynamic metrics
        checkAndBindMetrics(registry);
    }

    /**
     * Gather metrics from Kafka metrics API and register Meters.
     * <p>
     * As this is a one-off execution when binding a Kafka client, Meters include a call to this
     * validation to double-check new metrics when returning values. This should only add the cost of
     * validating meters registered counter when no new meters are present.
     */
    void checkAndBindMetrics(MeterRegistry registry) {
        Map<MetricName, ? extends Metric> metrics = metricsSupplier.get();
        if (!currentMeters.equals(metrics.keySet())) {
            synchronized (this) { // Enforce binding only happens once when the metric set changes
                if (!currentMeters.equals(metrics.keySet())) {
                    // Register meters
                    currentMeters = new HashSet<>(metrics.keySet());
                    metrics.forEach((name, metric) -> {
                        // Filter out metrics from groups that include metadata
                        if (METRIC_GROUP_APP_INFO.equals(name.group())) return;
                        if (METRIC_GROUP_METRICS_COUNT.equals(name.group())) return;
                        // Kafka exposes some metrics both in aggregate and broken down by
                        // topic/partition (i.e. with more tags). Keep only the most specific
                        // variant: drop registered meters with fewer tags and skip this metric
                        // if a more specific one is already present.
                        List<Tag> tags = metricTags(metric); // loop-invariant: compute once
                        boolean hasLessTags = false;
                        Collection<Meter> meters = registry.find(metricName(metric)).meters();
                        for (Meter meter : meters) {
                            // NOTE(review): a meter registered with exactly these tags has
                            // tags.size() + extraTagsSize - 1 tags and so also matches this
                            // "fewer tags" condition and is removed/re-registered — confirm
                            // this re-registration is intended.
                            if (meter.getId().getTags().size() < (tags.size() + extraTagsSize))
                                registry.remove(meter);
                            // Check if already exists
                            else if (meter.getId().getTags().equals(tags)) return;
                            else hasLessTags = true;
                        }
                        if (hasLessTags) return;
                        // Filter out non-numeric values
                        if (!isNumber(metric)) return;
                        bindMeter(registry, metric);
                    });
                }
            }
        }
    }

    /** Returns {@code true} if the metric's current value is a supported numeric type. */
    private boolean isNumber(Metric metric) {
        return metric.metricValue() instanceof Double
                || metric.metricValue() instanceof Float
                || metric.metricValue() instanceof Integer
                || metric.metricValue() instanceof Long;
    }

    /**
     * Registers the metric with the meter type implied by its name: cumulative
     * {@code total}/{@code count} metrics become function counters, everything else
     * (min/max/avg/rate, ...) becomes a gauge.
     */
    private void bindMeter(MeterRegistry registry, Metric metric) {
        String name = metricName(metric);
        if (name.endsWith("total") || name.endsWith("count"))
            registerCounter(registry, metric, name, extraTags);
        else
            registerGauge(registry, metric, name, extraTags);
    }

    private void registerGauge(MeterRegistry registry, Metric metric, String metricName, Iterable<Tag> extraTags) {
        Gauge.builder(metricName, metric, toMetricValue(registry))
                .tags(metricTags(metric))
                .tags(extraTags)
                .description(metric.metricName().description())
                .register(registry);
    }

    private void registerCounter(MeterRegistry registry, Metric metric, String metricName, Iterable<Tag> extraTags) {
        FunctionCounter.builder(metricName, metric, toMetricValue(registry))
                .tags(metricTags(metric))
                .tags(extraTags)
                .description(metric.metricName().description())
                .register(registry);
    }

    /**
     * Returns a value function that converts the metric's current value to {@code double}.
     * Each read also re-checks for newly appeared Kafka metrics; in the common case (no new
     * metrics) this only adds the cost of comparing metric-name sets.
     */
    private ToDoubleFunction<Metric> toMetricValue(MeterRegistry registry) {
        return metric -> {
            // Double-check if new metrics are registered; if not (common scenario)
            // it only adds metrics count validation
            checkAndBindMetrics(registry);
            if (metric.metricValue() instanceof Double) return (double) metric.metricValue();
            else if (metric.metricValue() instanceof Integer) return ((Integer) metric.metricValue()).doubleValue();
            else if (metric.metricValue() instanceof Long) return ((Long) metric.metricValue()).doubleValue();
            else return ((Float) metric.metricValue()).doubleValue();
        };
    }

    /** Tags for a metric: the kafka-version tag followed by the Kafka-provided tags. */
    private List<Tag> metricTags(Metric metric) {
        List<Tag> tags = new ArrayList<>();
        tags.add(Tag.of("kafka-version", kafkaVersion));
        metric.metricName().tags().forEach((key, value) -> tags.add(Tag.of(key, value)));
        return tags;
    }

    /**
     * Builds the meter name as {@code kafka.(group).(name)}, stripping the redundant
     * "-metrics" suffix from group names and converting dashes to dots.
     */
    private String metricName(Metric metric) {
        String name = METRIC_NAME_PREFIX + metric.metricName().group() + "." + metric.metricName().name();
        // String.replace: these are plain literals, no need for replaceAll's regex machinery.
        return name.replace("-metrics", "").replace('-', '.');
    }
}
Loading

0 comments on commit abc0b30

Please sign in to comment.