Spring Kafka library instrumentation (#6283)

* Spring Kafka library instrumentation

* Merge and fix prior merge

Co-authored-by: Trask Stalnaker <trask.stalnaker@gmail.com>
This commit is contained in:
Mateusz Rzeszutek 2022-07-18 23:38:44 +02:00 committed by GitHub
parent 257009f944
commit 5bc7abf178
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 637 additions and 326 deletions

View File

@ -17,6 +17,7 @@ dependencies {
bootstrap(project(":instrumentation:kafka:kafka-clients:kafka-clients-0.11:bootstrap")) bootstrap(project(":instrumentation:kafka:kafka-clients:kafka-clients-0.11:bootstrap"))
implementation(project(":instrumentation:kafka:kafka-clients:kafka-clients-common:library")) implementation(project(":instrumentation:kafka:kafka-clients:kafka-clients-common:library"))
implementation(project(":instrumentation:spring:spring-kafka-2.7:library"))
library("org.springframework.kafka:spring-kafka:2.7.0") library("org.springframework.kafka:spring-kafka:2.7.0")

View File

@ -5,20 +5,17 @@
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.javaagent.instrumentation.spring.kafka;
import static io.opentelemetry.javaagent.instrumentation.spring.kafka.SpringKafkaSingletons.telemetry;
import static net.bytebuddy.matcher.ElementMatchers.isProtected; import static net.bytebuddy.matcher.ElementMatchers.isProtected;
import static net.bytebuddy.matcher.ElementMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.returns; import static net.bytebuddy.matcher.ElementMatchers.returns;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments; import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
import io.opentelemetry.context.Context;
import io.opentelemetry.instrumentation.api.util.VirtualField;
import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation; import io.opentelemetry.javaagent.extension.instrumentation.TypeInstrumentation;
import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer; import io.opentelemetry.javaagent.extension.instrumentation.TypeTransformer;
import net.bytebuddy.asm.Advice; import net.bytebuddy.asm.Advice;
import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher; import net.bytebuddy.matcher.ElementMatcher;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.kafka.listener.BatchInterceptor; import org.springframework.kafka.listener.BatchInterceptor;
import org.springframework.kafka.listener.RecordInterceptor; import org.springframework.kafka.listener.RecordInterceptor;
@ -56,13 +53,13 @@ public class AbstractMessageListenerContainerInstrumentation implements TypeInst
public static <K, V> void onExit( public static <K, V> void onExit(
@Advice.Return(readOnly = false) BatchInterceptor<K, V> interceptor) { @Advice.Return(readOnly = false) BatchInterceptor<K, V> interceptor) {
if (!(interceptor instanceof InstrumentedBatchInterceptor)) { if (interceptor == null
VirtualField<ConsumerRecords<K, V>, Context> receiveContextField = || !interceptor
VirtualField.find(ConsumerRecords.class, Context.class); .getClass()
VirtualField<ConsumerRecords<K, V>, State<ConsumerRecords<K, V>>> stateField = .getName()
VirtualField.find(ConsumerRecords.class, State.class); .equals(
interceptor = "io.opentelemetry.instrumentation.spring.kafka.v2_7.InstrumentedBatchInterceptor")) {
new InstrumentedBatchInterceptor<>(receiveContextField, stateField, interceptor); interceptor = telemetry().createBatchInterceptor(interceptor);
} }
} }
} }
@ -74,13 +71,13 @@ public class AbstractMessageListenerContainerInstrumentation implements TypeInst
public static <K, V> void onExit( public static <K, V> void onExit(
@Advice.Return(readOnly = false) RecordInterceptor<K, V> interceptor) { @Advice.Return(readOnly = false) RecordInterceptor<K, V> interceptor) {
if (!(interceptor instanceof InstrumentedRecordInterceptor)) { if (interceptor == null
VirtualField<ConsumerRecord<K, V>, Context> receiveContextField = || !interceptor
VirtualField.find(ConsumerRecord.class, Context.class); .getClass()
VirtualField<ConsumerRecord<K, V>, State<ConsumerRecord<K, V>>> stateField = .getName()
VirtualField.find(ConsumerRecord.class, State.class); .equals(
interceptor = "io.opentelemetry.instrumentation.spring.kafka.v2_7.InstrumentedRecordInterceptor")) {
new InstrumentedRecordInterceptor<>(receiveContextField, stateField, interceptor); interceptor = telemetry().createRecordInterceptor(interceptor);
} }
} }
} }

View File

@ -6,42 +6,26 @@
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.javaagent.instrumentation.spring.kafka;
import io.opentelemetry.api.GlobalOpenTelemetry; import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter; import io.opentelemetry.instrumentation.spring.kafka.v2_7.SpringKafkaTelemetry;
import io.opentelemetry.instrumentation.kafka.internal.KafkaInstrumenterFactory;
import io.opentelemetry.javaagent.bootstrap.internal.ExperimentalConfig; import io.opentelemetry.javaagent.bootstrap.internal.ExperimentalConfig;
import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig; import io.opentelemetry.javaagent.bootstrap.internal.InstrumentationConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
public final class SpringKafkaSingletons { public final class SpringKafkaSingletons {
private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-kafka-2.7"; private static final SpringKafkaTelemetry TELEMETRY =
SpringKafkaTelemetry.builder(GlobalOpenTelemetry.get())
.setCaptureExperimentalSpanAttributes(
InstrumentationConfig.get()
.getBoolean("otel.instrumentation.kafka.experimental-span-attributes", false))
.setPropagationEnabled(
InstrumentationConfig.get()
.getBoolean("otel.instrumentation.kafka.client-propagation.enabled", true))
.setMessagingReceiveInstrumentationEnabled(
ExperimentalConfig.get().messagingReceiveInstrumentationEnabled())
.build();
private static final Instrumenter<ConsumerRecords<?, ?>, Void> BATCH_PROCESS_INSTRUMENTER; public static SpringKafkaTelemetry telemetry() {
private static final Instrumenter<ConsumerRecord<?, ?>, Void> PROCESS_INSTRUMENTER; return TELEMETRY;
static {
KafkaInstrumenterFactory factory =
new KafkaInstrumenterFactory(GlobalOpenTelemetry.get(), INSTRUMENTATION_NAME)
.setCaptureExperimentalSpanAttributes(
InstrumentationConfig.get()
.getBoolean("otel.instrumentation.kafka.experimental-span-attributes", false))
.setPropagationEnabled(
InstrumentationConfig.get()
.getBoolean("otel.instrumentation.kafka.client-propagation.enabled", true))
.setMessagingReceiveInstrumentationEnabled(
ExperimentalConfig.get().messagingReceiveInstrumentationEnabled())
.setErrorCauseExtractor(SpringKafkaErrorCauseExtractor.INSTANCE);
BATCH_PROCESS_INSTRUMENTER = factory.createBatchProcessInstrumenter();
PROCESS_INSTRUMENTER = factory.createConsumerProcessInstrumenter();
}
public static Instrumenter<ConsumerRecords<?, ?>, Void> batchProcessInstrumenter() {
return BATCH_PROCESS_INSTRUMENTER;
}
public static Instrumenter<ConsumerRecord<?, ?>, Void> processInstrumenter() {
return PROCESS_INSTRUMENTER;
} }
private SpringKafkaSingletons() {} private SpringKafkaSingletons() {}

View File

@ -1,26 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.javaagent.instrumentation.spring.kafka;
import com.google.auto.value.AutoValue;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
@AutoValue
public abstract class State<REQUEST> {
public static <REQUEST> State<REQUEST> create(REQUEST request, Context context, Scope scope) {
return new AutoValue_State<>(request, context, scope);
}
public abstract REQUEST request();
public abstract Context context();
public abstract Scope scope();
State() {}
}

View File

@ -9,21 +9,39 @@ import static io.opentelemetry.api.common.AttributeKey.longKey;
import static io.opentelemetry.instrumentation.testing.util.TelemetryDataUtil.orderByRootSpanKind; import static io.opentelemetry.instrumentation.testing.util.TelemetryDataUtil.orderByRootSpanKind;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies;
import static java.util.Collections.emptyList;
import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension;
import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.sdk.trace.data.LinkData;
import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.sdk.trace.data.SpanData;
import io.opentelemetry.sdk.trace.data.StatusData; import io.opentelemetry.sdk.trace.data.StatusData;
import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
import io.opentelemetry.testing.AbstractSpringKafkaTest; import io.opentelemetry.testing.AbstractSpringKafkaTest;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.assertj.core.api.AbstractLongAssert; import org.assertj.core.api.AbstractLongAssert;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
class SpringKafkaTest extends AbstractSpringKafkaTest { class SpringKafkaTest extends AbstractSpringKafkaTest {
@RegisterExtension
protected static final InstrumentationExtension testing = AgentInstrumentationExtension.create();
@Override
protected InstrumentationExtension testing() {
return testing;
}
@Override
protected List<Class<?>> additionalSpringConfigs() {
return emptyList();
}
@Test @Test
void shouldCreateSpansForSingleRecordProcess() { void shouldCreateSpansForSingleRecordProcess() {
testing.runWithSpan( testing.runWithSpan(

View File

@ -5,206 +5,26 @@
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.javaagent.instrumentation.spring.kafka;
import static io.opentelemetry.instrumentation.testing.util.TelemetryDataUtil.orderByRootSpanKind; import static java.util.Collections.emptyList;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies;
import io.opentelemetry.api.trace.SpanKind; import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension;
import io.opentelemetry.sdk.trace.data.LinkData; import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
import io.opentelemetry.sdk.trace.data.SpanData; import io.opentelemetry.testing.AbstractSpringKafkaNoReceiveTelemetryTest;
import io.opentelemetry.sdk.trace.data.StatusData; import java.util.List;
import io.opentelemetry.semconv.trace.attributes.SemanticAttributes; import org.junit.jupiter.api.extension.RegisterExtension;
import io.opentelemetry.testing.AbstractSpringKafkaTest;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.assertj.core.api.AbstractLongAssert;
import org.junit.jupiter.api.Test;
class SpringKafkaNoReceiveTelemetryTest extends AbstractSpringKafkaTest { class SpringKafkaNoReceiveTelemetryTest extends AbstractSpringKafkaNoReceiveTelemetryTest {
@Test @RegisterExtension
void shouldCreateSpansForSingleRecordProcess() { protected static final InstrumentationExtension testing = AgentInstrumentationExtension.create();
testing.runWithSpan(
"producer",
() -> {
kafkaTemplate.executeInTransaction(
ops -> {
ops.send("testSingleTopic", "10", "testSpan");
return 0;
});
});
testing.waitAndAssertTraces( @Override
trace -> protected InstrumentationExtension testing() {
trace.hasSpansSatisfyingExactly( return testing;
span -> span.hasName("producer"),
span ->
span.hasName("testSingleTopic send")
.hasKind(SpanKind.PRODUCER)
.hasParent(trace.getSpan(0))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
span ->
span.hasName("testSingleTopic process")
.hasKind(SpanKind.CONSUMER)
.hasParent(trace.getSpan(1))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
equalTo(SemanticAttributes.MESSAGING_OPERATION, "process"),
satisfies(
SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES,
AbstractLongAssert::isNotNegative),
satisfies(
SemanticAttributes.MESSAGING_KAFKA_PARTITION,
AbstractLongAssert::isNotNegative)),
span -> span.hasName("consumer").hasParent(trace.getSpan(2))));
} }
@Test @Override
void shouldHandleFailureInKafkaListener() { protected List<Class<?>> additionalSpringConfigs() {
testing.runWithSpan( return emptyList();
"producer",
() -> {
kafkaTemplate.executeInTransaction(
ops -> {
ops.send("testSingleTopic", "10", "error");
return 0;
});
});
testing.waitAndAssertTraces(
trace ->
trace.hasSpansSatisfyingExactly(
span -> span.hasName("producer"),
span ->
span.hasName("testSingleTopic send")
.hasKind(SpanKind.PRODUCER)
.hasParent(trace.getSpan(0))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
span ->
span.hasName("testSingleTopic process")
.hasKind(SpanKind.CONSUMER)
.hasParent(trace.getSpan(1))
.hasStatus(StatusData.error())
.hasException(new IllegalArgumentException("boom"))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
equalTo(SemanticAttributes.MESSAGING_OPERATION, "process"),
satisfies(
SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES,
AbstractLongAssert::isNotNegative),
satisfies(
SemanticAttributes.MESSAGING_KAFKA_PARTITION,
AbstractLongAssert::isNotNegative)),
span -> span.hasName("consumer").hasParent(trace.getSpan(2))));
}
@Test
void shouldCreateSpansForBatchReceiveAndProcess() throws InterruptedException {
Map<String, String> batchMessages = new HashMap<>();
batchMessages.put("10", "testSpan1");
batchMessages.put("20", "testSpan2");
sendBatchMessages(batchMessages);
AtomicReference<SpanData> producer1 = new AtomicReference<>();
AtomicReference<SpanData> producer2 = new AtomicReference<>();
testing.waitAndAssertSortedTraces(
orderByRootSpanKind(SpanKind.INTERNAL, SpanKind.CONSUMER),
trace -> {
trace.hasSpansSatisfyingExactly(
span -> span.hasName("producer"),
span ->
span.hasName("testBatchTopic send")
.hasKind(SpanKind.PRODUCER)
.hasParent(trace.getSpan(0))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
span ->
span.hasName("testBatchTopic send")
.hasKind(SpanKind.PRODUCER)
.hasParent(trace.getSpan(0))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")));
producer1.set(trace.getSpan(1));
producer2.set(trace.getSpan(2));
},
trace ->
trace.hasSpansSatisfyingExactly(
span ->
span.hasName("testBatchTopic process")
.hasKind(SpanKind.CONSUMER)
.hasNoParent()
.hasLinks(
LinkData.create(producer1.get().getSpanContext()),
LinkData.create(producer2.get().getSpanContext()))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
equalTo(SemanticAttributes.MESSAGING_OPERATION, "process")),
span -> span.hasName("consumer").hasParent(trace.getSpan(0))));
}
@Test
void shouldHandleFailureInKafkaBatchListener() {
testing.runWithSpan(
"producer",
() -> {
kafkaTemplate.executeInTransaction(
ops -> {
ops.send("testBatchTopic", "10", "error");
return 0;
});
});
AtomicReference<SpanData> producer = new AtomicReference<>();
testing.waitAndAssertSortedTraces(
orderByRootSpanKind(SpanKind.INTERNAL, SpanKind.CONSUMER),
trace -> {
trace.hasSpansSatisfyingExactly(
span -> span.hasName("producer"),
span ->
span.hasName("testBatchTopic send")
.hasKind(SpanKind.PRODUCER)
.hasParent(trace.getSpan(0))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")));
producer.set(trace.getSpan(1));
},
trace ->
trace.hasSpansSatisfyingExactly(
span ->
span.hasName("testBatchTopic process")
.hasKind(SpanKind.CONSUMER)
.hasNoParent()
.hasLinks(LinkData.create(producer.get().getSpanContext()))
.hasStatus(StatusData.error())
.hasException(new IllegalArgumentException("boom"))
.hasAttributesSatisfyingExactly(
equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
equalTo(SemanticAttributes.MESSAGING_OPERATION, "process")),
span -> span.hasName("consumer").hasParent(trace.getSpan(0))));
} }
} }

View File

@@ -0,0 +1,21 @@
// Build script for the spring-kafka-2.7 library instrumentation module.
plugins {
id("otel.library-instrumentation")
}
dependencies {
// AutoValue is used only at compile time to generate the State value class.
compileOnly("com.google.auto.value:auto-value-annotations")
annotationProcessor("com.google.auto.value:auto-value")
implementation(project(":instrumentation:kafka:kafka-clients:kafka-clients-common:library"))
// compileOnly: spring-kafka is expected to be on the instrumented application's classpath.
compileOnly("org.springframework.kafka:spring-kafka:2.7.0")
testImplementation(project(":instrumentation:spring:spring-kafka-2.7:testing"))
testImplementation(project(":instrumentation:kafka:kafka-clients:kafka-clients-2.6:library"))
// 2.7.0 has a bug that makes decorating a Kafka Producer impossible
testImplementation("org.springframework.kafka:spring-kafka:2.7.1")
testLibrary("org.springframework.boot:spring-boot-starter-test:2.5.3")
testLibrary("org.springframework.boot:spring-boot-starter:2.5.3")
}

View File

@ -3,30 +3,31 @@
* SPDX-License-Identifier: Apache-2.0 * SPDX-License-Identifier: Apache-2.0
*/ */
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import static io.opentelemetry.javaagent.instrumentation.spring.kafka.SpringKafkaSingletons.batchProcessInstrumenter;
import io.opentelemetry.context.Context; import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope; import io.opentelemetry.context.Scope;
import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
import io.opentelemetry.instrumentation.api.util.VirtualField; import io.opentelemetry.instrumentation.api.util.VirtualField;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.kafka.listener.BatchInterceptor; import org.springframework.kafka.listener.BatchInterceptor;
public final class InstrumentedBatchInterceptor<K, V> implements BatchInterceptor<K, V> { final class InstrumentedBatchInterceptor<K, V> implements BatchInterceptor<K, V> {
private final VirtualField<ConsumerRecords<K, V>, Context> receiveContextField; private static final VirtualField<ConsumerRecords<?, ?>, Context> receiveContextField =
private final VirtualField<ConsumerRecords<K, V>, State<ConsumerRecords<K, V>>> stateField; VirtualField.find(ConsumerRecords.class, Context.class);
private static final VirtualField<ConsumerRecords<?, ?>, State<ConsumerRecords<?, ?>>>
stateField = VirtualField.find(ConsumerRecords.class, State.class);
private final Instrumenter<ConsumerRecords<?, ?>, Void> batchProcessInstrumenter;
@Nullable private final BatchInterceptor<K, V> decorated; @Nullable private final BatchInterceptor<K, V> decorated;
public InstrumentedBatchInterceptor( InstrumentedBatchInterceptor(
VirtualField<ConsumerRecords<K, V>, Context> receiveContextField, Instrumenter<ConsumerRecords<?, ?>, Void> batchProcessInstrumenter,
VirtualField<ConsumerRecords<K, V>, State<ConsumerRecords<K, V>>> stateField,
@Nullable BatchInterceptor<K, V> decorated) { @Nullable BatchInterceptor<K, V> decorated) {
this.receiveContextField = receiveContextField; this.batchProcessInstrumenter = batchProcessInstrumenter;
this.stateField = stateField;
this.decorated = decorated; this.decorated = decorated;
} }
@ -34,8 +35,8 @@ public final class InstrumentedBatchInterceptor<K, V> implements BatchIntercepto
public ConsumerRecords<K, V> intercept(ConsumerRecords<K, V> records, Consumer<K, V> consumer) { public ConsumerRecords<K, V> intercept(ConsumerRecords<K, V> records, Consumer<K, V> consumer) {
Context parentContext = getParentContext(records); Context parentContext = getParentContext(records);
if (batchProcessInstrumenter().shouldStart(parentContext, records)) { if (batchProcessInstrumenter.shouldStart(parentContext, records)) {
Context context = batchProcessInstrumenter().start(parentContext, records); Context context = batchProcessInstrumenter.start(parentContext, records);
Scope scope = context.makeCurrent(); Scope scope = context.makeCurrent();
stateField.set(records, State.create(records, context, scope)); stateField.set(records, State.create(records, context, scope));
} }
@ -67,11 +68,11 @@ public final class InstrumentedBatchInterceptor<K, V> implements BatchIntercepto
} }
private void end(ConsumerRecords<K, V> records, @Nullable Throwable error) { private void end(ConsumerRecords<K, V> records, @Nullable Throwable error) {
State<ConsumerRecords<K, V>> state = stateField.get(records); State<ConsumerRecords<?, ?>> state = stateField.get(records);
stateField.set(records, null); stateField.set(records, null);
if (state != null) { if (state != null) {
state.scope().close(); state.scope().close();
batchProcessInstrumenter().end(state.context(), state.request(), null, error); batchProcessInstrumenter.end(state.context(), state.request(), null, error);
} }
} }
} }

View File

@ -3,30 +3,31 @@
* SPDX-License-Identifier: Apache-2.0 * SPDX-License-Identifier: Apache-2.0
*/ */
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import static io.opentelemetry.javaagent.instrumentation.spring.kafka.SpringKafkaSingletons.processInstrumenter;
import io.opentelemetry.context.Context; import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope; import io.opentelemetry.context.Scope;
import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
import io.opentelemetry.instrumentation.api.util.VirtualField; import io.opentelemetry.instrumentation.api.util.VirtualField;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.listener.RecordInterceptor; import org.springframework.kafka.listener.RecordInterceptor;
public final class InstrumentedRecordInterceptor<K, V> implements RecordInterceptor<K, V> { final class InstrumentedRecordInterceptor<K, V> implements RecordInterceptor<K, V> {
private final VirtualField<ConsumerRecord<K, V>, Context> receiveContextField; private static final VirtualField<ConsumerRecord<?, ?>, Context> receiveContextField =
private final VirtualField<ConsumerRecord<K, V>, State<ConsumerRecord<K, V>>> stateField; VirtualField.find(ConsumerRecord.class, Context.class);
private static final VirtualField<ConsumerRecord<?, ?>, State<ConsumerRecord<?, ?>>> stateField =
VirtualField.find(ConsumerRecord.class, State.class);
private final Instrumenter<ConsumerRecord<?, ?>, Void> processInstrumenter;
@Nullable private final RecordInterceptor<K, V> decorated; @Nullable private final RecordInterceptor<K, V> decorated;
public InstrumentedRecordInterceptor( InstrumentedRecordInterceptor(
VirtualField<ConsumerRecord<K, V>, Context> receiveContextField, Instrumenter<ConsumerRecord<?, ?>, Void> processInstrumenter,
VirtualField<ConsumerRecord<K, V>, State<ConsumerRecord<K, V>>> stateField,
@Nullable RecordInterceptor<K, V> decorated) { @Nullable RecordInterceptor<K, V> decorated) {
this.receiveContextField = receiveContextField; this.processInstrumenter = processInstrumenter;
this.stateField = stateField;
this.decorated = decorated; this.decorated = decorated;
} }
@ -46,8 +47,8 @@ public final class InstrumentedRecordInterceptor<K, V> implements RecordIntercep
private void start(ConsumerRecord<K, V> record) { private void start(ConsumerRecord<K, V> record) {
Context parentContext = getParentContext(record); Context parentContext = getParentContext(record);
if (processInstrumenter().shouldStart(parentContext, record)) { if (processInstrumenter.shouldStart(parentContext, record)) {
Context context = processInstrumenter().start(parentContext, record); Context context = processInstrumenter.start(parentContext, record);
Scope scope = context.makeCurrent(); Scope scope = context.makeCurrent();
stateField.set(record, State.create(record, context, scope)); stateField.set(record, State.create(record, context, scope));
} }
@ -77,11 +78,11 @@ public final class InstrumentedRecordInterceptor<K, V> implements RecordIntercep
} }
private void end(ConsumerRecord<K, V> record, @Nullable Throwable error) { private void end(ConsumerRecord<K, V> record, @Nullable Throwable error) {
State<ConsumerRecord<K, V>> state = stateField.get(record); State<ConsumerRecord<?, ?>> state = stateField.get(record);
stateField.set(record, null); stateField.set(record, null);
if (state != null) { if (state != null) {
state.scope().close(); state.scope().close();
processInstrumenter().end(state.context(), state.request(), null, error); processInstrumenter.end(state.context(), state.request(), null, error);
} }
} }
} }

View File

@ -3,7 +3,7 @@
* SPDX-License-Identifier: Apache-2.0 * SPDX-License-Identifier: Apache-2.0
*/ */
package io.opentelemetry.javaagent.instrumentation.spring.kafka; package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import io.opentelemetry.instrumentation.api.instrumenter.ErrorCauseExtractor; import io.opentelemetry.instrumentation.api.instrumenter.ErrorCauseExtractor;
import org.springframework.kafka.listener.ListenerExecutionFailedException; import org.springframework.kafka.listener.ListenerExecutionFailedException;

View File

@@ -0,0 +1,84 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.instrumentation.api.instrumenter.Instrumenter;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.springframework.kafka.listener.AbstractMessageListenerContainer;
import org.springframework.kafka.listener.BatchInterceptor;
import org.springframework.kafka.listener.RecordInterceptor;
/** Entrypoint for instrumenting Spring Kafka listeners. */
public final class SpringKafkaTelemetry {

  /** Returns a new {@link SpringKafkaTelemetry} configured with the given {@link OpenTelemetry}. */
  public static SpringKafkaTelemetry create(OpenTelemetry openTelemetry) {
    return new SpringKafkaTelemetryBuilder(openTelemetry).build();
  }

  /**
   * Returns a new {@link SpringKafkaTelemetryBuilder} configured with the given {@link
   * OpenTelemetry}.
   */
  public static SpringKafkaTelemetryBuilder builder(OpenTelemetry openTelemetry) {
    return new SpringKafkaTelemetryBuilder(openTelemetry);
  }

  // Instrumenter applied to each individual ConsumerRecord.
  private final Instrumenter<ConsumerRecord<?, ?>, Void> singleRecordInstrumenter;
  // Instrumenter applied to a whole ConsumerRecords batch.
  private final Instrumenter<ConsumerRecords<?, ?>, Void> batchInstrumenter;

  SpringKafkaTelemetry(
      Instrumenter<ConsumerRecord<?, ?>, Void> processInstrumenter,
      Instrumenter<ConsumerRecords<?, ?>, Void> batchProcessInstrumenter) {
    this.singleRecordInstrumenter = processInstrumenter;
    this.batchInstrumenter = batchProcessInstrumenter;
  }

  /**
   * Returns a new {@link RecordInterceptor} that decorates a message listener with a {@link
   * SpanKind#CONSUMER CONSUMER} span. Can be set on a {@link AbstractMessageListenerContainer}
   * using the {@link AbstractMessageListenerContainer#setRecordInterceptor(RecordInterceptor)}
   * method.
   */
  public <K, V> RecordInterceptor<K, V> createRecordInterceptor() {
    return new InstrumentedRecordInterceptor<>(singleRecordInstrumenter, null);
  }

  /**
   * Returns a new {@link RecordInterceptor} that decorates a message listener with a {@link
   * SpanKind#CONSUMER CONSUMER} span, and then delegates to a provided {@code
   * decoratedInterceptor}. Can be set on a {@link AbstractMessageListenerContainer} using the
   * {@link AbstractMessageListenerContainer#setRecordInterceptor(RecordInterceptor)} method.
   */
  public <K, V> RecordInterceptor<K, V> createRecordInterceptor(
      RecordInterceptor<K, V> decoratedInterceptor) {
    return new InstrumentedRecordInterceptor<>(singleRecordInstrumenter, decoratedInterceptor);
  }

  /**
   * Returns a new {@link BatchInterceptor} that decorates a message listener with a {@link
   * SpanKind#CONSUMER CONSUMER} span. Can be set on a {@link AbstractMessageListenerContainer}
   * using the {@link AbstractMessageListenerContainer#setBatchInterceptor(BatchInterceptor)}
   * method.
   */
  public <K, V> BatchInterceptor<K, V> createBatchInterceptor() {
    return new InstrumentedBatchInterceptor<>(batchInstrumenter, null);
  }

  /**
   * Returns a new {@link BatchInterceptor} that decorates a message listener with a {@link
   * SpanKind#CONSUMER CONSUMER} span, and then delegates to a provided {@code
   * decoratedInterceptor}. Can be set on a {@link AbstractMessageListenerContainer} using the
   * {@link AbstractMessageListenerContainer#setBatchInterceptor(BatchInterceptor)} method.
   */
  public <K, V> BatchInterceptor<K, V> createBatchInterceptor(
      BatchInterceptor<K, V> decoratedInterceptor) {
    return new InstrumentedBatchInterceptor<>(batchInstrumenter, decoratedInterceptor);
  }
}

View File

@@ -0,0 +1,57 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.instrumentation.kafka.internal.KafkaInstrumenterFactory;
/** A builder of {@link SpringKafkaTelemetry}. */
public final class SpringKafkaTelemetryBuilder {

  private static final String INSTRUMENTATION_NAME = "io.opentelemetry.spring-kafka-2.7";

  private final OpenTelemetry openTelemetry;
  // Defaults: experimental attributes off, propagation on, receive telemetry off.
  private boolean captureExperimentalSpanAttributes;
  private boolean propagationEnabled = true;
  private boolean messagingReceiveInstrumentationEnabled;

  SpringKafkaTelemetryBuilder(OpenTelemetry openTelemetry) {
    this.openTelemetry = openTelemetry;
  }

  /** Sets whether experimental span attributes should be captured. Defaults to {@code false}. */
  public SpringKafkaTelemetryBuilder setCaptureExperimentalSpanAttributes(
      boolean captureExperimentalSpanAttributes) {
    this.captureExperimentalSpanAttributes = captureExperimentalSpanAttributes;
    return this;
  }

  /** Sets whether context propagation is enabled. Defaults to {@code true}. */
  public SpringKafkaTelemetryBuilder setPropagationEnabled(boolean propagationEnabled) {
    this.propagationEnabled = propagationEnabled;
    return this;
  }

  /** Sets whether messaging receive instrumentation is enabled. Defaults to {@code false}. */
  public SpringKafkaTelemetryBuilder setMessagingReceiveInstrumentationEnabled(
      boolean messagingReceiveInstrumentationEnabled) {
    this.messagingReceiveInstrumentationEnabled = messagingReceiveInstrumentationEnabled;
    return this;
  }

  /**
   * Returns a new {@link SpringKafkaTelemetry} with the settings of this {@link
   * SpringKafkaTelemetryBuilder}.
   */
  public SpringKafkaTelemetry build() {
    KafkaInstrumenterFactory instrumenterFactory =
        new KafkaInstrumenterFactory(openTelemetry, INSTRUMENTATION_NAME)
            .setCaptureExperimentalSpanAttributes(captureExperimentalSpanAttributes)
            .setPropagationEnabled(propagationEnabled)
            .setMessagingReceiveInstrumentationEnabled(messagingReceiveInstrumentationEnabled)
            .setErrorCauseExtractor(SpringKafkaErrorCauseExtractor.INSTANCE);
    return new SpringKafkaTelemetry(
        instrumenterFactory.createConsumerProcessInstrumenter(),
        instrumenterFactory.createBatchProcessInstrumenter());
  }
}

View File

@ -0,0 +1,26 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import com.google.auto.value.AutoValue;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
/**
 * Immutable value type grouping an intercepted request with the {@link Context} and {@link Scope}
 * created for it. NOTE(review): presumably an interceptor stores this while the request is being
 * processed and closes the scope afterwards — confirm against the interceptor implementations.
 */
@AutoValue
abstract class State<REQUEST> {

  static <REQUEST> State<REQUEST> create(REQUEST request, Context context, Scope scope) {
    // AutoValue generates this constructor; its parameter order follows the declaration order of
    // the abstract property methods below, so keep the two in sync.
    return new AutoValue_State<>(request, context, scope);
  }

  // the intercepted request (record or batch)
  abstract REQUEST request();

  // context associated with the request's processing
  abstract Context context();

  // scope that must be closed when processing completes
  abstract Scope scope();

  // package-private: instances are created only via create()
  State() {}
}

View File

@ -0,0 +1,57 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.instrumentation.spring.kafka.v2_7;
import static java.util.Collections.singletonList;
import io.opentelemetry.instrumentation.kafkaclients.KafkaTelemetry;
import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
import io.opentelemetry.instrumentation.testing.junit.LibraryInstrumentationExtension;
import io.opentelemetry.testing.AbstractSpringKafkaNoReceiveTelemetryTest;
import java.util.List;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.springframework.boot.autoconfigure.kafka.DefaultKafkaProducerFactoryCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ContainerCustomizer;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
/** Runs the shared no-receive-telemetry suite against the library instrumentation. */
class SpringKafkaNoReceiveTelemetryTest extends AbstractSpringKafkaNoReceiveTelemetryTest {

  @RegisterExtension
  static final InstrumentationExtension testing = LibraryInstrumentationExtension.create();

  @Override
  protected InstrumentationExtension testing() {
    return testing;
  }

  @Override
  protected List<Class<?>> additionalSpringConfigs() {
    return singletonList(KafkaInstrumentationConfig.class);
  }

  @Configuration
  public static class KafkaInstrumentationConfig {

    // Wraps producers created by Spring's factory with kafka-clients telemetry.
    @Bean
    public DefaultKafkaProducerFactoryCustomizer producerInstrumentation() {
      KafkaTelemetry telemetry = KafkaTelemetry.create(testing.getOpenTelemetry());
      return factory -> factory.addPostProcessor(telemetry::wrap);
    }

    // Installs the record and batch interceptors on every listener container.
    @Bean
    public ContainerCustomizer<String, String, ConcurrentMessageListenerContainer<String, String>>
        listenerCustomizer() {
      SpringKafkaTelemetry telemetry = SpringKafkaTelemetry.create(testing.getOpenTelemetry());
      return listenerContainer -> {
        listenerContainer.setRecordInterceptor(telemetry.createRecordInterceptor());
        listenerContainer.setBatchInterceptor(telemetry.createBatchInterceptor());
      };
    }
  }
}

View File

@ -0,0 +1,220 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.testing;
import static io.opentelemetry.instrumentation.testing.util.TelemetryDataUtil.orderByRootSpanKind;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.equalTo;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.satisfies;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.sdk.trace.data.SpanData;
import io.opentelemetry.sdk.trace.data.StatusData;
import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import org.assertj.core.api.AbstractLongAssert;
import org.junit.jupiter.api.Test;
/**
 * Shared test suite asserting the span topology produced when messaging "receive" telemetry is
 * disabled: producer spans parent single-record process spans directly, while batch process spans
 * are root spans linked (not parented) to the producer spans. Subclasses supply the
 * {@code testing()} extension (agent vs. library instrumentation).
 */
public abstract class AbstractSpringKafkaNoReceiveTelemetryTest extends AbstractSpringKafkaTest {

  /** A single record produces: producer → send (PRODUCER) → process (CONSUMER) → listener span. */
  @Test
  void shouldCreateSpansForSingleRecordProcess() {
    testing()
        .runWithSpan(
            "producer",
            () -> {
              kafkaTemplate.executeInTransaction(
                  ops -> {
                    ops.send("testSingleTopic", "10", "testSpan");
                    return 0;
                  });
            });

    testing()
        .waitAndAssertTraces(
            trace ->
                trace.hasSpansSatisfyingExactly(
                    span -> span.hasName("producer"),
                    span ->
                        span.hasName("testSingleTopic send")
                            .hasKind(SpanKind.PRODUCER)
                            .hasParent(trace.getSpan(0))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(
                                    SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
                    span ->
                        // with receive telemetry off, process is a direct child of send
                        span.hasName("testSingleTopic process")
                            .hasKind(SpanKind.CONSUMER)
                            .hasParent(trace.getSpan(1))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(
                                    SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
                                equalTo(SemanticAttributes.MESSAGING_OPERATION, "process"),
                                satisfies(
                                    SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES,
                                    AbstractLongAssert::isNotNegative),
                                satisfies(
                                    SemanticAttributes.MESSAGING_KAFKA_PARTITION,
                                    AbstractLongAssert::isNotNegative)),
                    span -> span.hasName("consumer").hasParent(trace.getSpan(2))));
  }

  /**
   * A record whose value is "error" makes the listener throw; the process span must record the
   * exception and an error status. NOTE(review): the "error" → IllegalArgumentException("boom")
   * behavior is defined by the listener in ConsumerConfig — confirm there.
   */
  @Test
  void shouldHandleFailureInKafkaListener() {
    testing()
        .runWithSpan(
            "producer",
            () -> {
              kafkaTemplate.executeInTransaction(
                  ops -> {
                    ops.send("testSingleTopic", "10", "error");
                    return 0;
                  });
            });

    testing()
        .waitAndAssertTraces(
            trace ->
                trace.hasSpansSatisfyingExactly(
                    span -> span.hasName("producer"),
                    span ->
                        span.hasName("testSingleTopic send")
                            .hasKind(SpanKind.PRODUCER)
                            .hasParent(trace.getSpan(0))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(
                                    SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
                    span ->
                        span.hasName("testSingleTopic process")
                            .hasKind(SpanKind.CONSUMER)
                            .hasParent(trace.getSpan(1))
                            .hasStatus(StatusData.error())
                            .hasException(new IllegalArgumentException("boom"))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(
                                    SemanticAttributes.MESSAGING_DESTINATION, "testSingleTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
                                equalTo(SemanticAttributes.MESSAGING_OPERATION, "process"),
                                satisfies(
                                    SemanticAttributes.MESSAGING_MESSAGE_PAYLOAD_SIZE_BYTES,
                                    AbstractLongAssert::isNotNegative),
                                satisfies(
                                    SemanticAttributes.MESSAGING_KAFKA_PARTITION,
                                    AbstractLongAssert::isNotNegative)),
                    span -> span.hasName("consumer").hasParent(trace.getSpan(2))));
  }

  /**
   * Two records handled as one batch: the producer trace and the consumer trace are separate, so
   * the batch process span is a root span with links back to both producer send spans.
   */
  @Test
  void shouldCreateSpansForBatchReceiveAndProcess() throws InterruptedException {
    Map<String, String> batchMessages = new HashMap<>();
    batchMessages.put("10", "testSpan1");
    batchMessages.put("20", "testSpan2");
    sendBatchMessages(batchMessages);

    // captured from the producer trace so the consumer trace can assert its links
    AtomicReference<SpanData> producer1 = new AtomicReference<>();
    AtomicReference<SpanData> producer2 = new AtomicReference<>();

    testing()
        .waitAndAssertSortedTraces(
            orderByRootSpanKind(SpanKind.INTERNAL, SpanKind.CONSUMER),
            trace -> {
              trace.hasSpansSatisfyingExactly(
                  span -> span.hasName("producer"),
                  span ->
                      span.hasName("testBatchTopic send")
                          .hasKind(SpanKind.PRODUCER)
                          .hasParent(trace.getSpan(0))
                          .hasAttributesSatisfyingExactly(
                              equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")),
                  span ->
                      span.hasName("testBatchTopic send")
                          .hasKind(SpanKind.PRODUCER)
                          .hasParent(trace.getSpan(0))
                          .hasAttributesSatisfyingExactly(
                              equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")));

              producer1.set(trace.getSpan(1));
              producer2.set(trace.getSpan(2));
            },
            trace ->
                trace.hasSpansSatisfyingExactly(
                    span ->
                        // batch process span: root, linked to both producers
                        span.hasName("testBatchTopic process")
                            .hasKind(SpanKind.CONSUMER)
                            .hasNoParent()
                            .hasLinksSatisfying(
                                links(
                                    producer1.get().getSpanContext(),
                                    producer2.get().getSpanContext()))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
                                equalTo(SemanticAttributes.MESSAGING_OPERATION, "process")),
                    span -> span.hasName("consumer").hasParent(trace.getSpan(0))));
  }

  /** A failing batch listener: the root batch process span records the error and the link. */
  @Test
  void shouldHandleFailureInKafkaBatchListener() {
    testing()
        .runWithSpan(
            "producer",
            () -> {
              kafkaTemplate.executeInTransaction(
                  ops -> {
                    ops.send("testBatchTopic", "10", "error");
                    return 0;
                  });
            });

    // captured from the producer trace so the consumer trace can assert its link
    AtomicReference<SpanData> producer = new AtomicReference<>();

    testing()
        .waitAndAssertSortedTraces(
            orderByRootSpanKind(SpanKind.INTERNAL, SpanKind.CONSUMER),
            trace -> {
              trace.hasSpansSatisfyingExactly(
                  span -> span.hasName("producer"),
                  span ->
                      span.hasName("testBatchTopic send")
                          .hasKind(SpanKind.PRODUCER)
                          .hasParent(trace.getSpan(0))
                          .hasAttributesSatisfyingExactly(
                              equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
                              equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic")));

              producer.set(trace.getSpan(1));
            },
            trace ->
                trace.hasSpansSatisfyingExactly(
                    span ->
                        span.hasName("testBatchTopic process")
                            .hasKind(SpanKind.CONSUMER)
                            .hasNoParent()
                            .hasLinksSatisfying(links(producer.get().getSpanContext()))
                            .hasStatus(StatusData.error())
                            .hasException(new IllegalArgumentException("boom"))
                            .hasAttributesSatisfyingExactly(
                                equalTo(SemanticAttributes.MESSAGING_SYSTEM, "kafka"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION, "testBatchTopic"),
                                equalTo(SemanticAttributes.MESSAGING_DESTINATION_KIND, "topic"),
                                equalTo(SemanticAttributes.MESSAGING_OPERATION, "process")),
                    span -> span.hasName("consumer").hasParent(trace.getSpan(0))));
  }
}

View File

@ -5,14 +5,20 @@
package io.opentelemetry.testing; package io.opentelemetry.testing;
import io.opentelemetry.instrumentation.testing.junit.AgentInstrumentationExtension; import static org.assertj.core.api.Assertions.assertThat;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension; import io.opentelemetry.instrumentation.testing.junit.InstrumentationExtension;
import io.opentelemetry.sdk.trace.data.LinkData;
import java.time.Duration; import java.time.Duration;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Consumer;
import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
@ -26,22 +32,32 @@ public abstract class AbstractSpringKafkaTest {
private static final Logger logger = LoggerFactory.getLogger(AbstractSpringKafkaTest.class); private static final Logger logger = LoggerFactory.getLogger(AbstractSpringKafkaTest.class);
@RegisterExtension
protected static final InstrumentationExtension testing = AgentInstrumentationExtension.create();
static KafkaContainer kafka; static KafkaContainer kafka;
static ConfigurableApplicationContext applicationContext;
protected static KafkaTemplate<String, String> kafkaTemplate;
@SuppressWarnings("unchecked") ConfigurableApplicationContext applicationContext;
protected KafkaTemplate<String, String> kafkaTemplate;
@BeforeAll @BeforeAll
static void setUp() { static void setUpKafka() {
kafka = kafka =
new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3")) new KafkaContainer(DockerImageName.parse("confluentinc/cp-kafka:5.4.3"))
.waitingFor(Wait.forLogMessage(".*started \\(kafka.server.KafkaServer\\).*", 1)) .waitingFor(Wait.forLogMessage(".*started \\(kafka.server.KafkaServer\\).*", 1))
.withStartupTimeout(Duration.ofMinutes(1)); .withStartupTimeout(Duration.ofMinutes(1));
kafka.start(); kafka.start();
}
@AfterAll
static void tearDownKafka() {
kafka.stop();
}
protected abstract InstrumentationExtension testing();
protected abstract List<Class<?>> additionalSpringConfigs();
@SuppressWarnings("unchecked")
@BeforeEach
void setUpApp() {
Map<String, Object> props = new HashMap<>(); Map<String, Object> props = new HashMap<>();
props.put("spring.jmx.enabled", false); props.put("spring.jmx.enabled", false);
props.put("spring.main.web-application-type", "none"); props.put("spring.main.web-application-type", "none");
@ -53,16 +69,16 @@ public abstract class AbstractSpringKafkaTest {
props.put("spring.kafka.producer.transaction-id-prefix", "test-"); props.put("spring.kafka.producer.transaction-id-prefix", "test-");
SpringApplication app = new SpringApplication(ConsumerConfig.class); SpringApplication app = new SpringApplication(ConsumerConfig.class);
app.addPrimarySources(additionalSpringConfigs());
app.setDefaultProperties(props); app.setDefaultProperties(props);
applicationContext = app.run(); applicationContext = app.run();
kafkaTemplate = applicationContext.getBean("kafkaTemplate", KafkaTemplate.class); kafkaTemplate = applicationContext.getBean("kafkaTemplate", KafkaTemplate.class);
} }
@AfterAll @AfterEach
static void tearDown() { void tearDownApp() {
kafka.stop();
if (applicationContext != null) { if (applicationContext != null) {
applicationContext.stop(); applicationContext.close();
} }
} }
@ -75,25 +91,45 @@ public abstract class AbstractSpringKafkaTest {
for (int i = 1; i <= maxAttempts; i++) { for (int i = 1; i <= maxAttempts; i++) {
BatchRecordListener.reset(); BatchRecordListener.reset();
testing.runWithSpan( testing()
"producer", .runWithSpan(
() -> { "producer",
kafkaTemplate.executeInTransaction( () -> {
ops -> { kafkaTemplate.executeInTransaction(
keyToData.forEach((key, data) -> ops.send("testBatchTopic", key, data)); ops -> {
return 0; keyToData.forEach((key, data) -> ops.send("testBatchTopic", key, data));
}); return 0;
}); });
});
BatchRecordListener.waitForMessages(); BatchRecordListener.waitForMessages();
if (BatchRecordListener.getLastBatchSize() == 2) { if (BatchRecordListener.getLastBatchSize() == 2) {
break; break;
} else if (i < maxAttempts) { } else if (i < maxAttempts) {
testing.waitForTraces(2); testing().waitForTraces(2);
Thread.sleep(1_000); // sleep a bit to give time for all the spans to arrive Thread.sleep(1_000); // sleep a bit to give time for all the spans to arrive
testing.clearData(); testing().clearData();
logger.info("Messages weren't received as batch, retrying"); logger.info("Messages weren't received as batch, retrying");
} }
} }
} }
protected static Consumer<List<? extends LinkData>> links(SpanContext... spanContexts) {
return links -> {
assertThat(links).hasSize(spanContexts.length);
for (SpanContext spanContext : spanContexts) {
assertThat(links)
.anySatisfy(
link -> {
assertThat(link.getSpanContext().getTraceId())
.isEqualTo(spanContext.getTraceId());
assertThat(link.getSpanContext().getSpanId()).isEqualTo(spanContext.getSpanId());
assertThat(link.getSpanContext().getTraceFlags())
.isEqualTo(spanContext.getTraceFlags());
assertThat(link.getSpanContext().getTraceState())
.isEqualTo(spanContext.getTraceState());
});
}
};
}
} }

View File

@ -6,12 +6,15 @@
package io.opentelemetry.testing; package io.opentelemetry.testing;
import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.SpringBootConfiguration; import org.springframework.boot.SpringBootConfiguration;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.ContainerCustomizer;
import org.springframework.kafka.config.TopicBuilder; import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.ConsumerFactory; import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
@SpringBootConfiguration @SpringBootConfiguration
@EnableAutoConfiguration @EnableAutoConfiguration
@ -39,7 +42,11 @@ public class ConsumerConfig {
@Bean @Bean
public ConcurrentKafkaListenerContainerFactory<String, String> batchFactory( public ConcurrentKafkaListenerContainerFactory<String, String> batchFactory(
ConsumerFactory<String, String> consumerFactory) { ConsumerFactory<String, String> consumerFactory,
ObjectProvider<
ContainerCustomizer<
String, String, ConcurrentMessageListenerContainer<String, String>>>
customizerProvider) {
ConcurrentKafkaListenerContainerFactory<String, String> factory = ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>(); new ConcurrentKafkaListenerContainerFactory<>();
// do not retry failed records // do not retry failed records
@ -47,12 +54,17 @@ public class ConsumerConfig {
factory.setConsumerFactory(consumerFactory); factory.setConsumerFactory(consumerFactory);
factory.setBatchListener(true); factory.setBatchListener(true);
factory.setAutoStartup(true); factory.setAutoStartup(true);
customizerProvider.ifAvailable(factory::setContainerCustomizer);
return factory; return factory;
} }
@Bean @Bean
public ConcurrentKafkaListenerContainerFactory<String, String> singleFactory( public ConcurrentKafkaListenerContainerFactory<String, String> singleFactory(
ConsumerFactory<String, String> consumerFactory) { ConsumerFactory<String, String> consumerFactory,
ObjectProvider<
ContainerCustomizer<
String, String, ConcurrentMessageListenerContainer<String, String>>>
customizerProvider) {
ConcurrentKafkaListenerContainerFactory<String, String> factory = ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>(); new ConcurrentKafkaListenerContainerFactory<>();
// do not retry failed records // do not retry failed records
@ -60,6 +72,7 @@ public class ConsumerConfig {
factory.setConsumerFactory(consumerFactory); factory.setConsumerFactory(consumerFactory);
factory.setBatchListener(false); factory.setBatchListener(false);
factory.setAutoStartup(true); factory.setAutoStartup(true);
customizerProvider.ifAvailable(factory::setContainerCustomizer);
return factory; return factory;
} }
} }

View File

@ -430,6 +430,7 @@ include(":instrumentation:spring:spring-integration-4.1:library")
include(":instrumentation:spring:spring-integration-4.1:testing") include(":instrumentation:spring:spring-integration-4.1:testing")
include(":instrumentation:spring:spring-jms-2.0:javaagent") include(":instrumentation:spring:spring-jms-2.0:javaagent")
include(":instrumentation:spring:spring-kafka-2.7:javaagent") include(":instrumentation:spring:spring-kafka-2.7:javaagent")
include(":instrumentation:spring:spring-kafka-2.7:library")
include(":instrumentation:spring:spring-kafka-2.7:testing") include(":instrumentation:spring:spring-kafka-2.7:testing")
include(":instrumentation:spring:spring-rabbit-1.0:javaagent") include(":instrumentation:spring:spring-rabbit-1.0:javaagent")
include(":instrumentation:spring:spring-rmi-4.0:javaagent") include(":instrumentation:spring:spring-rmi-4.0:javaagent")