Merge pull request #275 from DataDog/tyler/fix-kafka-consume

Advice shouldn’t reference fields from non-injected classes
Tyler Benson 2018-03-30 11:25:52 +08:00 committed by GitHub
commit 4047f1bdfd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 52 additions and 58 deletions
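Background for the change: ByteBuddy inlines the bytecode of @Advice methods into the instrumented application classes, so every field reference inside advice has to resolve in the application's classloader. Only the classes registered with the HELPER_INJECTOR lists below are injected into that classloader; the Instrumentation classes themselves are not, so advice that reads a field such as CONSUME_ACTION emits a getstatic against a class the application cannot see, which typically fails with a NoClassDefFoundError. The fix inlines the string values at the call sites and moves the singleton onto the injected ConsumeScopeAction helper as its INSTANCE field. Below is a minimal sketch of the anti-pattern and the fix; ExampleInstrumentation, ExampleDecorator, BadAdvice, and GoodAdvice are hypothetical names for illustration, not classes in this commit.

import net.bytebuddy.asm.Advice;

// Sketch only: why advice must not reference fields of non-injected classes.
public final class ExampleInstrumentation {
  // Lives only in the agent's classloader; never injected into the application.
  public static final ExampleDecorator DECORATOR = new ExampleDecorator();

  // Nested helper that IS registered with a HelperInjector (like ConsumeScopeAction),
  // so it is visible from the instrumented application classes.
  public static class ExampleDecorator {
    public static final ExampleDecorator INSTANCE = new ExampleDecorator();
  }

  public static class BadAdvice {
    @Advice.OnMethodExit(suppress = Throwable.class)
    public static void exit() {
      // Inlined into the instrumented class, this getstatic targets
      // ExampleInstrumentation, which is not injected there, and typically
      // fails with NoClassDefFoundError at runtime.
      final ExampleDecorator decorator = DECORATOR;
    }
  }

  public static class GoodAdvice {
    @Advice.OnMethodExit(suppress = Throwable.class)
    public static void exit() {
      // Resolves against the injected helper class instead, so it works
      // regardless of which classloader the advice ends up in.
      final ExampleDecorator decorator = ExampleDecorator.INSTANCE;
    }
  }
}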

View File

@@ -34,10 +34,6 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
           "datadog.trace.instrumentation.kafka_clients.TracingIterable$TracingIterator",
           "datadog.trace.instrumentation.kafka_clients.TracingIterable$SpanBuilderDecorator",
           "datadog.trace.instrumentation.kafka_clients.KafkaConsumerInstrumentation$ConsumeScopeAction");
-  public static final ConsumeScopeAction CONSUME_ACTION = new ConsumeScopeAction();
-  private static final String OPERATION = "kafka.consume";
-  private static final String COMPONENT_NAME = "java-kafka";
 
   public KafkaConsumerInstrumentation() {
     super("kafka");
@@ -75,7 +71,7 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
     @Advice.OnMethodExit(suppress = Throwable.class)
     public static void wrap(@Advice.Return(readOnly = false) Iterable<ConsumerRecord> iterable) {
-      iterable = new TracingIterable(iterable, OPERATION, CONSUME_ACTION);
+      iterable = new TracingIterable(iterable, "kafka.consume", ConsumeScopeAction.INSTANCE);
     }
   }
@@ -83,12 +79,15 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
     @Advice.OnMethodExit(suppress = Throwable.class)
     public static void wrap(@Advice.Return(readOnly = false) Iterator<ConsumerRecord> iterator) {
-      iterator = new TracingIterable.TracingIterator(iterator, OPERATION, CONSUME_ACTION);
+      iterator =
+          new TracingIterable.TracingIterator(
+              iterator, "kafka.consume", ConsumeScopeAction.INSTANCE);
     }
   }
 
   public static class ConsumeScopeAction
       implements TracingIterable.SpanBuilderDecorator<ConsumerRecord> {
+    public static final ConsumeScopeAction INSTANCE = new ConsumeScopeAction();
 
     @Override
     public void decorate(final Tracer.SpanBuilder spanBuilder, final ConsumerRecord record) {
@@ -101,7 +100,7 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
           .withTag(DDTags.SERVICE_NAME, "kafka")
           .withTag(DDTags.RESOURCE_NAME, "Consume Topic " + topic)
           .withTag(DDTags.SPAN_TYPE, DDSpanTypes.MESSAGE_CONSUMER)
-          .withTag(Tags.COMPONENT.getKey(), COMPONENT_NAME)
+          .withTag(Tags.COMPONENT.getKey(), "java-kafka")
           .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER)
           .withTag("partition", record.partition())
           .withTag("offset", record.offset());

View File

@@ -34,9 +34,6 @@ public class KafkaStreamsProcessorInstrumentation {
   public static final HelperInjector HELPER_INJECTOR =
       new HelperInjector("datadog.trace.instrumentation.kafka_streams.TextMapExtractAdapter");
-  private static final String OPERATION = "kafka.consume";
-  private static final String COMPONENT_NAME = "java-kafka";
 
   @AutoService(Instrumenter.class)
   public static class StartInstrumentation extends Instrumenter.Configurable {
@@ -80,12 +77,12 @@ public class KafkaStreamsProcessorInstrumentation {
                     Format.Builtin.TEXT_MAP, new TextMapExtractAdapter(record.value.headers()));
         GlobalTracer.get()
-            .buildSpan(OPERATION)
+            .buildSpan("kafka.consume")
             .asChildOf(extractedContext)
             .withTag(DDTags.SERVICE_NAME, "kafka")
             .withTag(DDTags.RESOURCE_NAME, "Consume Topic " + record.topic())
             .withTag(DDTags.SPAN_TYPE, DDSpanTypes.MESSAGE_CONSUMER)
-            .withTag(Tags.COMPONENT.getKey(), COMPONENT_NAME)
+            .withTag(Tags.COMPONENT.getKey(), "java-kafka")
             .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER)
             .withTag("partition", record.partition())
             .withTag("offset", record.offset())

View File

@@ -17,56 +17,54 @@ import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.record.TimestampType;
 
 // This is necessary because SourceNodeRecordDeserializer drops the headers. :-(
-public class KafkaStreamsSourceNodeRecordDeserializerInstrumentation {
+@AutoService(Instrumenter.class)
+public class KafkaStreamsSourceNodeRecordDeserializerInstrumentation
+    extends Instrumenter.Configurable {
 
-  @AutoService(Instrumenter.class)
-  public static class StartInstrumentation extends Instrumenter.Configurable {
+  public KafkaStreamsSourceNodeRecordDeserializerInstrumentation() {
+    super("kafka", "kafka-streams");
+  }
 
-    public StartInstrumentation() {
-      super("kafka", "kafka-streams");
-    }
-
-    @Override
-    public AgentBuilder apply(final AgentBuilder agentBuilder) {
-      return agentBuilder
-          .type(
-              named("org.apache.kafka.streams.processor.internals.SourceNodeRecordDeserializer"),
-              classLoaderHasClasses("org.apache.kafka.streams.state.internals.KeyValueIterators"))
-          .transform(DDTransformers.defaultTransformers())
-          .transform(
-              DDAdvice.create()
-                  .advice(
-                      isMethod()
-                          .and(isPublic())
-                          .and(named("deserialize"))
-                          .and(
-                              takesArgument(
-                                  0, named("org.apache.kafka.clients.consumer.ConsumerRecord")))
-                          .and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecord"))),
-                      SaveHeadersAdvice.class.getName()))
-          .asDecorator();
-    }
+  @Override
+  public AgentBuilder apply(final AgentBuilder agentBuilder) {
+    return agentBuilder
+        .type(
+            named("org.apache.kafka.streams.processor.internals.SourceNodeRecordDeserializer"),
+            classLoaderHasClasses("org.apache.kafka.streams.state.internals.KeyValueIterators"))
+        .transform(DDTransformers.defaultTransformers())
+        .transform(
+            DDAdvice.create()
+                .advice(
+                    isMethod()
+                        .and(isPublic())
+                        .and(named("deserialize"))
+                        .and(
+                            takesArgument(
+                                0, named("org.apache.kafka.clients.consumer.ConsumerRecord")))
+                        .and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecord"))),
+                    SaveHeadersAdvice.class.getName()))
+        .asDecorator();
+  }
 
-    public static class SaveHeadersAdvice {
+  public static class SaveHeadersAdvice {
 
-      @Advice.OnMethodExit(suppress = Throwable.class)
-      public static void saveHeaders(
-          @Advice.Argument(0) final ConsumerRecord incoming,
-          @Advice.Return(readOnly = false) ConsumerRecord result) {
-        result =
-            new ConsumerRecord<>(
-                result.topic(),
-                result.partition(),
-                result.offset(),
-                result.timestamp(),
-                TimestampType.CREATE_TIME,
-                result.checksum(),
-                result.serializedKeySize(),
-                result.serializedValueSize(),
-                result.key(),
-                result.value(),
-                incoming.headers());
-      }
-    }
+    @Advice.OnMethodExit(suppress = Throwable.class)
+    public static void saveHeaders(
+        @Advice.Argument(0) final ConsumerRecord incoming,
+        @Advice.Return(readOnly = false) ConsumerRecord result) {
+      result =
+          new ConsumerRecord<>(
+              result.topic(),
+              result.partition(),
+              result.offset(),
+              result.timestamp(),
+              TimestampType.CREATE_TIME,
+              result.checksum(),
+              result.serializedKeySize(),
+              result.serializedValueSize(),
+              result.key(),
+              result.value(),
+              incoming.headers());
+    }
   }
 }
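To make the comment at the top of this file concrete ("SourceNodeRecordDeserializer drops the headers"): building a ConsumerRecord through a constructor that does not take headers leaves the copy with empty headers, which breaks the trace-context propagation the other instrumentations rely on, so the advice rebuilds the record with incoming.headers() re-attached. A small standalone illustration follows; HeaderDropExample, the topic name, and the header value are made up for the demo.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.internals.RecordHeaders;
import org.apache.kafka.common.record.TimestampType;

public class HeaderDropExample {
  public static void main(final String[] args) {
    final RecordHeaders headers = new RecordHeaders();
    headers.add("x-datadog-trace-id", "123".getBytes());

    final ConsumerRecord<String, String> original =
        new ConsumerRecord<>(
            "example-topic", 0, 0L, 0L, TimestampType.CREATE_TIME, 0L, 3, 5, "key", "value", headers);

    // Copying the record without forwarding its headers (what the un-instrumented
    // deserializer effectively did) produces a record whose headers are empty.
    final ConsumerRecord<String, String> copy =
        new ConsumerRecord<>(
            original.topic(), original.partition(), original.offset(),
            original.key(), original.value());

    System.out.println(original.headers()); // contains x-datadog-trace-id
    System.out.println(copy.headers());     // empty: the trace context is gone
  }
}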