Merge pull request #275 from DataDog/tyler/fix-kafka-consume

Advice shouldn’t reference fields from non-injected classes
commit 4047f1bdfd
Tyler Benson, 2018-03-30 11:25:52 +08:00 (committed by GitHub)
3 changed files with 52 additions and 58 deletions
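
Context for the change: ByteBuddy advice is not invoked as a normal method call; its bytecode is inlined into the instrumented application class. Every field the advice touches must therefore resolve in the application's classloader, where only the explicitly injected helper classes exist, never the instrumenter class itself. A minimal sketch of the failure mode this PR removes (the class and members below are hypothetical, not from this repo):

    import net.bytebuddy.asm.Advice;

    // Hypothetical instrumenter: it is NOT in the helper-injection list,
    // so it never exists in the application classloader.
    public class BrokenInstrumentation {

      // An object-valued static field is not a compile-time constant, so
      // the compiled advice keeps a getstatic reference to this class.
      public static final Runnable CONSUME_ACTION = () -> {};

      public static class WrapAdvice {
        @Advice.OnMethodExit(suppress = Throwable.class)
        public static void exit() {
          // ByteBuddy copies this body into the instrumented method; at
          // runtime the getstatic must resolve BrokenInstrumentation in the
          // application classloader and fails with NoClassDefFoundError.
          CONSUME_ACTION.run();
        }
      }
    }

The hunks below remove exactly this pattern: CONSUME_ACTION, OPERATION, and COMPONENT_NAME disappear from the instrumenters, replaced by string literals and by a singleton field on an injected helper.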

KafkaConsumerInstrumentation.java

@@ -34,10 +34,6 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
           "datadog.trace.instrumentation.kafka_clients.TracingIterable$TracingIterator",
           "datadog.trace.instrumentation.kafka_clients.TracingIterable$SpanBuilderDecorator",
           "datadog.trace.instrumentation.kafka_clients.KafkaConsumerInstrumentation$ConsumeScopeAction");
 
-  public static final ConsumeScopeAction CONSUME_ACTION = new ConsumeScopeAction();
-  private static final String OPERATION = "kafka.consume";
-  private static final String COMPONENT_NAME = "java-kafka";
-
   public KafkaConsumerInstrumentation() {
     super("kafka");

@@ -75,7 +71,7 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
 
     @Advice.OnMethodExit(suppress = Throwable.class)
     public static void wrap(@Advice.Return(readOnly = false) Iterable<ConsumerRecord> iterable) {
-      iterable = new TracingIterable(iterable, OPERATION, CONSUME_ACTION);
+      iterable = new TracingIterable(iterable, "kafka.consume", ConsumeScopeAction.INSTANCE);
     }
   }

@@ -83,12 +79,15 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
 
     @Advice.OnMethodExit(suppress = Throwable.class)
     public static void wrap(@Advice.Return(readOnly = false) Iterator<ConsumerRecord> iterator) {
-      iterator = new TracingIterable.TracingIterator(iterator, OPERATION, CONSUME_ACTION);
+      iterator =
+          new TracingIterable.TracingIterator(
+              iterator, "kafka.consume", ConsumeScopeAction.INSTANCE);
     }
   }
 
   public static class ConsumeScopeAction
       implements TracingIterable.SpanBuilderDecorator<ConsumerRecord> {
+    public static final ConsumeScopeAction INSTANCE = new ConsumeScopeAction();
 
     @Override
     public void decorate(final Tracer.SpanBuilder spanBuilder, final ConsumerRecord record) {

@@ -101,7 +100,7 @@ public final class KafkaConsumerInstrumentation extends Instrumenter.Configurable {
               .withTag(DDTags.SERVICE_NAME, "kafka")
               .withTag(DDTags.RESOURCE_NAME, "Consume Topic " + topic)
               .withTag(DDTags.SPAN_TYPE, DDSpanTypes.MESSAGE_CONSUMER)
-              .withTag(Tags.COMPONENT.getKey(), COMPONENT_NAME)
+              .withTag(Tags.COMPONENT.getKey(), "java-kafka")
               .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER)
               .withTag("partition", record.partition())
               .withTag("offset", record.offset());

KafkaStreamsProcessorInstrumentation.java

@@ -34,9 +34,6 @@ public class KafkaStreamsProcessorInstrumentation {
   public static final HelperInjector HELPER_INJECTOR =
       new HelperInjector("datadog.trace.instrumentation.kafka_streams.TextMapExtractAdapter");
 
-  private static final String OPERATION = "kafka.consume";
-  private static final String COMPONENT_NAME = "java-kafka";
-
   @AutoService(Instrumenter.class)
   public static class StartInstrumentation extends Instrumenter.Configurable {

@@ -80,12 +77,12 @@ public class KafkaStreamsProcessorInstrumentation {
               Format.Builtin.TEXT_MAP, new TextMapExtractAdapter(record.value.headers()));
       GlobalTracer.get()
-          .buildSpan(OPERATION)
+          .buildSpan("kafka.consume")
           .asChildOf(extractedContext)
           .withTag(DDTags.SERVICE_NAME, "kafka")
           .withTag(DDTags.RESOURCE_NAME, "Consume Topic " + record.topic())
           .withTag(DDTags.SPAN_TYPE, DDSpanTypes.MESSAGE_CONSUMER)
-          .withTag(Tags.COMPONENT.getKey(), COMPONENT_NAME)
+          .withTag(Tags.COMPONENT.getKey(), "java-kafka")
           .withTag(Tags.SPAN_KIND.getKey(), Tags.SPAN_KIND_CONSUMER)
           .withTag("partition", record.partition())
           .withTag("offset", record.offset())

KafkaStreamsSourceNodeRecordDeserializerInstrumentation.java

@@ -17,56 +17,54 @@ import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.common.record.TimestampType;
 
 // This is necessary because SourceNodeRecordDeserializer drops the headers. :-(
-public class KafkaStreamsSourceNodeRecordDeserializerInstrumentation {
-
-  @AutoService(Instrumenter.class)
-  public static class StartInstrumentation extends Instrumenter.Configurable {
+@AutoService(Instrumenter.class)
+public class KafkaStreamsSourceNodeRecordDeserializerInstrumentation
+    extends Instrumenter.Configurable {
 
-    public StartInstrumentation() {
-      super("kafka", "kafka-streams");
-    }
+  public KafkaStreamsSourceNodeRecordDeserializerInstrumentation() {
+    super("kafka", "kafka-streams");
+  }
 
-    @Override
-    public AgentBuilder apply(final AgentBuilder agentBuilder) {
-      return agentBuilder
-          .type(
-              named("org.apache.kafka.streams.processor.internals.SourceNodeRecordDeserializer"),
-              classLoaderHasClasses("org.apache.kafka.streams.state.internals.KeyValueIterators"))
-          .transform(DDTransformers.defaultTransformers())
-          .transform(
-              DDAdvice.create()
-                  .advice(
-                      isMethod()
-                          .and(isPublic())
-                          .and(named("deserialize"))
-                          .and(
-                              takesArgument(
-                                  0, named("org.apache.kafka.clients.consumer.ConsumerRecord")))
-                          .and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecord"))),
-                      SaveHeadersAdvice.class.getName()))
-          .asDecorator();
-    }
+  @Override
+  public AgentBuilder apply(final AgentBuilder agentBuilder) {
+    return agentBuilder
+        .type(
+            named("org.apache.kafka.streams.processor.internals.SourceNodeRecordDeserializer"),
+            classLoaderHasClasses("org.apache.kafka.streams.state.internals.KeyValueIterators"))
+        .transform(DDTransformers.defaultTransformers())
+        .transform(
+            DDAdvice.create()
+                .advice(
+                    isMethod()
+                        .and(isPublic())
+                        .and(named("deserialize"))
+                        .and(
+                            takesArgument(
+                                0, named("org.apache.kafka.clients.consumer.ConsumerRecord")))
+                        .and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecord"))),
+                    SaveHeadersAdvice.class.getName()))
+        .asDecorator();
+  }
 
-    public static class SaveHeadersAdvice {
+  public static class SaveHeadersAdvice {
 
-      @Advice.OnMethodExit(suppress = Throwable.class)
-      public static void saveHeaders(
-          @Advice.Argument(0) final ConsumerRecord incoming,
-          @Advice.Return(readOnly = false) ConsumerRecord result) {
-        result =
-            new ConsumerRecord<>(
-                result.topic(),
-                result.partition(),
-                result.offset(),
-                result.timestamp(),
-                TimestampType.CREATE_TIME,
-                result.checksum(),
-                result.serializedKeySize(),
-                result.serializedValueSize(),
-                result.key(),
-                result.value(),
-                incoming.headers());
-      }
-    }
-  }
-}
+    @Advice.OnMethodExit(suppress = Throwable.class)
+    public static void saveHeaders(
+        @Advice.Argument(0) final ConsumerRecord incoming,
+        @Advice.Return(readOnly = false) ConsumerRecord result) {
+      result =
+          new ConsumerRecord<>(
+              result.topic(),
+              result.partition(),
+              result.offset(),
+              result.timestamp(),
+              TimestampType.CREATE_TIME,
+              result.checksum(),
+              result.serializedKeySize(),
+              result.serializedValueSize(),
+              result.key(),
+              result.value(),
+              incoming.headers());
+    }
+  }
+}
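
The change to this last file is structural rather than behavioral: the superfluous StartInstrumentation nesting is flattened so the top-level class is itself the Instrumenter, and @AutoService moves up with it. AutoService only generates the META-INF/services entry that ServiceLoader reads, so registration keeps working after the move. A hypothetical discovery loop for context (the agent's real bootstrap is not part of this diff, and applyAll is an invented name):

    import java.util.ServiceLoader;
    import net.bytebuddy.agent.builder.AgentBuilder;

    public class InstrumenterDiscovery {
      // Assumes Instrumenter (this repo's interface, not shown in the diff)
      // is on the classpath; @AutoService(Instrumenter.class) registered
      // each annotated class under META-INF/services at compile time.
      static AgentBuilder applyAll(AgentBuilder agentBuilder) {
        for (final Instrumenter instrumenter : ServiceLoader.load(Instrumenter.class)) {
          agentBuilder = instrumenter.apply(agentBuilder);
        }
        return agentBuilder;
      }
    }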