Fix latestDepTests for Kafka Streams, Netty, and Okhttp

Tyler Benson 2018-11-06 11:38:53 +10:00
parent a2a194d48a
commit 5e194ef06c
9 changed files with 130 additions and 110 deletions

View File

@@ -21,6 +21,6 @@ public class TextMapInjectAdapter implements TextMap {

   @Override
   public void put(final String key, final String value) {
-    headers.add(key, value.getBytes(StandardCharsets.UTF_8));
+    headers.remove(key).add(key, value.getBytes(StandardCharsets.UTF_8));
   }
 }
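
Note on the change: Kafka's Headers.add appends rather than replaces, so injecting into a record that already carries x-datadog-* headers would accumulate duplicates; remove(key) returns the same Headers instance, which is why the call chains. A minimal sketch of the adapter around this method (class and field names assumed, not the repo's exact source):

import java.nio.charset.StandardCharsets
import org.apache.kafka.common.header.Headers

// Sketch: keeps exactly one value per key in Kafka record headers.
class HeadersInjectSketch {
  private final Headers headers

  HeadersInjectSketch(Headers headers) {
    this.headers = headers
  }

  void put(String key, String value) {
    // remove(key) drops any previous values and returns this Headers,
    // so the chained add(...) leaves a single entry for the key.
    headers.remove(key).add(key, value.getBytes(StandardCharsets.UTF_8))
  }
}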

View File

@@ -35,8 +35,9 @@ class KafkaClientTest extends AgentTestRunner {
     def consumerFactory = new DefaultKafkaConsumerFactory<String, String>(consumerProperties)

     // set the topic that needs to be consumed
-    def containerProperties = null
+    def containerProperties
     try {
+      // Different class names for test and latestDepTest.
       containerProperties = Class.forName("org.springframework.kafka.listener.config.ContainerProperties").newInstance(SHARED_TOPIC)
     } catch (ClassNotFoundException | NoClassDefFoundError e) {
       containerProperties = Class.forName("org.springframework.kafka.listener.ContainerProperties").newInstance(SHARED_TOPIC)
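
For context: ContainerProperties lives in org.springframework.kafka.listener.config in spring-kafka 1.x and moved to org.springframework.kafka.listener in 2.x, so the test resolves it reflectively rather than importing either location. The same fallback written as a standalone helper (helper name and shape are ours, a sketch):

// Resolve ContainerProperties from whichever package the spring-kafka
// version on the classpath provides.
static Object newContainerProperties(String topic) {
  def candidates = [
    'org.springframework.kafka.listener.config.ContainerProperties', // spring-kafka 1.x
    'org.springframework.kafka.listener.ContainerProperties',        // spring-kafka 2.x+
  ]
  for (name in candidates) {
    try {
      return Class.forName(name).newInstance(topic)
    } catch (ClassNotFoundException | NoClassDefFoundError ignored) {
      // fall through to the next known package
    }
  }
  throw new IllegalStateException('ContainerProperties not found on classpath')
}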

View File

@@ -40,4 +40,5 @@ dependencies {
   latestDepTestCompile group: 'org.apache.kafka', name: 'kafka-streams', version: '+'
   latestDepTestCompile group: 'org.springframework.kafka', name: 'spring-kafka', version: '+'
   latestDepTestCompile group: 'org.springframework.kafka', name: 'spring-kafka-test', version: '+'
+  latestDepTestCompile group: 'org.assertj', name: 'assertj-core', version: '3.+'
 }
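
The version strings are Gradle dynamic versions: '+' resolves to the newest release the repositories offer, and a prefix form such as '3.+' stays within a major line. Illustrative only:

dependencies {
  // '+'   -> newest available release (the point of the latestDepTest set)
  // '3.+' -> newest 3.x release
  latestDepTestCompile group: 'org.assertj', name: 'assertj-core', version: '3.+'
}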

View File

@@ -1,11 +1,9 @@
 import datadog.trace.agent.test.AgentTestRunner
-import datadog.trace.api.Config
 import org.apache.kafka.clients.consumer.ConsumerRecord
 import org.apache.kafka.common.serialization.Serdes
 import org.apache.kafka.streams.KafkaStreams
 import org.apache.kafka.streams.StreamsConfig
 import org.apache.kafka.streams.kstream.KStream
-import org.apache.kafka.streams.kstream.KStreamBuilder
 import org.apache.kafka.streams.kstream.ValueMapper
 import org.junit.ClassRule
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory
@@ -13,7 +11,6 @@ import org.springframework.kafka.core.DefaultKafkaProducerFactory
 import org.springframework.kafka.core.KafkaTemplate
 import org.springframework.kafka.listener.KafkaMessageListenerContainer
 import org.springframework.kafka.listener.MessageListener
-import org.springframework.kafka.listener.config.ContainerProperties
 import org.springframework.kafka.test.rule.KafkaEmbedded
 import org.springframework.kafka.test.utils.ContainerTestUtils
 import org.springframework.kafka.test.utils.KafkaTestUtils
@@ -41,7 +38,15 @@ class KafkaStreamsTest extends AgentTestRunner {
     // CONFIGURE CONSUMER
     def consumerFactory = new DefaultKafkaConsumerFactory<String, String>(KafkaTestUtils.consumerProps("sender", "false", embeddedKafka))
-    def consumerContainer = new KafkaMessageListenerContainer<>(consumerFactory, new ContainerProperties(STREAM_PROCESSED))
+    def containerProperties
+    try {
+      // Different class names for test and latestDepTest.
+      containerProperties = Class.forName("org.springframework.kafka.listener.config.ContainerProperties").newInstance(STREAM_PROCESSED)
+    } catch (ClassNotFoundException | NoClassDefFoundError e) {
+      containerProperties = Class.forName("org.springframework.kafka.listener.ContainerProperties").newInstance(STREAM_PROCESSED)
+    }
+    def consumerContainer = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties)

     // create a thread safe queue to store the processed message
     def records = new LinkedBlockingQueue<ConsumerRecord<String, String>>()
@@ -65,9 +70,15 @@ class KafkaStreamsTest extends AgentTestRunner {
     ContainerTestUtils.waitForAssignment(consumerContainer, embeddedKafka.getPartitionsPerTopic())

     // CONFIGURE PROCESSOR
-    final KStreamBuilder builder = new KStreamBuilder()
+    def builder
+    try {
+      // Different class names for test and latestDepTest.
+      builder = Class.forName("org.apache.kafka.streams.kstream.KStreamBuilder").newInstance()
+    } catch (ClassNotFoundException | NoClassDefFoundError e) {
+      builder = Class.forName("org.apache.kafka.streams.StreamsBuilder").newInstance()
+    }
     KStream<String, String> textLines = builder.stream(STREAM_PENDING)
-    textLines
+    def values = textLines
       .mapValues(new ValueMapper<String, String>() {
         @Override
         String apply(String textLine) {
@@ -76,8 +87,18 @@ class KafkaStreamsTest extends AgentTestRunner {
           return textLine.toLowerCase()
         }
       })
-      .to(Serdes.String(), Serdes.String(), STREAM_PROCESSED)
-    KafkaStreams streams = new KafkaStreams(builder, config)
+    KafkaStreams streams
+    try {
+      // Different api for test and latestDepTest.
+      values.to(Serdes.String(), Serdes.String(), STREAM_PROCESSED)
+      streams = new KafkaStreams(builder, config)
+    } catch (MissingMethodException e) {
+      def producer = Class.forName("org.apache.kafka.streams.kstream.Produced")
+        .with(Serdes.String(), Serdes.String())
+      values.to(STREAM_PROCESSED, producer)
+      streams = new KafkaStreams(builder.build(), config)
+    }
     streams.start()

     // CONFIGURE PRODUCER
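
The fallback branch corresponds to the Kafka Streams 1.0+ API, where KStreamBuilder was replaced by StreamsBuilder and to(keySerde, valueSerde, topic) by to(topic, Produced.with(...)). Written directly, without the reflection the test needs to run against both versions (a sketch; config stands for the test's streams Properties):

import org.apache.kafka.common.serialization.Serdes
import org.apache.kafka.streams.KafkaStreams
import org.apache.kafka.streams.StreamsBuilder
import org.apache.kafka.streams.kstream.Produced
import org.apache.kafka.streams.kstream.ValueMapper

def builder = new StreamsBuilder()
builder.stream(STREAM_PENDING)
  .mapValues({ String line -> line.toLowerCase() } as ValueMapper)
  .to(STREAM_PROCESSED, Produced.with(Serdes.String(), Serdes.String()))
// 1.0+ builds an explicit Topology; pre-1.0 passed the builder itself.
def streams = new KafkaStreams(builder.build(), config)
streams.start()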
@@ -94,101 +115,89 @@ class KafkaStreamsTest extends AgentTestRunner {
     received.value() == greeting.toLowerCase()
     received.key() == null

-    TEST_WRITER.waitForTraces(3)
-    TEST_WRITER.size() == 3
-
-    def t1 = TEST_WRITER.get(0)
-    t1.size() == 1
-    def t2 = TEST_WRITER.get(1)
-    t2.size() == 2
-    def t3 = TEST_WRITER.get(2)
-    t3.size() == 1
-
-    and: // PRODUCER span 0
-    def t1span1 = t1[0]
-
-    t1span1.context().operationName == "kafka.produce"
-    t1span1.serviceName == "kafka"
-    t1span1.resourceName == "Produce Topic $STREAM_PENDING"
-    t1span1.type == "queue"
-    !t1span1.context().getErrorFlag()
-    t1span1.context().parentId == "0"
-
-    def t1tags1 = t1span1.context().tags
-    t1tags1["component"] == "java-kafka"
-    t1tags1["span.kind"] == "producer"
-    t1tags1["span.type"] == "queue"
-    t1tags1["thread.name"] != null
-    t1tags1["thread.id"] != null
-    t1tags1[Config.RUNTIME_ID_TAG] == Config.get().runtimeId
-    t1tags1.size() == 6
-
-    and: // STREAMING span 0
-    def t2span1 = t2[0]
-
-    t2span1.context().operationName == "kafka.produce"
-    t2span1.serviceName == "kafka"
-    t2span1.resourceName == "Produce Topic $STREAM_PROCESSED"
-    t2span1.type == "queue"
-    !t2span1.context().getErrorFlag()
-
-    def t2tags1 = t2span1.context().tags
-    t2tags1["component"] == "java-kafka"
-    t2tags1["span.kind"] == "producer"
-    t2tags1["span.type"] == "queue"
-    t2tags1["thread.name"] != null
-    t2tags1["thread.id"] != null
-    t2tags1.size() == 5
-
-    and: // STREAMING span 1
-    def t2span2 = t2[1]
-    t2span1.context().parentId == t2span2.context().spanId
-
-    t2span2.context().operationName == "kafka.consume"
-    t2span2.serviceName == "kafka"
-    t2span2.resourceName == "Consume Topic $STREAM_PENDING"
-    t2span2.type == "queue"
-    !t2span2.context().getErrorFlag()
-    t2span2.context().parentId == t1span1.context().spanId
-
-    def t2tags2 = t2span2.context().tags
-    t2tags2["component"] == "java-kafka"
-    t2tags2["span.kind"] == "consumer"
-    t2tags2["span.type"] == "queue"
-    t2tags2["partition"] >= 0
-    t2tags2["offset"] == 0
-    t2tags2["thread.name"] != null
-    t2tags2["thread.id"] != null
-    t2tags2[Config.RUNTIME_ID_TAG] == Config.get().runtimeId
-    t2tags2["asdf"] == "testing"
-    t2tags2.size() == 9
-
-    and: // CONSUMER span 0
-    def t3span1 = t3[0]
-
-    t3span1.context().operationName == "kafka.consume"
-    t3span1.serviceName == "kafka"
-    t3span1.resourceName == "Consume Topic $STREAM_PROCESSED"
-    t3span1.type == "queue"
-    !t3span1.context().getErrorFlag()
-    t3span1.context().parentId == t2span1.context().spanId
-
-    def t3tags1 = t3span1.context().tags
-    t3tags1["component"] == "java-kafka"
-    t3tags1["span.kind"] == "consumer"
-    t3tags1["span.type"] == "queue"
-    t3tags1["partition"] >= 0
-    t3tags1["offset"] == 0
-    t3tags1["thread.name"] != null
-    t3tags1["thread.id"] != null
-    t3tags1[Config.RUNTIME_ID_TAG] == Config.get().runtimeId
-    t3tags1["testing"] == 123
-    t3tags1.size() == 9
+    assertTraces(3) {
+      trace(0, 1) {
+        // PRODUCER span 0
+        span(0) {
+          serviceName "kafka"
+          operationName "kafka.produce"
+          resourceName "Produce Topic $STREAM_PENDING"
+          spanType "queue"
+          errored false
+          parent()
+          tags {
+            "component" "java-kafka"
+            "span.kind" "producer"
+            "span.type" "queue"
+            defaultTags()
+          }
+        }
+      }
+      trace(1, 2) {
+        // STREAMING span 0
+        span(0) {
+          serviceName "kafka"
+          operationName "kafka.produce"
+          resourceName "Produce Topic $STREAM_PROCESSED"
+          spanType "queue"
+          errored false
+          childOf span(1)
+          tags {
+            "component" "java-kafka"
+            "span.kind" "producer"
+            "span.type" "queue"
+            defaultTags()
+          }
+        }
+        // STREAMING span 1
+        span(1) {
+          serviceName "kafka"
+          operationName "kafka.consume"
+          resourceName "Consume Topic $STREAM_PENDING"
+          spanType "queue"
+          errored false
+          childOf TEST_WRITER[0][0]
+          tags {
+            "component" "java-kafka"
+            "span.kind" "consumer"
+            "span.type" "queue"
+            "partition" { it >= 0 }
+            "offset" 0
+            defaultTags(true)
+            "asdf" "testing"
+          }
+        }
+      }
+      trace(2, 1) {
+        // CONSUMER span 0
+        span(0) {
+          serviceName "kafka"
+          operationName "kafka.consume"
+          resourceName "Consume Topic $STREAM_PROCESSED"
+          spanType "queue"
+          errored false
+          childOf TEST_WRITER[1][0]
+          tags {
+            "component" "java-kafka"
+            "span.kind" "consumer"
+            "span.type" "queue"
+            "partition" { it >= 0 }
+            "offset" 0
+            defaultTags(true)
+            "testing" 123
+          }
+        }
+      }
+    }

     def headers = received.headers()
     headers.iterator().hasNext()
-    new String(headers.headers("x-datadog-trace-id").iterator().next().value()) == "$t2span1.traceId"
-    new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "$t2span1.spanId"
+    new String(headers.headers("x-datadog-trace-id").iterator().next().value()) == "${TEST_WRITER[1][0].traceId}"
+    new String(headers.headers("x-datadog-parent-id").iterator().next().value()) == "${TEST_WRITER[1][0].spanId}"

     cleanup:
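
These header assertions also lean on the TextMapInjectAdapter fix from the first file: with remove(key) before add(...), each x-datadog-* key carries exactly one value even though the streaming stage re-injects into the outgoing record. A hypothetical extra check in the same spirit:

// Headers.headers(key) returns every value stored under the key;
// idempotent injection means exactly one should survive.
def traceIdValues = received.headers().headers('x-datadog-trace-id').toList()
assert traceIdValues.size() == 1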

View File

@@ -2,6 +2,7 @@ import datadog.trace.agent.test.AgentTestRunner
 import datadog.trace.agent.test.TestUtils
 import datadog.trace.api.DDSpanTypes
 import datadog.trace.api.DDTags
+import io.netty.channel.AbstractChannel
 import io.opentracing.tag.Tags
 import org.asynchttpclient.AsyncHttpClient
 import org.asynchttpclient.DefaultAsyncHttpClientConfig
@@ -105,7 +106,11 @@ class Netty40ClientTest extends AgentTestRunner {
         errored true
         tags {
           "$Tags.COMPONENT.key" "netty"
+          try {
             errorTags ConnectException, "Connection refused: localhost/127.0.0.1:$invalidPort"
+          } catch (AssertionError e) {
+            errorTags AbstractChannel.AnnotatedConnectException, "Connection refused: localhost/127.0.0.1:$invalidPort"
+          }
           defaultTags()
         }
       }
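
Netty 4.1 reports failed connects as AbstractChannel.AnnotatedConnectException, a ConnectException subclass, while 4.0 throws plain ConnectException; hence the try around errorTags with a fallback assertion. The same idea as a generic helper (name and shape are ours, a sketch):

// Accept the first of several version-specific assertions that passes.
static void assertAnyOf(List<Closure> assertions) {
  def failures = []
  for (assertion in assertions) {
    try {
      assertion.call()
      return // first passing variant wins
    } catch (AssertionError e) {
      failures << e
    }
  }
  throw new AssertionError("all ${assertions.size()} variants failed: $failures")
}

Here the two variants would be the 4.0 and 4.1 errorTags calls.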

View File

@@ -43,7 +43,8 @@ dependencies {
   testCompile group: 'io.netty', name: 'netty-codec-http', version: '4.1.0.Final'
   testCompile group: 'org.asynchttpclient', name: 'async-http-client', version: '2.1.0'

-  latestDepTestCompile group: 'io.netty', name: 'netty-codec-http', version: '+'
+  latestDepTestCompile group: 'io.netty', name: 'netty-codec-http', version: '(,5.0)'
+  // latest async-http-client incompatible with 5.0+ netty
   latestDepTestCompile group: 'org.asynchttpclient', name: 'async-http-client', version: '+'
 }
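
'(,5.0)' is an Ivy-style version range with an exclusive upper bound: the newest netty-codec-http strictly below 5.0. That keeps latestDepTest on the 4.x line since, per the comment, current async-http-client does not work against Netty 5.0+. Notation, for reference:

// '+'      -> newest version, unbounded
// '(,5.0)' -> newest version below 5.0 (upper bound exclusive)
latestDepTestCompile group: 'io.netty', name: 'netty-codec-http', version: '(,5.0)'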

View File

@@ -27,7 +27,10 @@ dependencies {
   annotationProcessor deps.autoservice
   implementation deps.autoservice

-  testCompile project(':dd-java-agent:testing')
-  testCompile group: 'com.squareup.okhttp3', name: 'okhttp', version: '3.0.0'
-  latestDepTestCompile group: 'com.squareup.okhttp3', name: 'okhttp', version: '+'
+  testCompile(project(':dd-java-agent:testing')) {
+    exclude module: 'okhttp'
+  }
+  testCompile group: 'com.squareup.okhttp3', name: 'okhttp', version: '3.0.0'
+  latestDepTestCompile group: 'com.squareup.okhttp3', name: 'okhttp', version: '[3.11.0,)'
 }
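
Two coordinated changes here: the testing project's transitive okhttp is excluded so it cannot pin the version under test, and the latest-dep configuration switches from '+' to the maven-style lower bound '[3.11.0,)', i.e. 3.11.0 or newer. A sketch for verifying what actually resolved (task name is ours, hypothetical):

// Print the okhttp version each configuration resolved to.
task printOkHttpVersions {
  doLast {
    ['testRuntime', 'latestDepTestRuntime'].each { confName ->
      def artifacts = configurations.getByName(confName).resolvedConfiguration.resolvedArtifacts
      def okhttp = artifacts.find { it.moduleVersion.id.name == 'okhttp' }
      println "$confName -> ${okhttp?.moduleVersion?.id?.version}"
    }
  }
}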

View File

@@ -20,13 +20,13 @@ muzzle {
   }
 }

-apply plugin: 'org.unbroken-dome.test-sets'
-
-testSets {
-  latestDepTest {
-    dirName = 'test'
-  }
-}
+//apply plugin: 'org.unbroken-dome.test-sets'
+//
+//testSets {
+//  latestDepTest {
+//    dirName = 'test'
+//  }
+//}

 sourceCompatibility = 1.8
 targetCompatibility = 1.8

View File

@@ -119,7 +119,7 @@ artifacts {
 }

 project.afterEvaluate {
-  if (project.plugins.hasPlugin('org.unbroken-dome.test-sets')) {
+  if (project.plugins.hasPlugin('org.unbroken-dome.test-sets') && configurations.hasProperty("latestDepTestRuntime")) {
     tasks.withType(Test) {
       doFirst{
         def testArtifacts = configurations.testRuntime.resolvedConfiguration.resolvedArtifacts
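
The extra condition makes this shared block tolerate modules where the latestDepTestRuntime configuration does not exist, for instance when the testSets block is commented out as in the previous file; referencing a missing configuration would otherwise fail the build during configuration. A safe-lookup equivalent (a sketch):

// findByName returns null instead of throwing when the test set was not applied.
def latestDeps = configurations.findByName('latestDepTestRuntime')
if (latestDeps != null) {
  def latestArtifacts = latestDeps.resolvedConfiguration.resolvedArtifacts
  // ...compare against configurations.testRuntime, as the doFirst block does
}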