Merge pull request #731 from DataDog/tyler/synthetics

Implement trace origin header and propagation support
This commit is contained in:
Tyler Benson 2019-02-26 09:01:37 -08:00 committed by GitHub
commit 0cdc80aa14
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 188 additions and 211 deletions

View File

@ -36,6 +36,9 @@ subprojects { subProj ->
classPath = project(':dd-java-agent:agent-tooling').configurations.instrumentationMuzzle + subProj.configurations.compile + subProj.sourceSets.main.output classPath = project(':dd-java-agent:agent-tooling').configurations.instrumentationMuzzle + subProj.configurations.compile + subProj.sourceSets.main.output
} }
} }
// Make it so all instrumentation subproject tests can be run with a single command.
instr_project.tasks.test.dependsOn(subProj.tasks.test)
} }
instr_project.dependencies { instr_project.dependencies {

View File

@ -47,7 +47,6 @@ class TestHttpServer implements AutoCloseable {
private TestHttpServer() { private TestHttpServer() {
int port = PortUtils.randomOpenPort() int port = PortUtils.randomOpenPort()
internalServer = new Server(port) internalServer = new Server(port)
internalServer.stopAtShutdown = true
address = new URI("http://localhost:$port") address = new URI("http://localhost:$port")
} }
@ -62,6 +61,8 @@ class TestHttpServer implements AutoCloseable {
internalServer.handler = handlerList internalServer.handler = handlerList
System.out.println("Starting server $this on port $address.port") System.out.println("Starting server $this on port $address.port")
internalServer.start() internalServer.start()
// set after starting, otherwise two callbacks get added.
internalServer.stopAtShutdown = true
return this return this
} }

View File

@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j;
public class DDSpanContext implements io.opentracing.SpanContext { public class DDSpanContext implements io.opentracing.SpanContext {
public static final String PRIORITY_SAMPLING_KEY = "_sampling_priority_v1"; public static final String PRIORITY_SAMPLING_KEY = "_sampling_priority_v1";
public static final String SAMPLE_RATE_KEY = "_sample_rate"; public static final String SAMPLE_RATE_KEY = "_sample_rate";
public static final String ORIGIN_KEY = "_dd.origin";
private static final Map<String, Number> EMPTY_METRICS = Collections.emptyMap(); private static final Map<String, Number> EMPTY_METRICS = Collections.emptyMap();
@ -62,6 +63,8 @@ public class DDSpanContext implements io.opentracing.SpanContext {
* <p>For thread safety, this boolean is only modified or accessed under instance lock. * <p>For thread safety, this boolean is only modified or accessed under instance lock.
*/ */
private boolean samplingPriorityLocked = false; private boolean samplingPriorityLocked = false;
/** The origin of the trace. (e.g. Synthetics) */
private final String origin;
/** Metrics on the span */ /** Metrics on the span */
private final AtomicReference<Map<String, Number>> metrics = new AtomicReference<>(); private final AtomicReference<Map<String, Number>> metrics = new AtomicReference<>();
@ -77,6 +80,7 @@ public class DDSpanContext implements io.opentracing.SpanContext {
final String operationName, final String operationName,
final String resourceName, final String resourceName,
final int samplingPriority, final int samplingPriority,
final String origin,
final Map<String, String> baggageItems, final Map<String, String> baggageItems,
final boolean errorFlag, final boolean errorFlag,
final String spanType, final String spanType,
@ -111,10 +115,20 @@ public class DDSpanContext implements io.opentracing.SpanContext {
this.resourceName = resourceName; this.resourceName = resourceName;
this.errorFlag = errorFlag; this.errorFlag = errorFlag;
this.spanType = spanType; this.spanType = spanType;
this.origin = origin;
if (samplingPriority != PrioritySampling.UNSET) { if (samplingPriority != PrioritySampling.UNSET) {
setSamplingPriority(samplingPriority); setSamplingPriority(samplingPriority);
} }
if (spanType != null) {
this.tags.put(DDTags.SPAN_TYPE, spanType);
}
if (origin != null) {
this.tags.put(ORIGIN_KEY, origin);
}
this.tags.put(DDTags.THREAD_NAME, threadName);
this.tags.put(DDTags.THREAD_ID, threadId);
} }
public String getTraceId() { public String getTraceId() {
@ -167,6 +181,11 @@ public class DDSpanContext implements io.opentracing.SpanContext {
public void setSpanType(final String spanType) { public void setSpanType(final String spanType) {
this.spanType = spanType; this.spanType = spanType;
if (spanType == null) {
tags.remove(DDTags.SPAN_TYPE);
} else {
tags.put(DDTags.SPAN_TYPE, spanType);
}
} }
public void setSamplingPriority(final int newPriority) { public void setSamplingPriority(final int newPriority) {
@ -236,6 +255,15 @@ public class DDSpanContext implements io.opentracing.SpanContext {
} }
} }
public String getOrigin() {
final DDSpan rootSpan = trace.getRootSpan();
if (null != rootSpan) {
return rootSpan.context().origin;
} else {
return origin;
}
}
public void setBaggageItem(final String key, final String value) { public void setBaggageItem(final String key, final String value) {
baggageItems.put(key, value); baggageItems.put(key, value);
} }
@ -312,12 +340,6 @@ public class DDSpanContext implements io.opentracing.SpanContext {
} }
public synchronized Map<String, Object> getTags() { public synchronized Map<String, Object> getTags() {
tags.put(DDTags.THREAD_NAME, threadName);
tags.put(DDTags.THREAD_ID, threadId);
final String spanType = getSpanType();
if (spanType != null) {
tags.put(DDTags.SPAN_TYPE, spanType);
}
return Collections.unmodifiableMap(tags); return Collections.unmodifiableMap(tags);
} }

View File

@ -610,6 +610,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
final Map<String, String> baggage; final Map<String, String> baggage;
final PendingTrace parentTrace; final PendingTrace parentTrace;
final int samplingPriority; final int samplingPriority;
final String origin;
final DDSpanContext context; final DDSpanContext context;
SpanContext parentContext = parent; SpanContext parentContext = parent;
@ -629,6 +630,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
baggage = ddsc.getBaggageItems(); baggage = ddsc.getBaggageItems();
parentTrace = ddsc.getTrace(); parentTrace = ddsc.getTrace();
samplingPriority = PrioritySampling.UNSET; samplingPriority = PrioritySampling.UNSET;
origin = null;
if (serviceName == null) { if (serviceName == null) {
serviceName = ddsc.getServiceName(); serviceName = ddsc.getServiceName();
} }
@ -651,10 +653,15 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
samplingPriority = PrioritySampling.UNSET; samplingPriority = PrioritySampling.UNSET;
baggage = null; baggage = null;
} }
// Get header tags whether propagating or not.
// Get header tags and set origin whether propagating or not.
if (parentContext instanceof TagContext) { if (parentContext instanceof TagContext) {
tags.putAll(((TagContext) parentContext).getTags()); tags.putAll(((TagContext) parentContext).getTags());
origin = ((TagContext) parentContext).getOrigin();
} else {
origin = null;
} }
// add runtime tags to the root span // add runtime tags to the root span
for (final Map.Entry<String, String> runtimeTag : runtimeTags.entrySet()) { for (final Map.Entry<String, String> runtimeTag : runtimeTags.entrySet()) {
tags.put(runtimeTag.getKey(), runtimeTag.getValue()); tags.put(runtimeTag.getKey(), runtimeTag.getValue());
@ -679,6 +686,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
operationName, operationName,
resourceName, resourceName,
samplingPriority, samplingPriority,
origin,
baggage, baggage,
errorFlag, errorFlag,
spanType, spanType,

View File

@ -7,13 +7,13 @@ public class SpanTypeDecorator extends AbstractDecorator {
public SpanTypeDecorator() { public SpanTypeDecorator() {
super(); super();
this.setMatchingTag(DDTags.SPAN_TYPE); setMatchingTag(DDTags.SPAN_TYPE);
} }
@Override @Override
public boolean shouldSetTag(final DDSpanContext context, final String tag, final Object value) { public boolean shouldSetTag(final DDSpanContext context, final String tag, final Object value) {
context.setSpanType(String.valueOf(value)); context.setSpanType(String.valueOf(value));
// TODO: Do we really want a span type tag since it already exists on the span? // TODO: Do we really want a span type tag since it already exists on the span?
return false; return true;
} }
} }

View File

@ -25,6 +25,7 @@ public class DatadogHttpCodec {
private static final String TRACE_ID_KEY = "x-datadog-trace-id"; private static final String TRACE_ID_KEY = "x-datadog-trace-id";
private static final String SPAN_ID_KEY = "x-datadog-parent-id"; private static final String SPAN_ID_KEY = "x-datadog-parent-id";
private static final String SAMPLING_PRIORITY_KEY = "x-datadog-sampling-priority"; private static final String SAMPLING_PRIORITY_KEY = "x-datadog-sampling-priority";
private static final String ORIGIN_KEY = "x-datadog-origin";
public static class Injector { public static class Injector {
@ -34,6 +35,10 @@ public class DatadogHttpCodec {
if (context.lockSamplingPriority()) { if (context.lockSamplingPriority()) {
carrier.put(SAMPLING_PRIORITY_KEY, String.valueOf(context.getSamplingPriority())); carrier.put(SAMPLING_PRIORITY_KEY, String.valueOf(context.getSamplingPriority()));
} }
final String origin = context.getOrigin();
if (origin != null) {
carrier.put(ORIGIN_KEY, origin);
}
for (final Map.Entry<String, String> entry : context.baggageItems()) { for (final Map.Entry<String, String> entry : context.baggageItems()) {
carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), encode(entry.getValue())); carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), encode(entry.getValue()));
@ -69,6 +74,7 @@ public class DatadogHttpCodec {
String traceId = "0"; String traceId = "0";
String spanId = "0"; String spanId = "0";
int samplingPriority = PrioritySampling.UNSET; int samplingPriority = PrioritySampling.UNSET;
String origin = null;
for (final Map.Entry<String, String> entry : carrier) { for (final Map.Entry<String, String> entry : carrier) {
final String key = entry.getKey().toLowerCase(); final String key = entry.getKey().toLowerCase();
@ -82,13 +88,15 @@ public class DatadogHttpCodec {
traceId = validateUInt64BitsID(val); traceId = validateUInt64BitsID(val);
} else if (SPAN_ID_KEY.equalsIgnoreCase(key)) { } else if (SPAN_ID_KEY.equalsIgnoreCase(key)) {
spanId = validateUInt64BitsID(val); spanId = validateUInt64BitsID(val);
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(val);
} else if (ORIGIN_KEY.equalsIgnoreCase(key)) {
origin = val;
} else if (key.startsWith(OT_BAGGAGE_PREFIX)) { } else if (key.startsWith(OT_BAGGAGE_PREFIX)) {
if (baggage.isEmpty()) { if (baggage.isEmpty()) {
baggage = new HashMap<>(); baggage = new HashMap<>();
} }
baggage.put(key.replace(OT_BAGGAGE_PREFIX, ""), decode(val)); baggage.put(key.replace(OT_BAGGAGE_PREFIX, ""), decode(val));
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(val);
} }
if (taggedHeaders.containsKey(key)) { if (taggedHeaders.containsKey(key)) {
@ -102,13 +110,13 @@ public class DatadogHttpCodec {
SpanContext context = null; SpanContext context = null;
if (!"0".equals(traceId)) { if (!"0".equals(traceId)) {
final ExtractedContext ctx = final ExtractedContext ctx =
new ExtractedContext(traceId, spanId, samplingPriority, baggage, tags); new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
ctx.lockSamplingPriority(); ctx.lockSamplingPriority();
log.debug("{} - Parent context extracted", ctx.getTraceId()); log.debug("{} - Parent context extracted", ctx.getTraceId());
context = ctx; context = ctx;
} else if (!tags.isEmpty()) { } else if (origin != null || !tags.isEmpty()) {
context = new TagContext(tags); context = new TagContext(origin, tags);
} }
return context; return context;

View File

@ -17,9 +17,10 @@ public class ExtractedContext extends TagContext {
final String traceId, final String traceId,
final String spanId, final String spanId,
final int samplingPriority, final int samplingPriority,
final String origin,
final Map<String, String> baggage, final Map<String, String> baggage,
final Map<String, String> tags) { final Map<String, String> tags) {
super(tags); super(origin, tags);
this.traceId = traceId; this.traceId = traceId;
this.spanId = spanId; this.spanId = spanId;
this.samplingPriority = samplingPriority; this.samplingPriority = samplingPriority;

View File

@ -9,12 +9,18 @@ import java.util.Map;
* returned here even if the rest of the request would have returned null. * returned here even if the rest of the request would have returned null.
*/ */
public class TagContext implements SpanContext { public class TagContext implements SpanContext {
private final String origin;
private final Map<String, String> tags; private final Map<String, String> tags;
public TagContext(final Map<String, String> tags) { public TagContext(final String origin, final Map<String, String> tags) {
this.origin = origin;
this.tags = tags; this.tags = tags;
} }
public String getOrigin() {
return origin;
}
public Map<String, String> getTags() { public Map<String, String> getTags() {
return tags; return tags;
} }

View File

@ -1,11 +1,14 @@
package datadog.opentracing package datadog.opentracing
import datadog.opentracing.propagation.ExtractedContext import datadog.opentracing.propagation.ExtractedContext
import datadog.opentracing.propagation.TagContext
import datadog.trace.api.Config import datadog.trace.api.Config
import datadog.trace.api.DDTags import datadog.trace.api.DDTags
import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.writer.ListWriter import datadog.trace.common.writer.ListWriter
import spock.lang.Specification import spock.lang.Specification
import static datadog.opentracing.DDSpanContext.ORIGIN_KEY
import static java.util.concurrent.TimeUnit.MILLISECONDS import static java.util.concurrent.TimeUnit.MILLISECONDS
import static org.mockito.Mockito.mock import static org.mockito.Mockito.mock
import static org.mockito.Mockito.when import static org.mockito.Mockito.when
@ -382,6 +385,7 @@ class DDSpanBuilderTest extends Specification {
def "ExtractedContext should populate new span details"() { def "ExtractedContext should populate new span details"() {
setup: setup:
def thread = Thread.currentThread()
final DDSpan span = tracer.buildSpan("op name") final DDSpan span = tracer.buildSpan("op name")
.asChildOf(extractedContext).start() .asChildOf(extractedContext).start()
@ -389,14 +393,37 @@ class DDSpanBuilderTest extends Specification {
span.traceId == extractedContext.traceId span.traceId == extractedContext.traceId
span.parentId == extractedContext.spanId span.parentId == extractedContext.spanId
span.samplingPriority == extractedContext.samplingPriority span.samplingPriority == extractedContext.samplingPriority
span.context().origin == extractedContext.origin
span.context().baggageItems == extractedContext.baggage span.context().baggageItems == extractedContext.baggage
span.context().@tags == extractedContext.tags + [(Config.RUNTIME_ID_TAG) : config.getRuntimeId(), span.context().@tags == extractedContext.tags + [(Config.RUNTIME_ID_TAG) : config.getRuntimeId(),
(Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,] (Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,
(DDTags.THREAD_NAME) : thread.name, (DDTags.THREAD_ID): thread.id]
where: where:
extractedContext | _ extractedContext | _
new ExtractedContext("1", "2", 0, [:], [:]) | _ new ExtractedContext("1", "2", 0, null, [:], [:]) | _
new ExtractedContext("3", "4", 1, ["asdf": "qwer"], ["zxcv": "1234"]) | _ new ExtractedContext("3", "4", 1, "some-origin", ["asdf": "qwer"], [(ORIGIN_KEY): "some-origin", "zxcv": "1234"]) | _
}
def "TagContext should populate default span details"() {
setup:
def thread = Thread.currentThread()
final DDSpan span = tracer.buildSpan("op name").asChildOf(tagContext).start()
expect:
span.traceId != "0"
span.parentId == "0"
span.samplingPriority == PrioritySampling.SAMPLER_KEEP // Since we're using the RateByServiceSampler
span.context().origin == tagContext.origin
span.context().baggageItems == [:]
span.context().@tags == tagContext.tags + [(Config.RUNTIME_ID_TAG) : config.getRuntimeId(),
(Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,
(DDTags.THREAD_NAME) : thread.name, (DDTags.THREAD_ID): thread.id]
where:
tagContext | _
new TagContext(null, [:]) | _
new TagContext("some-origin", [(ORIGIN_KEY): "some-origin", "asdf": "qwer"]) | _
} }
def "global span tags populated on each span"() { def "global span tags populated on each span"() {

View File

@ -50,6 +50,7 @@ class DDSpanSerializationTest extends Specification {
"operation", "operation",
null, null,
samplingPriority, samplingPriority,
null,
new HashMap<>(baggage), new HashMap<>(baggage),
false, false,
"type", "type",
@ -90,6 +91,7 @@ class DDSpanSerializationTest extends Specification {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",

View File

@ -1,5 +1,7 @@
package datadog.opentracing package datadog.opentracing
import datadog.opentracing.propagation.ExtractedContext
import datadog.opentracing.propagation.TagContext
import datadog.trace.api.sampling.PrioritySampling import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.sampling.RateByServiceSampler import datadog.trace.common.sampling.RateByServiceSampler
import datadog.trace.common.writer.ListWriter import datadog.trace.common.writer.ListWriter
@ -24,6 +26,7 @@ class DDSpanTest extends Specification {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.<String, String> emptyMap(), Collections.<String, String> emptyMap(),
false, false,
"fakeType", "fakeType",
@ -183,6 +186,23 @@ class DDSpanTest extends Specification {
child2.getMetrics().get(DDSpanContext.PRIORITY_SAMPLING_KEY) == null child2.getMetrics().get(DDSpanContext.PRIORITY_SAMPLING_KEY) == null
} }
def "origin set only on root span"() {
setup:
def parent = tracer.buildSpan("testParent").asChildOf(extractedContext).start().context()
def child = tracer.buildSpan("testChild1").asChildOf(parent).start().context()
expect:
parent.origin == "some-origin"
parent.@origin == "some-origin" // Access field directly instead of getter.
child.origin == "some-origin"
child.@origin == null // Access field directly instead of getter.
where:
extractedContext | _
new TagContext("some-origin", [:]) | _
new ExtractedContext("1", "2", 0, "some-origin", [:], [:]) | _
}
def "getRootSpan returns the root span"() { def "getRootSpan returns the root span"() {
setup: setup:
def root = tracer.buildSpan("root").start() def root = tracer.buildSpan("root").start()

View File

@ -15,6 +15,7 @@ class SpanFactory {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",
@ -33,6 +34,7 @@ class SpanFactory {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",
@ -51,6 +53,7 @@ class SpanFactory {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",
@ -71,6 +74,7 @@ class SpanFactory {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",

View File

@ -97,6 +97,7 @@ class URLAsResourceNameTest extends Specification {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.<String, String> emptyMap(), Collections.<String, String> emptyMap(),
false, false,
"fakeType", "fakeType",

View File

@ -5,6 +5,7 @@ import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Specification import spock.lang.Specification
import static datadog.opentracing.propagation.DatadogHttpCodec.BIG_INTEGER_UINT64_MAX import static datadog.opentracing.propagation.DatadogHttpCodec.BIG_INTEGER_UINT64_MAX
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
@ -17,44 +18,57 @@ class DatadogHttpExtractorTest extends Specification {
def "extract http headers"() { def "extract http headers"() {
setup: setup:
final Map<String, String> actual = [ final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : "1", (TRACE_ID_KEY.toUpperCase()) : traceID,
(SPAN_ID_KEY.toUpperCase()) : "2", (SPAN_ID_KEY.toUpperCase()) : spanID,
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1", (OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2", (OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info", SOME_HEADER : "my-interesting-info",
] ]
if (samplingPriority != PrioritySampling.UNSET) { if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority)) actual.put(SAMPLING_PRIORITY_KEY, "$samplingPriority".toString())
}
if (origin) {
actual.put(ORIGIN_KEY, origin)
} }
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual)) final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect: expect:
context.getTraceId() == "1" context.traceId == traceID
context.getSpanId() == "2" context.spanId == spanID
context.getBaggage().get("k1") == "v1" context.baggage.get("k1") == "v1"
context.getBaggage().get("k2") == "v2" context.baggage.get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"] context.tags == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority context.samplingPriority == samplingPriority
context.origin == origin
where: where:
samplingPriority | _ traceID | spanID | samplingPriority | origin
PrioritySampling.UNSET | _ "1" | "2" | PrioritySampling.UNSET | null
PrioritySampling.SAMPLER_KEEP | _ "1" | "2" | PrioritySampling.SAMPLER_KEEP | "saipan"
// Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | PrioritySampling.UNSET | "saipan"
"18446744073709551615" | "18446744073709551614" | PrioritySampling.SAMPLER_KEEP | null
BIG_INTEGER_UINT64_MAX.toString() | BIG_INTEGER_UINT64_MAX.minus(1).toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
} }
def "extract header tags with no propagation"() { def "extract header tags with no propagation"() {
setup: when:
final Map<String, String> actual = [ TagContext context = extractor.extract(new TextMapExtractAdapter(headers))
SOME_HEADER: "my-interesting-info",
]
TagContext context = extractor.extract(new TextMapExtractAdapter(actual)) then:
expect:
!(context instanceof ExtractedContext) !(context instanceof ExtractedContext)
context.getTags() == ["some-tag": "my-interesting-info"] context.getTags() == ["some-tag": "my-interesting-info"]
if (headers.containsKey(ORIGIN_KEY)) {
((TagContext) context).origin == "my-origin"
}
where:
headers | _
[SOME_HEADER: "my-interesting-info"] | _
[(ORIGIN_KEY): "my-origin", SOME_HEADER: "my-interesting-info"] | _
} }
def "extract empty headers returns null"() { def "extract empty headers returns null"() {
@ -62,69 +76,6 @@ class DatadogHttpExtractorTest extends Specification {
extractor.extract(new TextMapExtractAdapter(["ignored-header": "ignored-value"])) == null extractor.extract(new TextMapExtractAdapter(["ignored-header": "ignored-value"])) == null
} }
def "extract http headers with larger than Java long IDs"() {
setup:
String largeTraceId = "9523372036854775807"
String largeSpanId = "15815582334751494918"
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : largeTraceId,
(SPAN_ID_KEY.toUpperCase()) : largeSpanId,
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
context.getTraceId() == largeTraceId
context.getSpanId() == largeSpanId
context.getBaggage().get("k1") == "v1"
context.getBaggage().get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "extract http headers with uint 64 max IDs"() {
setup:
String largeSpanId = BIG_INTEGER_UINT64_MAX.subtract(BigInteger.ONE).toString()
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : BIG_INTEGER_UINT64_MAX.toString(),
(SPAN_ID_KEY.toUpperCase()) : BIG_INTEGER_UINT64_MAX.minus(1).toString(),
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
context.getTraceId() == BIG_INTEGER_UINT64_MAX.toString()
context.getSpanId() == largeSpanId
context.getBaggage().get("k1") == "v1"
context.getBaggage().get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "extract http headers with invalid non-numeric ID"() { def "extract http headers with invalid non-numeric ID"() {
setup: setup:
final Map<String, String> actual = [ final Map<String, String> actual = [

View File

@ -8,6 +8,7 @@ import datadog.trace.common.writer.ListWriter
import io.opentracing.propagation.TextMapInjectAdapter import io.opentracing.propagation.TextMapInjectAdapter
import spock.lang.Specification import spock.lang.Specification
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
@ -23,13 +24,14 @@ class DatadogHttpInjectorTest extends Specification {
def tracer = new DDTracer(writer) def tracer = new DDTracer(writer)
final DDSpanContext mockedContext = final DDSpanContext mockedContext =
new DDSpanContext( new DDSpanContext(
"1", traceID,
"2", spanID,
"0", "0",
"fakeService", "fakeService",
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
samplingPriority, samplingPriority,
origin,
new HashMap<String, String>() { new HashMap<String, String>() {
{ {
put("k1", "v1") put("k1", "v1")
@ -42,110 +44,30 @@ class DatadogHttpInjectorTest extends Specification {
new PendingTrace(tracer, "1", [:]), new PendingTrace(tracer, "1", [:]),
tracer) tracer)
final Map<String, String> carrier = new HashMap<>() final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier)) injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect: then:
carrier.get(TRACE_ID_KEY) == "1" 1 * carrier.put(TRACE_ID_KEY, traceID)
carrier.get(SPAN_ID_KEY) == "2" 1 * carrier.put(SPAN_ID_KEY, spanID)
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority)) 1 * carrier.put(OT_BAGGAGE_PREFIX + "k1", "v1")
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1" 1 * carrier.put(OT_BAGGAGE_PREFIX + "k2", "v2")
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2" if (samplingPriority != PrioritySampling.UNSET) {
1 * carrier.put(SAMPLING_PRIORITY_KEY, "$samplingPriority")
}
if (origin) {
1 * carrier.put(ORIGIN_KEY, origin)
}
0 * _
where: where:
samplingPriority | _ traceID | spanID | parentID | samplingPriority | origin
PrioritySampling.UNSET | _ "1" | "2" | "0" | PrioritySampling.UNSET | null
PrioritySampling.SAMPLER_KEEP | _ "1" | "2" | "0" | PrioritySampling.SAMPLER_KEEP | "saipan"
} // Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | "15815582334751494914" | PrioritySampling.UNSET | "saipan"
def "inject http headers with larger than Java long IDs"() { "18446744073709551615" | "18446744073709551614" | "18446744073709551613" | PrioritySampling.SAMPLER_KEEP | null
String largeTraceId = "9523372036854775807"
String largeSpanId = "15815582334751494918"
String largeParentId = "15815582334751494914"
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
largeTraceId,
largeSpanId,
largeParentId,
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, largeTraceId, [:]),
tracer)
final Map<String, String> carrier = new HashMap<>()
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect:
carrier.get(TRACE_ID_KEY) == largeTraceId
carrier.get(SPAN_ID_KEY) == largeSpanId
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority))
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1"
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2"
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "inject http headers with uint 64 max IDs"() {
String largeTraceId = "18446744073709551615"
String largeSpanId = "18446744073709551614"
String largeParentId = "18446744073709551613"
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
largeTraceId,
largeSpanId,
largeParentId,
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, largeTraceId, [:]),
tracer)
final Map<String, String> carrier = new HashMap<>()
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect:
carrier.get(TRACE_ID_KEY) == largeTraceId
carrier.get(SPAN_ID_KEY) == largeSpanId
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority))
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1"
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2"
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
} }
} }

View File

@ -18,13 +18,13 @@ class DDSpanContextTest extends Specification {
context.serviceName == "fakeService" context.serviceName == "fakeService"
context.resourceName == "fakeResource" context.resourceName == "fakeResource"
context.spanType == "fakeType" context.spanType == "fakeType"
context.toString() == "DDSpan [ t_id=1, s_id=1, p_id=0] trace=fakeService/fakeOperation/fakeResource metrics={} *errored* tags={${extra}span.type=${context.getSpanType()}, thread.id=${Thread.currentThread().id}, thread.name=${Thread.currentThread().name}}" context.toString() == "DDSpan [ t_id=1, s_id=1, p_id=0] trace=fakeService/fakeOperation/fakeResource metrics={} *errored* tags={${extra}${tags.containsKey(DDTags.SPAN_TYPE) ? "span.type=${context.getSpanType()}, " : ""}thread.id=${Thread.currentThread().id}, thread.name=${Thread.currentThread().name}}"
where: where:
name | extra | tags name | extra | tags
DDTags.SERVICE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id] DDTags.SERVICE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
DDTags.RESOURCE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id] DDTags.RESOURCE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
DDTags.SPAN_TYPE | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id] DDTags.SPAN_TYPE | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
"some.tag" | "" | [(DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id] "some.tag" | "" | [(DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
} }

View File

@ -26,6 +26,7 @@ class DDApiIntegrationTest {
"fakeOperation", "fakeOperation",
"fakeResource", "fakeResource",
PrioritySampling.UNSET, PrioritySampling.UNSET,
null,
Collections.emptyMap(), Collections.emptyMap(),
false, false,
"fakeType", "fakeType",