Merge pull request #731 from DataDog/tyler/synthetics

Implement trace origin header and propagation support
Tyler Benson 2019-02-26 09:01:37 -08:00 committed by GitHub
commit 0cdc80aa14
17 changed files with 188 additions and 211 deletions
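
For orientation before the per-file diffs: this change threads a new x-datadog-origin propagation header through the tracer. The extractor reads it into TagContext/ExtractedContext, DDSpanContext records it as the _dd.origin tag on the root span (child spans resolve the value through the root rather than storing it), and the injector writes the header back out on downstream requests. The following is a minimal, self-contained sketch of that round trip; the OriginCodecSketch class and its methods are illustrative stand-ins, not the tracer's actual DatadogHttpCodec API, and "synthetics" is only an example origin value.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: the header and tag names below match the diff,
// but this class and its methods are hypothetical, not dd-trace-java's API.
public class OriginCodecSketch {
  static final String ORIGIN_HEADER = "x-datadog-origin"; // propagation header
  static final String ORIGIN_TAG = "_dd.origin";          // tag set on the root span

  // Extract: pull the origin (if any) from inbound request headers.
  static String extractOrigin(Map<String, String> headers) {
    return headers.get(ORIGIN_HEADER);
  }

  // Tag: only the root span of the trace carries the origin tag.
  static void tagRootSpan(Map<String, Object> rootSpanTags, String origin) {
    if (origin != null) {
      rootSpanTags.put(ORIGIN_TAG, origin);
    }
  }

  // Inject: forward the origin on outbound requests so the next service keeps it.
  static void injectOrigin(Map<String, String> carrier, String origin) {
    if (origin != null) {
      carrier.put(ORIGIN_HEADER, origin);
    }
  }

  public static void main(String[] args) {
    Map<String, String> inbound = new HashMap<>();
    inbound.put(ORIGIN_HEADER, "synthetics"); // example origin value

    String origin = extractOrigin(inbound);

    Map<String, Object> rootTags = new HashMap<>();
    tagRootSpan(rootTags, origin);

    Map<String, String> outbound = new HashMap<>();
    injectOrigin(outbound, origin);

    System.out.println(rootTags); // {_dd.origin=synthetics}
    System.out.println(outbound); // {x-datadog-origin=synthetics}
  }
}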

View File

@@ -36,6 +36,9 @@ subprojects { subProj ->
classPath = project(':dd-java-agent:agent-tooling').configurations.instrumentationMuzzle + subProj.configurations.compile + subProj.sourceSets.main.output
}
}
// Make it so all instrumentation subproject tests can be run with a single command.
instr_project.tasks.test.dependsOn(subProj.tasks.test)
}
instr_project.dependencies {

View File

@@ -47,7 +47,6 @@ class TestHttpServer implements AutoCloseable {
private TestHttpServer() {
int port = PortUtils.randomOpenPort()
internalServer = new Server(port)
internalServer.stopAtShutdown = true
address = new URI("http://localhost:$port")
}
@@ -62,6 +61,8 @@ class TestHttpServer implements AutoCloseable {
internalServer.handler = handlerList
System.out.println("Starting server $this on port $address.port")
internalServer.start()
// set after starting, otherwise two callbacks get added.
internalServer.stopAtShutdown = true
return this
}

View File

@@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j;
public class DDSpanContext implements io.opentracing.SpanContext {
public static final String PRIORITY_SAMPLING_KEY = "_sampling_priority_v1";
public static final String SAMPLE_RATE_KEY = "_sample_rate";
public static final String ORIGIN_KEY = "_dd.origin";
private static final Map<String, Number> EMPTY_METRICS = Collections.emptyMap();
@@ -62,6 +63,8 @@ public class DDSpanContext implements io.opentracing.SpanContext {
* <p>For thread safety, this boolean is only modified or accessed under instance lock.
*/
private boolean samplingPriorityLocked = false;
/** The origin of the trace. (eg. Synthetics) */
private final String origin;
/** Metrics on the span */
private final AtomicReference<Map<String, Number>> metrics = new AtomicReference<>();
@@ -77,6 +80,7 @@ public class DDSpanContext implements io.opentracing.SpanContext {
final String operationName,
final String resourceName,
final int samplingPriority,
final String origin,
final Map<String, String> baggageItems,
final boolean errorFlag,
final String spanType,
@@ -111,10 +115,20 @@ public class DDSpanContext implements io.opentracing.SpanContext {
this.resourceName = resourceName;
this.errorFlag = errorFlag;
this.spanType = spanType;
this.origin = origin;
if (samplingPriority != PrioritySampling.UNSET) {
setSamplingPriority(samplingPriority);
}
if (spanType != null) {
this.tags.put(DDTags.SPAN_TYPE, spanType);
}
if (origin != null) {
this.tags.put(ORIGIN_KEY, origin);
}
this.tags.put(DDTags.THREAD_NAME, threadName);
this.tags.put(DDTags.THREAD_ID, threadId);
}
public String getTraceId() {
@@ -167,6 +181,11 @@ public class DDSpanContext implements io.opentracing.SpanContext {
public void setSpanType(final String spanType) {
this.spanType = spanType;
if (spanType == null) {
tags.remove(DDTags.SPAN_TYPE);
} else {
tags.put(DDTags.SPAN_TYPE, spanType);
}
}
public void setSamplingPriority(final int newPriority) {
@@ -236,6 +255,15 @@ public class DDSpanContext implements io.opentracing.SpanContext {
}
}
public String getOrigin() {
final DDSpan rootSpan = trace.getRootSpan();
if (null != rootSpan) {
return rootSpan.context().origin;
} else {
return origin;
}
}
public void setBaggageItem(final String key, final String value) {
baggageItems.put(key, value);
}
@@ -312,12 +340,6 @@ public class DDSpanContext implements io.opentracing.SpanContext {
}
public synchronized Map<String, Object> getTags() {
tags.put(DDTags.THREAD_NAME, threadName);
tags.put(DDTags.THREAD_ID, threadId);
final String spanType = getSpanType();
if (spanType != null) {
tags.put(DDTags.SPAN_TYPE, spanType);
}
return Collections.unmodifiableMap(tags);
}
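
A note on the getOrigin() accessor added above: a child DDSpanContext never stores the origin itself; it resolves the value through the trace's root span, so the origin only has to be set once when the trace starts (the "origin set only on root span" test later in this diff checks exactly that). The stripped-down sketch below illustrates the delegation pattern with a placeholder type; the real code goes through PendingTrace.getRootSpan() as shown in the diff.

// Placeholder type for illustration only; not DDSpanContext itself.
class ContextSketch {
  private final ContextSketch rootContext; // null when this context is itself the root
  private final String origin;             // only populated on the root context

  ContextSketch(ContextSketch rootContext, String origin) {
    this.rootContext = rootContext;
    this.origin = origin;
  }

  String getOrigin() {
    // Children delegate to the root, so the origin lives in exactly one place per trace.
    return rootContext != null ? rootContext.getOrigin() : origin;
  }

  public static void main(String[] args) {
    ContextSketch root = new ContextSketch(null, "synthetics");
    ContextSketch child = new ContextSketch(root, null);
    System.out.println(child.getOrigin()); // prints "synthetics" even though the child's field is null
  }
}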

View File

@@ -610,6 +610,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
final Map<String, String> baggage;
final PendingTrace parentTrace;
final int samplingPriority;
final String origin;
final DDSpanContext context;
SpanContext parentContext = parent;
@@ -629,6 +630,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
baggage = ddsc.getBaggageItems();
parentTrace = ddsc.getTrace();
samplingPriority = PrioritySampling.UNSET;
origin = null;
if (serviceName == null) {
serviceName = ddsc.getServiceName();
}
@@ -651,10 +653,15 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
samplingPriority = PrioritySampling.UNSET;
baggage = null;
}
// Get header tags whether propagating or not.
// Get header tags and set origin whether propagating or not.
if (parentContext instanceof TagContext) {
tags.putAll(((TagContext) parentContext).getTags());
origin = ((TagContext) parentContext).getOrigin();
} else {
origin = null;
}
// add runtime tags to the root span
for (final Map.Entry<String, String> runtimeTag : runtimeTags.entrySet()) {
tags.put(runtimeTag.getKey(), runtimeTag.getValue());
@@ -679,6 +686,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
operationName,
resourceName,
samplingPriority,
origin,
baggage,
errorFlag,
spanType,

View File

@@ -7,13 +7,13 @@ public class SpanTypeDecorator extends AbstractDecorator {
public SpanTypeDecorator() {
super();
this.setMatchingTag(DDTags.SPAN_TYPE);
setMatchingTag(DDTags.SPAN_TYPE);
}
@Override
public boolean shouldSetTag(final DDSpanContext context, final String tag, final Object value) {
context.setSpanType(String.valueOf(value));
// TODO: Do we really want a span type tag since it already exists on the span?
return false;
return true;
}
}

View File

@@ -25,6 +25,7 @@ public class DatadogHttpCodec {
private static final String TRACE_ID_KEY = "x-datadog-trace-id";
private static final String SPAN_ID_KEY = "x-datadog-parent-id";
private static final String SAMPLING_PRIORITY_KEY = "x-datadog-sampling-priority";
private static final String ORIGIN_KEY = "x-datadog-origin";
public static class Injector {
@@ -34,6 +35,10 @@ public class DatadogHttpCodec {
if (context.lockSamplingPriority()) {
carrier.put(SAMPLING_PRIORITY_KEY, String.valueOf(context.getSamplingPriority()));
}
final String origin = context.getOrigin();
if (origin != null) {
carrier.put(ORIGIN_KEY, origin);
}
for (final Map.Entry<String, String> entry : context.baggageItems()) {
carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), encode(entry.getValue()));
@@ -69,6 +74,7 @@
String traceId = "0";
String spanId = "0";
int samplingPriority = PrioritySampling.UNSET;
String origin = null;
for (final Map.Entry<String, String> entry : carrier) {
final String key = entry.getKey().toLowerCase();
@@ -82,13 +88,15 @@
traceId = validateUInt64BitsID(val);
} else if (SPAN_ID_KEY.equalsIgnoreCase(key)) {
spanId = validateUInt64BitsID(val);
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(val);
} else if (ORIGIN_KEY.equalsIgnoreCase(key)) {
origin = val;
} else if (key.startsWith(OT_BAGGAGE_PREFIX)) {
if (baggage.isEmpty()) {
baggage = new HashMap<>();
}
baggage.put(key.replace(OT_BAGGAGE_PREFIX, ""), decode(val));
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(val);
}
if (taggedHeaders.containsKey(key)) {
@@ -102,13 +110,13 @@
SpanContext context = null;
if (!"0".equals(traceId)) {
final ExtractedContext ctx =
new ExtractedContext(traceId, spanId, samplingPriority, baggage, tags);
new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
ctx.lockSamplingPriority();
log.debug("{} - Parent context extracted", ctx.getTraceId());
context = ctx;
} else if (!tags.isEmpty()) {
context = new TagContext(tags);
} else if (origin != null || !tags.isEmpty()) {
context = new TagContext(origin, tags);
}
return context;
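
One behavioral consequence of the extractor change above: a carrier that carries an x-datadog-origin header but no trace id now yields a TagContext rather than null, so the origin still reaches the local root span even without propagated ids. Below is a condensed sketch of that branching, using a plain string result in place of the real ExtractedContext/TagContext types; it is illustrative only.

import java.util.Collections;
import java.util.Map;

// Hypothetical helper for illustration; the real codec returns SpanContext instances.
public class ExtractDecisionSketch {
  static String decide(String traceId, String origin, Map<String, String> tags) {
    if (!"0".equals(traceId)) {
      return "full parent context (ids + origin + tags)";
    } else if (origin != null || !tags.isEmpty()) {
      return "tag-only context (origin and/or header tags, no parent ids)";
    }
    return null; // nothing worth propagating was found
  }

  public static void main(String[] args) {
    Map<String, String> none = Collections.emptyMap();
    System.out.println(decide("123", "synthetics", none)); // full parent context
    System.out.println(decide("0", "synthetics", none));   // tag-only context (new behavior)
    System.out.println(decide("0", null, none));           // null
  }
}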

View File

@@ -17,9 +17,10 @@ public class ExtractedContext extends TagContext {
final String traceId,
final String spanId,
final int samplingPriority,
final String origin,
final Map<String, String> baggage,
final Map<String, String> tags) {
super(tags);
super(origin, tags);
this.traceId = traceId;
this.spanId = spanId;
this.samplingPriority = samplingPriority;

View File

@@ -9,12 +9,18 @@ import java.util.Map;
* returned here even if the rest of the request would have returned null.
*/
public class TagContext implements SpanContext {
private final String origin;
private final Map<String, String> tags;
public TagContext(final Map<String, String> tags) {
public TagContext(final String origin, final Map<String, String> tags) {
this.origin = origin;
this.tags = tags;
}
public String getOrigin() {
return origin;
}
public Map<String, String> getTags() {
return tags;
}

View File

@@ -1,11 +1,14 @@
package datadog.opentracing
import datadog.opentracing.propagation.ExtractedContext
import datadog.opentracing.propagation.TagContext
import datadog.trace.api.Config
import datadog.trace.api.DDTags
import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.writer.ListWriter
import spock.lang.Specification
import static datadog.opentracing.DDSpanContext.ORIGIN_KEY
import static java.util.concurrent.TimeUnit.MILLISECONDS
import static org.mockito.Mockito.mock
import static org.mockito.Mockito.when
@@ -382,6 +385,7 @@ class DDSpanBuilderTest extends Specification {
def "ExtractedContext should populate new span details"() {
setup:
def thread = Thread.currentThread()
final DDSpan span = tracer.buildSpan("op name")
.asChildOf(extractedContext).start()
@@ -389,14 +393,37 @@
span.traceId == extractedContext.traceId
span.parentId == extractedContext.spanId
span.samplingPriority == extractedContext.samplingPriority
span.context().origin == extractedContext.origin
span.context().baggageItems == extractedContext.baggage
span.context().@tags == extractedContext.tags + [(Config.RUNTIME_ID_TAG) : config.getRuntimeId(),
(Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,]
(Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,
(DDTags.THREAD_NAME) : thread.name, (DDTags.THREAD_ID): thread.id]
where:
extractedContext | _
new ExtractedContext("1", "2", 0, [:], [:]) | _
new ExtractedContext("3", "4", 1, ["asdf": "qwer"], ["zxcv": "1234"]) | _
new ExtractedContext("1", "2", 0, null, [:], [:]) | _
new ExtractedContext("3", "4", 1, "some-origin", ["asdf": "qwer"], [(ORIGIN_KEY): "some-origin", "zxcv": "1234"]) | _
}
def "TagContext should populate default span details"() {
setup:
def thread = Thread.currentThread()
final DDSpan span = tracer.buildSpan("op name").asChildOf(tagContext).start()
expect:
span.traceId != "0"
span.parentId == "0"
span.samplingPriority == PrioritySampling.SAMPLER_KEEP // Since we're using the RateByServiceSampler
span.context().origin == tagContext.origin
span.context().baggageItems == [:]
span.context().@tags == tagContext.tags + [(Config.RUNTIME_ID_TAG) : config.getRuntimeId(),
(Config.LANGUAGE_TAG_KEY): Config.LANGUAGE_TAG_VALUE,
(DDTags.THREAD_NAME) : thread.name, (DDTags.THREAD_ID): thread.id]
where:
tagContext | _
new TagContext(null, [:]) | _
new TagContext("some-origin", [(ORIGIN_KEY): "some-origin", "asdf": "qwer"]) | _
}
def "global span tags populated on each span"() {

View File

@@ -50,6 +50,7 @@ class DDSpanSerializationTest extends Specification {
"operation",
null,
samplingPriority,
null,
new HashMap<>(baggage),
false,
"type",
@@ -90,6 +91,7 @@
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",

View File

@@ -1,5 +1,7 @@
package datadog.opentracing
import datadog.opentracing.propagation.ExtractedContext
import datadog.opentracing.propagation.TagContext
import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.sampling.RateByServiceSampler
import datadog.trace.common.writer.ListWriter
@@ -24,6 +26,7 @@ class DDSpanTest extends Specification {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.<String, String> emptyMap(),
false,
"fakeType",
@@ -183,6 +186,23 @@
child2.getMetrics().get(DDSpanContext.PRIORITY_SAMPLING_KEY) == null
}
def "origin set only on root span"() {
setup:
def parent = tracer.buildSpan("testParent").asChildOf(extractedContext).start().context()
def child = tracer.buildSpan("testChild1").asChildOf(parent).start().context()
expect:
parent.origin == "some-origin"
parent.@origin == "some-origin" // Access field directly instead of getter.
child.origin == "some-origin"
child.@origin == null // Access field directly instead of getter.
where:
extractedContext | _
new TagContext("some-origin", [:]) | _
new ExtractedContext("1", "2", 0, "some-origin", [:], [:]) | _
}
def "getRootSpan returns the root span"() {
setup:
def root = tracer.buildSpan("root").start()

View File

@@ -15,6 +15,7 @@ class SpanFactory {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",
@@ -33,6 +34,7 @@ class SpanFactory {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",
@@ -51,6 +53,7 @@ class SpanFactory {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",
@@ -71,6 +74,7 @@ class SpanFactory {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",

View File

@@ -97,6 +97,7 @@ class URLAsResourceNameTest extends Specification {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.<String, String> emptyMap(),
false,
"fakeType",

View File

@@ -5,6 +5,7 @@ import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Specification
import static datadog.opentracing.propagation.DatadogHttpCodec.BIG_INTEGER_UINT64_MAX
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
@@ -17,44 +18,57 @@ class DatadogHttpExtractorTest extends Specification {
def "extract http headers"() {
setup:
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : "1",
(SPAN_ID_KEY.toUpperCase()) : "2",
(TRACE_ID_KEY.toUpperCase()) : traceID,
(SPAN_ID_KEY.toUpperCase()) : spanID,
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
actual.put(SAMPLING_PRIORITY_KEY, "$samplingPriority".toString())
}
if (origin) {
actual.put(ORIGIN_KEY, origin)
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
context.getTraceId() == "1"
context.getSpanId() == "2"
context.getBaggage().get("k1") == "v1"
context.getBaggage().get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority
context.traceId == traceID
context.spanId == spanID
context.baggage.get("k1") == "v1"
context.baggage.get("k2") == "v2"
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == samplingPriority
context.origin == origin
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
traceID | spanID | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"1" | "2" | PrioritySampling.SAMPLER_KEEP | "saipan"
// Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | PrioritySampling.UNSET | "saipan"
"18446744073709551615" | "18446744073709551614" | PrioritySampling.SAMPLER_KEEP | null
BIG_INTEGER_UINT64_MAX.toString() | BIG_INTEGER_UINT64_MAX.minus(1).toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
}
def "extract header tags with no propagation"() {
setup:
final Map<String, String> actual = [
SOME_HEADER: "my-interesting-info",
]
when:
TagContext context = extractor.extract(new TextMapExtractAdapter(headers))
TagContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
then:
!(context instanceof ExtractedContext)
context.getTags() == ["some-tag": "my-interesting-info"]
if (headers.containsKey(ORIGIN_KEY)) {
((TagContext) context).origin == "my-origin"
}
where:
headers | _
[SOME_HEADER: "my-interesting-info"] | _
[(ORIGIN_KEY): "my-origin", SOME_HEADER: "my-interesting-info"] | _
}
def "extract empty headers returns null"() {
@@ -62,69 +76,6 @@ class DatadogHttpExtractorTest extends Specification {
extractor.extract(new TextMapExtractAdapter(["ignored-header": "ignored-value"])) == null
}
def "extract http headers with larger than Java long IDs"() {
setup:
String largeTraceId = "9523372036854775807"
String largeSpanId = "15815582334751494918"
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : largeTraceId,
(SPAN_ID_KEY.toUpperCase()) : largeSpanId,
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
context.getTraceId() == largeTraceId
context.getSpanId() == largeSpanId
context.getBaggage().get("k1") == "v1"
context.getBaggage().get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "extract http headers with uint 64 max IDs"() {
setup:
String largeSpanId = BIG_INTEGER_UINT64_MAX.subtract(BigInteger.ONE).toString()
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : BIG_INTEGER_UINT64_MAX.toString(),
(SPAN_ID_KEY.toUpperCase()) : BIG_INTEGER_UINT64_MAX.minus(1).toString(),
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
expect:
context.getTraceId() == BIG_INTEGER_UINT64_MAX.toString()
context.getSpanId() == largeSpanId
context.getBaggage().get("k1") == "v1"
context.getBaggage().get("k2") == "v2"
context.getTags() == ["some-tag": "my-interesting-info"]
context.getSamplingPriority() == samplingPriority
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "extract http headers with invalid non-numeric ID"() {
setup:
final Map<String, String> actual = [

View File

@@ -8,6 +8,7 @@ import datadog.trace.common.writer.ListWriter
import io.opentracing.propagation.TextMapInjectAdapter
import spock.lang.Specification
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
@@ -23,13 +24,14 @@ class DatadogHttpInjectorTest extends Specification {
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
"1",
"2",
traceID,
spanID,
"0",
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
origin,
new HashMap<String, String>() {
{
put("k1", "v1")
@@ -42,110 +44,30 @@ class DatadogHttpInjectorTest extends Specification {
new PendingTrace(tracer, "1", [:]),
tracer)
final Map<String, String> carrier = new HashMap<>()
final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect:
carrier.get(TRACE_ID_KEY) == "1"
carrier.get(SPAN_ID_KEY) == "2"
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority))
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1"
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2"
then:
1 * carrier.put(TRACE_ID_KEY, traceID)
1 * carrier.put(SPAN_ID_KEY, spanID)
1 * carrier.put(OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(OT_BAGGAGE_PREFIX + "k2", "v2")
if (samplingPriority != PrioritySampling.UNSET) {
1 * carrier.put(SAMPLING_PRIORITY_KEY, "$samplingPriority")
}
if (origin) {
1 * carrier.put(ORIGIN_KEY, origin)
}
0 * _
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "inject http headers with larger than Java long IDs"() {
String largeTraceId = "9523372036854775807"
String largeSpanId = "15815582334751494918"
String largeParentId = "15815582334751494914"
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
largeTraceId,
largeSpanId,
largeParentId,
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, largeTraceId, [:]),
tracer)
final Map<String, String> carrier = new HashMap<>()
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect:
carrier.get(TRACE_ID_KEY) == largeTraceId
carrier.get(SPAN_ID_KEY) == largeSpanId
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority))
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1"
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2"
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
}
def "inject http headers with uint 64 max IDs"() {
String largeTraceId = "18446744073709551615"
String largeSpanId = "18446744073709551614"
String largeParentId = "18446744073709551613"
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
largeTraceId,
largeSpanId,
largeParentId,
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, largeTraceId, [:]),
tracer)
final Map<String, String> carrier = new HashMap<>()
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
expect:
carrier.get(TRACE_ID_KEY) == largeTraceId
carrier.get(SPAN_ID_KEY) == largeSpanId
carrier.get(SAMPLING_PRIORITY_KEY) == (samplingPriority == PrioritySampling.UNSET ? null : String.valueOf(samplingPriority))
carrier.get(OT_BAGGAGE_PREFIX + "k1") == "v1"
carrier.get(OT_BAGGAGE_PREFIX + "k2") == "v2"
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
traceID | spanID | parentID | samplingPriority | origin
"1" | "2" | "0" | PrioritySampling.UNSET | null
"1" | "2" | "0" | PrioritySampling.SAMPLER_KEEP | "saipan"
// Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | "15815582334751494914" | PrioritySampling.UNSET | "saipan"
"18446744073709551615" | "18446744073709551614" | "18446744073709551613" | PrioritySampling.SAMPLER_KEEP | null
}
}

View File

@@ -18,13 +18,13 @@ class DDSpanContextTest extends Specification {
context.serviceName == "fakeService"
context.resourceName == "fakeResource"
context.spanType == "fakeType"
context.toString() == "DDSpan [ t_id=1, s_id=1, p_id=0] trace=fakeService/fakeOperation/fakeResource metrics={} *errored* tags={${extra}span.type=${context.getSpanType()}, thread.id=${Thread.currentThread().id}, thread.name=${Thread.currentThread().name}}"
context.toString() == "DDSpan [ t_id=1, s_id=1, p_id=0] trace=fakeService/fakeOperation/fakeResource metrics={} *errored* tags={${extra}${tags.containsKey(DDTags.SPAN_TYPE) ? "span.type=${context.getSpanType()}, " : ""}thread.id=${Thread.currentThread().id}, thread.name=${Thread.currentThread().name}}"
where:
name | extra | tags
DDTags.SERVICE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
DDTags.RESOURCE_NAME | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
DDTags.SPAN_TYPE | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
DDTags.SPAN_TYPE | "some.tag=asdf, " | ["some.tag": "asdf", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
"some.tag" | "" | [(DDTags.SPAN_TYPE): "fakeType", (DDTags.THREAD_NAME): Thread.currentThread().name, (DDTags.THREAD_ID): Thread.currentThread().id]
}

View File

@@ -26,6 +26,7 @@ class DDApiIntegrationTest {
"fakeOperation",
"fakeResource",
PrioritySampling.UNSET,
null,
Collections.emptyMap(),
false,
"fakeType",