Change String ids to BigInteger ids

This commit is contained in:
Laplie Anderson 2019-11-04 11:21:32 -05:00
parent 466fdf2a79
commit cfc19facf5
29 changed files with 269 additions and 343 deletions

View File

@ -81,16 +81,16 @@ class SpanAssert {
}
def parent() {
assert span.parentId == "0"
assert span.parentId == BigInteger.ZERO
checked.parentId = true
}
def parentId(String parentId) {
def parentId(BigInteger parentId) {
assert span.parentId == parentId
checked.parentId = true
}
def traceId(String traceId) {
def traceId(BigInteger traceId) {
assert span.traceId == traceId
checked.traceId = true
}

View File

@ -9,7 +9,7 @@ import groovy.transform.stc.SimpleType
import java.util.regex.Pattern
class TagsAssert {
private final String spanParentId
private final BigInteger spanParentId
private final Map<String, Object> tags
private final Set<String> assertedTags = new TreeSet<>()
@ -43,7 +43,7 @@ class TagsAssert {
// FIXME: DQH - Too much conditional logic? Maybe create specialized methods for client & server cases
boolean isRoot = ("0" == spanParentId)
boolean isRoot = (BigInteger.ZERO == spanParentId)
if (isRoot || distributedRootSpan) {
assert tags[Config.RUNTIME_ID_TAG] == Config.get().runtimeId
} else {

View File

@ -12,8 +12,8 @@ class TraceCorrelationTest extends AgentTestRunner {
DDSpan span = (DDSpan) scope.span()
then:
CorrelationIdentifier.traceId == span.traceId
CorrelationIdentifier.spanId == span.spanId
CorrelationIdentifier.traceId == span.traceId.toString()
CorrelationIdentifier.spanId == span.spanId.toString()
when:
scope.close()

View File

@ -4,17 +4,12 @@ import static io.opentracing.log.Fields.ERROR_OBJECT;
import com.fasterxml.jackson.annotation.JsonGetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import datadog.trace.api.DDTags;
import datadog.trace.api.interceptor.MutableSpan;
import datadog.trace.api.sampling.PrioritySampling;
import datadog.trace.common.util.Clock;
import io.opentracing.Span;
import io.opentracing.tag.Tag;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.ref.WeakReference;
@ -126,7 +121,7 @@ public class DDSpan implements Span, MutableSpan {
*/
@JsonIgnore
public final boolean isRootSpan() {
return "0".equals(context.getParentId());
return BigInteger.ZERO.equals(context.getParentId());
}
@Override
@ -346,20 +341,17 @@ public class DDSpan implements Span, MutableSpan {
}
@JsonGetter("trace_id")
@JsonSerialize(using = UInt64IDStringSerializer.class)
public String getTraceId() {
public BigInteger getTraceId() {
return context.getTraceId();
}
@JsonGetter("span_id")
@JsonSerialize(using = UInt64IDStringSerializer.class)
public String getSpanId() {
public BigInteger getSpanId() {
return context.getSpanId();
}
@JsonGetter("parent_id")
@JsonSerialize(using = UInt64IDStringSerializer.class)
public String getParentId() {
public BigInteger getParentId() {
return context.getParentId();
}
@ -422,31 +414,4 @@ public class DDSpan implements Span, MutableSpan {
.append(durationNano)
.toString();
}
protected static class UInt64IDStringSerializer extends StdSerializer<String> {
private static final int LONG_PARSE_LIMIT = String.valueOf(Long.MAX_VALUE).length();
public UInt64IDStringSerializer() {
this(null);
}
public UInt64IDStringSerializer(final Class<String> stringClass) {
super(stringClass);
}
@Override
public void serialize(
final String value, final JsonGenerator gen, final SerializerProvider provider)
throws IOException {
final int length = value.length();
// BigInteger's are expensive, so lets try to avoid using them if possible.
// This is a rough approximation for optimization.
// There are some values that would pass this test that could be parsed with Long.parseLong.
if (length > LONG_PARSE_LIMIT || (length == LONG_PARSE_LIMIT && value.startsWith("9"))) {
gen.writeNumber(new BigInteger(value));
} else {
gen.writeNumber(Long.parseLong(value));
}
}
}
}

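With the getters above now returning BigInteger, Jackson writes the ids as JSON numbers on its own, which is why the custom UInt64IDStringSerializer is dropped. A minimal standalone sketch of that default behaviour (illustrative only; the Ids holder class is hypothetical and not part of this commit):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.math.BigInteger;

public class BigIntegerIdSerializationSketch {
  // Hypothetical holder used only to demonstrate Jackson's default handling.
  public static class Ids {
    public BigInteger traceId = new BigInteger("18446744073709551615"); // uint64 max
  }

  public static void main(String[] args) throws Exception {
    // Jackson serializes BigInteger as a plain JSON number, so no StdSerializer
    // subclass (and no Long-vs-BigInteger parsing heuristic) is needed anymore.
    System.out.println(new ObjectMapper().writeValueAsString(new Ids()));
    // prints: {"traceId":18446744073709551615}
  }
}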
View File

@ -4,6 +4,7 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import datadog.opentracing.decorators.AbstractDecorator;
import datadog.trace.api.DDTags;
import datadog.trace.api.sampling.PrioritySampling;
import java.math.BigInteger;
import java.util.Collections;
import java.util.List;
import java.util.Map;
@ -39,9 +40,9 @@ public class DDSpanContext implements io.opentracing.SpanContext {
private final Map<String, String> baggageItems;
// Not Shared with other span contexts
private final String traceId;
private final String spanId;
private final String parentId;
private final BigInteger traceId;
private final BigInteger spanId;
private final BigInteger parentId;
/** Tags are associated to the current span, they will not propagate to the children span */
private final Map<String, Object> tags = new ConcurrentHashMap<>();
@ -73,9 +74,9 @@ public class DDSpanContext implements io.opentracing.SpanContext {
private final long threadId = Thread.currentThread().getId();
public DDSpanContext(
final String traceId,
final String spanId,
final String parentId,
final BigInteger traceId,
final BigInteger spanId,
final BigInteger parentId,
final String serviceName,
final String operationName,
final String resourceName,
@ -128,26 +129,26 @@ public class DDSpanContext implements io.opentracing.SpanContext {
this.tags.put(DDTags.THREAD_ID, threadId);
}
public String getTraceId() {
public BigInteger getTraceId() {
return traceId;
}
@Override
public String toTraceId() {
return traceId;
return traceId.toString();
}
public String getParentId() {
public BigInteger getParentId() {
return parentId;
}
public String getSpanId() {
public BigInteger getSpanId() {
return spanId;
}
@Override
public String toSpanId() {
return spanId;
return spanId.toString();
}
public String getServiceName() {

View File

@ -28,6 +28,7 @@ import io.opentracing.propagation.TextMapInject;
import io.opentracing.tag.Tag;
import java.io.Closeable;
import java.lang.ref.WeakReference;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@ -48,6 +49,10 @@ import lombok.extern.slf4j.Slf4j;
/** DDTracer makes it easy to send traces and span to DD using the OpenTracing API. */
@Slf4j
public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace.api.Tracer {
// UINT64 max value
public static final BigInteger TRACE_ID_MAX =
BigInteger.valueOf(2).pow(64).subtract(BigInteger.ONE);
public static final BigInteger TRACE_ID_MIN = BigInteger.ZERO;
/** Default service name if none provided on the trace or span */
final String serviceName;
@ -400,7 +405,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
public String getTraceId() {
final Span activeSpan = activeSpan();
if (activeSpan instanceof DDSpan) {
return ((DDSpan) activeSpan).getTraceId();
return ((DDSpan) activeSpan).getTraceId().toString();
}
return "0";
}
@ -409,7 +414,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
public String getSpanId() {
final Span activeSpan = activeSpan();
if (activeSpan instanceof DDSpan) {
return ((DDSpan) activeSpan).getSpanId();
return ((DDSpan) activeSpan).getSpanId().toString();
}
return "0";
}
@ -604,10 +609,15 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
return this;
}
private String generateNewId() {
// TODO: expand the range of numbers generated to be from 1 to uint 64 MAX
// Ensure the generated ID is in a valid range:
return String.valueOf(ThreadLocalRandom.current().nextLong(1, Long.MAX_VALUE));
private BigInteger generateNewId() {
// It is **extremely** unlikely to generate the value "0" but we still need to handle that
// case
BigInteger value;
do {
value = new BigInteger(64, ThreadLocalRandom.current());
} while (value.signum() == 0);
return value;
}
/**
@ -617,9 +627,9 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
* @return the context
*/
private DDSpanContext buildSpanContext() {
final String traceId;
final String spanId = generateNewId();
final String parentSpanId;
final BigInteger traceId;
final BigInteger spanId = generateNewId();
final BigInteger parentSpanId;
final Map<String, String> baggage;
final PendingTrace parentTrace;
final int samplingPriority;
@ -661,7 +671,7 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
} else {
// Start a new trace
traceId = generateNewId();
parentSpanId = "0";
parentSpanId = BigInteger.ZERO;
samplingPriority = PrioritySampling.UNSET;
baggage = null;
}

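The id generator above replaces the String-based random long with a uniformly random 64-bit unsigned BigInteger, re-drawing on the (astronomically unlikely) zero so ids stay in [1, 2^64 - 1]. A self-contained sketch of the same approach, using the TRACE_ID_MAX bound defined above:

import java.math.BigInteger;
import java.util.concurrent.ThreadLocalRandom;

public class IdGenerationSketch {
  // UINT64 max value, as in DDTracer.TRACE_ID_MAX above.
  static final BigInteger TRACE_ID_MAX =
      BigInteger.valueOf(2).pow(64).subtract(BigInteger.ONE);

  static BigInteger generateNewId() {
    BigInteger value;
    do {
      // new BigInteger(64, random) is uniform over [0, 2^64 - 1].
      value = new BigInteger(64, ThreadLocalRandom.current());
    } while (value.signum() == 0); // reject zero, which marks "no parent" / root spans
    return value;
  }

  public static void main(String[] args) {
    BigInteger id = generateNewId();
    System.out.println(id + " in range: "
        + (id.signum() > 0 && id.compareTo(TRACE_ID_MAX) <= 0)); // always true
  }
}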
View File

@ -6,6 +6,7 @@ import java.io.Closeable;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
@ -28,7 +29,7 @@ public class PendingTrace extends ConcurrentLinkedDeque<DDSpan> {
private static final AtomicReference<SpanCleaner> SPAN_CLEANER = new AtomicReference<>();
private final DDTracer tracer;
private final String traceId;
private final BigInteger traceId;
private final Map<String, String> serviceNameMappings;
// TODO: consider moving these time fields into DDTracer to ensure that traces have precise
@ -62,7 +63,9 @@ public class PendingTrace extends ConcurrentLinkedDeque<DDSpan> {
private final AtomicBoolean isWritten = new AtomicBoolean(false);
PendingTrace(
final DDTracer tracer, final String traceId, final Map<String, String> serviceNameMappings) {
final DDTracer tracer,
final BigInteger traceId,
final Map<String, String> serviceNameMappings) {
this.tracer = tracer;
this.traceId = traceId;
this.serviceNameMappings = serviceNameMappings;

View File

@ -1,6 +1,5 @@
package datadog.opentracing.propagation;
import static datadog.opentracing.propagation.HttpCodec.ZERO;
import static datadog.opentracing.propagation.HttpCodec.validateUInt64BitsID;
import datadog.opentracing.DDSpanContext;
@ -40,12 +39,8 @@ class B3HttpCodec {
@Override
public void inject(final DDSpanContext context, final TextMapInject carrier) {
try {
// TODO: should we better store ids as BigInteger in context to avoid parsing it twice.
final BigInteger traceId = new BigInteger(context.getTraceId());
final BigInteger spanId = new BigInteger(context.getSpanId());
carrier.put(TRACE_ID_KEY, traceId.toString(HEX_RADIX).toLowerCase());
carrier.put(SPAN_ID_KEY, spanId.toString(HEX_RADIX).toLowerCase());
carrier.put(TRACE_ID_KEY, context.getTraceId().toString(HEX_RADIX).toLowerCase());
carrier.put(SPAN_ID_KEY, context.getSpanId().toString(HEX_RADIX).toLowerCase());
if (context.lockSamplingPriority()) {
carrier.put(
@ -78,8 +73,8 @@ class B3HttpCodec {
public SpanContext extract(final TextMapExtract carrier) {
try {
Map<String, String> tags = Collections.emptyMap();
String traceId = ZERO;
String spanId = ZERO;
BigInteger traceId = BigInteger.ZERO;
BigInteger spanId = BigInteger.ZERO;
int samplingPriority = PrioritySampling.UNSET;
for (final Map.Entry<String, String> entry : carrier) {
@ -95,7 +90,7 @@ class B3HttpCodec {
final int length = value.length();
if (length > 32) {
log.debug("Header {} exceeded max length of 32: {}", TRACE_ID_KEY, value);
traceId = "0";
traceId = BigInteger.ZERO;
continue;
} else if (length > 16) {
trimmedValue = value.substring(length - 16);
@ -117,7 +112,7 @@ class B3HttpCodec {
}
}
if (!ZERO.equals(traceId)) {
if (!BigInteger.ZERO.equals(traceId)) {
final ExtractedContext context =
new ExtractedContext(
traceId,

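Since the context now holds BigInteger ids directly, the B3 injector above no longer re-parses strings: it formats the ids as lower-case hex, and the extractor parses the hex headers back through validateUInt64BitsID(value, 16). A small round-trip sketch of that encoding (illustrative, with hard-coded values):

import java.math.BigInteger;

public class B3HexEncodingSketch {
  private static final int HEX_RADIX = 16;

  public static void main(String[] args) {
    BigInteger traceId = new BigInteger("5060571933882717101");
    // Inject side: write the id as lower-case hex into the B3 header value.
    String header = traceId.toString(HEX_RADIX).toLowerCase();
    System.out.println(header); // 463ac35c9f6413ad
    // Extract side: parse the hex header back into the same BigInteger.
    System.out.println(new BigInteger(header, HEX_RADIX)); // 5060571933882717101
  }
}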
View File

@ -1,6 +1,5 @@
package datadog.opentracing.propagation;
import static datadog.opentracing.propagation.HttpCodec.ZERO;
import static datadog.opentracing.propagation.HttpCodec.validateUInt64BitsID;
import datadog.opentracing.DDSpanContext;
@ -8,6 +7,7 @@ import datadog.trace.api.sampling.PrioritySampling;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMapExtract;
import io.opentracing.propagation.TextMapInject;
import java.math.BigInteger;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -31,8 +31,8 @@ class DatadogHttpCodec {
@Override
public void inject(final DDSpanContext context, final TextMapInject carrier) {
carrier.put(TRACE_ID_KEY, context.getTraceId());
carrier.put(SPAN_ID_KEY, context.getSpanId());
carrier.put(TRACE_ID_KEY, context.getTraceId().toString());
carrier.put(SPAN_ID_KEY, context.getSpanId().toString());
if (context.lockSamplingPriority()) {
carrier.put(SAMPLING_PRIORITY_KEY, String.valueOf(context.getSamplingPriority()));
}
@ -63,8 +63,8 @@ class DatadogHttpCodec {
try {
Map<String, String> baggage = Collections.emptyMap();
Map<String, String> tags = Collections.emptyMap();
String traceId = ZERO;
String spanId = ZERO;
BigInteger traceId = BigInteger.ZERO;
BigInteger spanId = BigInteger.ZERO;
int samplingPriority = PrioritySampling.UNSET;
String origin = null;
@ -99,7 +99,7 @@ class DatadogHttpCodec {
}
}
if (!ZERO.equals(traceId)) {
if (!BigInteger.ZERO.equals(traceId)) {
final ExtractedContext context =
new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
context.lockSamplingPriority();

View File

@ -1,5 +1,6 @@
package datadog.opentracing.propagation;
import java.math.BigInteger;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
@ -7,15 +8,15 @@ import java.util.concurrent.atomic.AtomicBoolean;
* Propagated data resulting from calling tracer.extract with header data from an incoming request.
*/
public class ExtractedContext extends TagContext {
private final String traceId;
private final String spanId;
private final BigInteger traceId;
private final BigInteger spanId;
private final int samplingPriority;
private final Map<String, String> baggage;
private final AtomicBoolean samplingPriorityLocked = new AtomicBoolean(false);
public ExtractedContext(
final String traceId,
final String spanId,
final BigInteger traceId,
final BigInteger spanId,
final int samplingPriority,
final String origin,
final Map<String, String> baggage,
@ -36,11 +37,11 @@ public class ExtractedContext extends TagContext {
samplingPriorityLocked.set(true);
}
public String getTraceId() {
public BigInteger getTraceId() {
return traceId;
}
public String getSpanId() {
public BigInteger getSpanId() {
return spanId;
}

View File

@ -1,6 +1,5 @@
package datadog.opentracing.propagation;
import static datadog.opentracing.propagation.HttpCodec.ZERO;
import static datadog.opentracing.propagation.HttpCodec.validateUInt64BitsID;
import datadog.opentracing.DDSpanContext;
@ -8,6 +7,7 @@ import datadog.trace.api.sampling.PrioritySampling;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMapExtract;
import io.opentracing.propagation.TextMapInject;
import java.math.BigInteger;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -34,9 +34,9 @@ public class HaystackHttpCodec {
@Override
public void inject(final DDSpanContext context, final TextMapInject carrier) {
carrier.put(TRACE_ID_KEY, context.getTraceId());
carrier.put(SPAN_ID_KEY, context.getSpanId());
carrier.put(PARENT_ID_KEY, context.getParentId());
carrier.put(TRACE_ID_KEY, context.getTraceId().toString());
carrier.put(SPAN_ID_KEY, context.getSpanId().toString());
carrier.put(PARENT_ID_KEY, context.getParentId().toString());
for (final Map.Entry<String, String> entry : context.baggageItems()) {
carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), HttpCodec.encode(entry.getValue()));
@ -61,8 +61,8 @@ public class HaystackHttpCodec {
try {
Map<String, String> baggage = Collections.emptyMap();
Map<String, String> tags = Collections.emptyMap();
String traceId = ZERO;
String spanId = ZERO;
BigInteger traceId = BigInteger.ZERO;
BigInteger spanId = BigInteger.ZERO;
final int samplingPriority = PrioritySampling.SAMPLER_KEEP;
final String origin = null; // Always null
@ -93,7 +93,7 @@ public class HaystackHttpCodec {
}
}
if (!ZERO.equals(traceId)) {
if (!BigInteger.ZERO.equals(traceId)) {
final ExtractedContext context =
new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
context.lockSamplingPriority();

View File

@ -1,6 +1,7 @@
package datadog.opentracing.propagation;
import datadog.opentracing.DDSpanContext;
import datadog.opentracing.DDTracer;
import datadog.trace.api.Config;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMapExtract;
@ -16,11 +17,6 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
public class HttpCodec {
// uint 64 bits max value, 2^64 - 1
static final BigInteger UINT64_MAX = new BigInteger("2").pow(64).subtract(BigInteger.ONE);
static final String ZERO = "0";
public interface Injector {
void inject(final DDSpanContext context, final TextMapInject carrier);
@ -111,24 +107,24 @@ public class HttpCodec {
}
/**
* Helper method to validate an ID String to verify that it is an unsigned 64 bits number and is
* within range.
* Helper method to validate an ID String and verify that it is within range
*
* @param value the String that contains the ID
* @param radix radix to use to parse the ID
* @return the ID in String format if it passes validations, "0" otherwise
* @return the parsed ID
* @throws IllegalArgumentException if value cannot be converted to integer or doesn't conform to
* required boundaries
*/
static String validateUInt64BitsID(final String value, final int radix)
static BigInteger validateUInt64BitsID(final String value, final int radix)
throws IllegalArgumentException {
final BigInteger parsedValue = new BigInteger(value, radix);
if (parsedValue.compareTo(BigInteger.ZERO) == -1 || parsedValue.compareTo(UINT64_MAX) == 1) {
if (parsedValue.compareTo(DDTracer.TRACE_ID_MIN) < 0
|| parsedValue.compareTo(DDTracer.TRACE_ID_MAX) > 0) {
throw new IllegalArgumentException(
"ID out of range, must be between 0 and 2^64-1, got: " + value);
}
// We use decimals
return parsedValue.toString();
return parsedValue;
}
/** URL encode value */

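validateUInt64BitsID now returns the parsed BigInteger instead of echoing a decimal String, keeping the [0, 2^64 - 1] bound check via TRACE_ID_MIN and TRACE_ID_MAX. A compact standalone copy of the same validation logic, for illustration:

import java.math.BigInteger;

public class IdValidationSketch {
  static final BigInteger TRACE_ID_MIN = BigInteger.ZERO;
  static final BigInteger TRACE_ID_MAX =
      BigInteger.valueOf(2).pow(64).subtract(BigInteger.ONE);

  // Parse an id in the given radix and reject anything outside the unsigned 64-bit range.
  static BigInteger validateUInt64BitsID(String value, int radix) {
    BigInteger parsed = new BigInteger(value, radix);
    if (parsed.compareTo(TRACE_ID_MIN) < 0 || parsed.compareTo(TRACE_ID_MAX) > 0) {
      throw new IllegalArgumentException(
          "ID out of range, must be between 0 and 2^64-1, got: " + value);
    }
    return parsed;
  }

  public static void main(String[] args) {
    System.out.println(validateUInt64BitsID("18446744073709551615", 10)); // 2^64 - 1, accepted
    try {
      validateUInt64BitsID("18446744073709551616", 10); // 2^64, out of range
    } catch (IllegalArgumentException expected) {
      System.out.println(expected.getMessage());
    }
  }
}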
View File

@ -149,15 +149,15 @@ class DDSpanBuilderTest extends DDSpecification {
def "should link to parent span"() {
setup:
final String spanId = "1"
final long expectedParentId = spanId
final BigInteger spanId = BigInteger.ONE
final BigInteger expectedParentId = spanId
final DDSpanContext mockedContext = Mock()
1 * mockedContext.getTraceId() >> spanId
1 * mockedContext.getSpanId() >> spanId
_ * mockedContext.getServiceName() >> "foo"
1 * mockedContext.getBaggageItems() >> [:]
1 * mockedContext.getTrace() >> new PendingTrace(tracer, "1", [:])
1 * mockedContext.getTrace() >> new PendingTrace(tracer, BigInteger.ONE, [:])
final String expectedName = "fakeName"
@ -182,7 +182,7 @@ class DDSpanBuilderTest extends DDSpecification {
tracer.buildSpan("parent")
.startActive(false)
final String expectedParentId = noopParent ? "0" : parent.span().context().getSpanId()
final BigInteger expectedParentId = noopParent ? BigInteger.ZERO : new BigInteger(parent.span().context().toSpanId())
final String expectedName = "fakeName"
@ -429,9 +429,9 @@ class DDSpanBuilderTest extends DDSpecification {
(DDTags.THREAD_NAME) : thread.name, (DDTags.THREAD_ID): thread.id]
where:
extractedContext | _
new ExtractedContext("1", "2", 0, null, [:], [:]) | _
new ExtractedContext("3", "4", 1, "some-origin", ["asdf": "qwer"], [(ORIGIN_KEY): "some-origin", "zxcv": "1234"]) | _
extractedContext | _
new ExtractedContext(BigInteger.ONE, BigInteger.valueOf(2), 0, null, [:], [:]) | _
new ExtractedContext(BigInteger.valueOf(3), BigInteger.valueOf(4), 1, "some-origin", ["asdf": "qwer"], [(ORIGIN_KEY): "some-origin", "zxcv": "1234"]) | _
}
def "TagContext should populate default span details"() {
@ -440,8 +440,8 @@ class DDSpanBuilderTest extends DDSpecification {
final DDSpan span = tracer.buildSpan("op name").asChildOf(tagContext).start()
expect:
span.traceId != "0"
span.parentId == "0"
span.traceId != BigInteger.ZERO
span.parentId == BigInteger.ZERO
span.samplingPriority == PrioritySampling.SAMPLER_KEEP // Since we're using the RateByServiceSampler
span.context().origin == tagContext.origin
span.context().baggageItems == [:]

View File

@ -43,9 +43,9 @@ class DDSpanSerializationTest extends DDSpecification {
def tracer = new DDTracer(writer)
final DDSpanContext context =
new DDSpanContext(
"1",
"2",
"0",
BigInteger.ONE,
BigInteger.valueOf(2),
BigInteger.ZERO,
"service",
"operation",
null,
@ -55,7 +55,7 @@ class DDSpanSerializationTest extends DDSpecification {
false,
"type",
tags,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
baggage.put(DDTags.THREAD_NAME, Thread.currentThread().getName())
@ -85,9 +85,9 @@ class DDSpanSerializationTest extends DDSpecification {
def writer = new ListWriter()
def tracer = new DDTracer(writer)
def context = new DDSpanContext(
value.toString(),
value.toString(),
"0",
value,
value,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -97,7 +97,7 @@ class DDSpanSerializationTest extends DDSpecification {
false,
"fakeType",
Collections.emptyMap(),
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
def span = new DDSpan(0, context)
byte[] bytes = objectMapper.writeValueAsBytes(span)

View File

@ -33,9 +33,9 @@ class DDSpanTest extends DDSpecification {
setup:
final DDSpanContext context =
new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -45,7 +45,7 @@ class DDSpanTest extends DDSpecification {
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
final DDSpan span = new DDSpan(1L, context)
@ -212,9 +212,9 @@ class DDSpanTest extends DDSpecification {
child.@origin == null // Access field directly instead of getter.
where:
extractedContext | _
new TagContext("some-origin", [:]) | _
new ExtractedContext("1", "2", 0, "some-origin", [:], [:]) | _
extractedContext | _
new TagContext("some-origin", [:]) | _
new ExtractedContext(BigInteger.ONE, BigInteger.valueOf(2), 0, "some-origin", [:], [:]) | _
}
def "isRootSpan() in and not in the context of distributed tracing"() {
@ -231,9 +231,9 @@ class DDSpanTest extends DDSpecification {
root.finish()
where:
extractedContext | isTraceRootSpan
null | true
new ExtractedContext("123", "456", 1, "789", [:], [:]) | false
extractedContext | isTraceRootSpan
null | true
new ExtractedContext(BigInteger.valueOf(123), BigInteger.valueOf(456), 1, "789", [:], [:]) | false
}
def "getApplicationRootSpan() in and not in the context of distributed tracing"() {
@ -253,9 +253,9 @@ class DDSpanTest extends DDSpecification {
root.finish()
where:
extractedContext | isTraceRootSpan
null | true
new ExtractedContext("123", "456", 1, "789", [:], [:]) | false
extractedContext | isTraceRootSpan
null | true
new ExtractedContext(BigInteger.valueOf(123), BigInteger.valueOf(456), 1, "789", [:], [:]) | false
}
def "sampling priority set on init"() {

View File

@ -26,10 +26,9 @@ class PendingTraceTest extends DDSpecification {
def tracer = new DDTracer(writer)
def traceId = System.identityHashCode(this)
String traceIdStr = String.valueOf(traceId)
@Subject
PendingTrace trace = new PendingTrace(tracer, traceIdStr, [:])
PendingTrace trace = new PendingTrace(tracer, BigInteger.valueOf(traceId), [:])
DDSpan rootSpan = SpanFactory.newSpanOf(trace)
@ -148,7 +147,7 @@ class PendingTraceTest extends DDSpecification {
def "register span to wrong trace fails"() {
setup:
def otherTrace = new PendingTrace(tracer, String.valueOf(traceId - 10), [:])
def otherTrace = new PendingTrace(tracer, BigInteger.valueOf(traceId - 10), [:])
otherTrace.registerSpan(new DDSpan(0, rootSpan.context()))
expect:
@ -159,7 +158,7 @@ class PendingTraceTest extends DDSpecification {
def "add span to wrong trace fails"() {
setup:
def otherTrace = new PendingTrace(tracer, String.valueOf(traceId - 10), [:])
def otherTrace = new PendingTrace(tracer, BigInteger.valueOf(traceId - 10), [:])
rootSpan.finish()
otherTrace.addSpan(rootSpan)
@ -197,7 +196,7 @@ class PendingTraceTest extends DDSpecification {
properties.setProperty(PARTIAL_FLUSH_MIN_SPANS, "1")
def config = Config.get(properties)
def tracer = new DDTracer(config, writer)
def trace = new PendingTrace(tracer, traceIdStr, [:])
def trace = new PendingTrace(tracer, BigInteger.valueOf(traceId), [:])
def rootSpan = SpanFactory.newSpanOf(trace)
def child1 = tracer.buildSpan("child1").asChildOf(rootSpan).start()
def child2 = tracer.buildSpan("child2").asChildOf(rootSpan).start()
@ -243,7 +242,7 @@ class PendingTraceTest extends DDSpecification {
properties.setProperty(PARTIAL_FLUSH_MIN_SPANS, "1")
def config = Config.get(properties)
def tracer = new DDTracer(config, writer)
def trace = new PendingTrace(tracer, traceIdStr, [:])
def trace = new PendingTrace(tracer, BigInteger.valueOf(traceId), [:])
def rootSpan = SpanFactory.newSpanOf(trace)
def child1 = tracer.buildSpan("child1").asChildOf(rootSpan).start()
def child2 = tracer.buildSpan("child2").asChildOf(rootSpan).start()

View File

@ -12,9 +12,9 @@ class SpanFactory {
def currentThreadName = Thread.currentThread().getName()
Thread.currentThread().setName(threadName)
def context = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -24,7 +24,7 @@ class SpanFactory {
false,
"fakeType",
Collections.emptyMap(),
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
Thread.currentThread().setName(currentThreadName)
return new DDSpan(timestampMicro, context)
@ -32,9 +32,9 @@ class SpanFactory {
static DDSpan newSpanOf(DDTracer tracer) {
def context = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -44,7 +44,7 @@ class SpanFactory {
false,
"fakeType",
Collections.emptyMap(),
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
return new DDSpan(1, context)
}
@ -52,8 +52,8 @@ class SpanFactory {
static DDSpan newSpanOf(PendingTrace trace) {
def context = new DDSpanContext(
trace.traceId,
"1",
"0",
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -72,9 +72,9 @@ class SpanFactory {
def writer = new ListWriter()
def tracer = new DDTracer(writer)
def context = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
serviceName,
"fakeOperation",
"fakeResource",
@ -84,7 +84,7 @@ class SpanFactory {
false,
"fakeType",
Collections.emptyMap(),
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
context.setTag("env", envName)
return new DDSpan(0l, context)

View File

@ -28,7 +28,7 @@ class TraceCorrelationTest extends DDSpecification {
def "get trace id with trace"() {
expect:
((DDSpan) scope.span()).traceId == tracer.getTraceId()
((DDSpan) scope.span()).traceId.toString() == tracer.getTraceId()
}
def "get span id without span"() {
@ -41,6 +41,6 @@ class TraceCorrelationTest extends DDSpecification {
def "get span id with trace"() {
expect:
((DDSpan) scope.span()).spanId == tracer.getSpanId()
((DDSpan) scope.span()).spanId.toString() == tracer.getSpanId()
}
}

View File

@ -107,9 +107,9 @@ class URLAsResourceNameTest extends DDSpecification {
when:
final DDSpanContext context =
new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -119,7 +119,7 @@ class URLAsResourceNameTest extends DDSpecification {
false,
"fakeType",
tags,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
then:

View File

@ -5,10 +5,10 @@ import datadog.trace.util.test.DDSpecification
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.B3HttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.B3HttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.B3HttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class B3HttpExtractorTest extends DDSpecification {
@ -30,20 +30,20 @@ class B3HttpExtractorTest extends DDSpecification {
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context.traceId == traceId.toString()
context.spanId == spanId.toString()
context.traceId == traceId
context.spanId == spanId
context.baggage == [:]
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == expectedSamplingPriority
context.origin == null
where:
traceId | spanId | samplingPriority | expectedSamplingPriority
1G | 2G | null | PrioritySampling.UNSET
2G | 3G | 1 | PrioritySampling.SAMPLER_KEEP
3G | 4G | 0 | PrioritySampling.SAMPLER_DROP
UINT64_MAX | UINT64_MAX.minus(1) | 0 | PrioritySampling.SAMPLER_DROP
UINT64_MAX.minus(1) | UINT64_MAX | 1 | PrioritySampling.SAMPLER_KEEP
traceId | spanId | samplingPriority | expectedSamplingPriority
1G | 2G | null | PrioritySampling.UNSET
2G | 3G | 1 | PrioritySampling.SAMPLER_KEEP
3G | 4G | 0 | PrioritySampling.SAMPLER_DROP
TRACE_ID_MAX | TRACE_ID_MAX - 1 | 0 | PrioritySampling.SAMPLER_DROP
TRACE_ID_MAX - 1 | TRACE_ID_MAX | 1 | PrioritySampling.SAMPLER_KEEP
}
def "extract 128 bit id truncates id to 64 bit"() {
@ -65,20 +65,20 @@ class B3HttpExtractorTest extends DDSpecification {
}
where:
traceId | spanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | "0"
"1" | "-1" | null | "0"
"0" | "1" | null | "0"
"00001" | "00001" | "1" | "1"
"463ac35c9f6413ad" | "463ac35c9f6413ad" | "5060571933882717101" | "5060571933882717101"
"463ac35c9f6413ad48485a3953bb6124" | "1" | "5208512171318403364" | "1"
"f".multiply(16) | "1" | "$UINT64_MAX" | "1"
"a".multiply(16) + "f".multiply(16) | "1" | "$UINT64_MAX" | "1"
"1" + "f".multiply(32) | "1" | null | "1"
"0" + "f".multiply(32) | "1" | null | "1"
"1" | "f".multiply(16) | "1" | "$UINT64_MAX"
"1" | "1" + "f".multiply(16) | null | "0"
"1" | "000" + "f".multiply(16) | "1" | "$UINT64_MAX"
traceId | spanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | BigInteger.ZERO
"1" | "-1" | null | BigInteger.ZERO
"0" | "1" | null | BigInteger.ZERO
"00001" | "00001" | BigDecimal.ONE | BigInteger.ONE
"463ac35c9f6413ad" | "463ac35c9f6413ad" | new BigDecimal("5060571933882717101") | new BigDecimal("5060571933882717101")
"463ac35c9f6413ad48485a3953bb6124" | "1" | new BigDecimal("5208512171318403364") | BigInteger.ONE
"f" * 16 | "1" | TRACE_ID_MAX | BigInteger.ONE
"a" * 16 + "f" * 16 | "1" | TRACE_ID_MAX | BigInteger.ONE
"1" + "f" * 32 | "1" | null | BigInteger.ONE
"0" + "f" * 32 | "1" | null | BigInteger.ONE
"1" | "f" * 16 | BigInteger.ONE | TRACE_ID_MAX
"1" | "1" + "f" * 16 | null | BigInteger.ZERO
"1" | "000" + "f" * 16 | BigInteger.ONE | TRACE_ID_MAX
}
def "extract header tags with no propagation"() {

View File

@ -8,10 +8,10 @@ import datadog.trace.common.writer.ListWriter
import datadog.trace.util.test.DDSpecification
import io.opentracing.propagation.TextMapInjectAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.B3HttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.B3HttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.B3HttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class B3HttpInjectorTest extends DDSpecification {
@ -25,7 +25,7 @@ class B3HttpInjectorTest extends DDSpecification {
new DDSpanContext(
traceId,
spanId,
"0",
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -40,7 +40,7 @@ class B3HttpInjectorTest extends DDSpecification {
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
final Map<String, String> carrier = Mock()
@ -49,61 +49,21 @@ class B3HttpInjectorTest extends DDSpecification {
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
1 * carrier.put(TRACE_ID_KEY, new BigInteger(traceId).toString(16).toLowerCase())
1 * carrier.put(SPAN_ID_KEY, new BigInteger(spanId).toString(16).toLowerCase())
1 * carrier.put(TRACE_ID_KEY, traceId.toString(16).toLowerCase())
1 * carrier.put(SPAN_ID_KEY, spanId.toString(16).toLowerCase())
if (expectedSamplingPriority != null) {
1 * carrier.put(SAMPLING_PRIORITY_KEY, "$expectedSamplingPriority")
}
0 * _
where:
traceId | spanId | samplingPriority | expectedSamplingPriority
"1" | "2" | PrioritySampling.UNSET | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | 1
"4" | "5" | PrioritySampling.SAMPLER_DROP | 0
"5" | "6" | PrioritySampling.USER_KEEP | 1
"6" | "7" | PrioritySampling.USER_DROP | 0
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | null
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | 1
}
def "unparseable ids"() {
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
traceId,
spanId,
"0",
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
"fakeOrigin",
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
tracer)
final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
0 * _
where:
traceId | spanId | samplingPriority
"abc" | "1" | PrioritySampling.UNSET
"1" | "cbd" | PrioritySampling.SAMPLER_KEEP
traceId | spanId | samplingPriority | expectedSamplingPriority
BigInteger.valueOf(1) | BigInteger.valueOf(2) | PrioritySampling.UNSET | null
BigInteger.valueOf(2) | BigInteger.valueOf(3) | PrioritySampling.SAMPLER_KEEP | 1
BigInteger.valueOf(4) | BigInteger.valueOf(5) | PrioritySampling.SAMPLER_DROP | 0
BigInteger.valueOf(5) | BigInteger.valueOf(6) | PrioritySampling.USER_KEEP | 1
BigInteger.valueOf(6) | BigInteger.valueOf(7) | PrioritySampling.USER_DROP | 0
TRACE_ID_MAX | TRACE_ID_MAX - 1 | PrioritySampling.UNSET | null
TRACE_ID_MAX - 1 | TRACE_ID_MAX | PrioritySampling.SAMPLER_KEEP | 1
}
}

View File

@ -5,12 +5,12 @@ import datadog.trace.util.test.DDSpecification
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class DatadogHttpExtractorTest extends DDSpecification {
@ -19,8 +19,8 @@ class DatadogHttpExtractorTest extends DDSpecification {
def "extract http headers"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()) : traceId,
(SPAN_ID_KEY.toUpperCase()) : spanId,
(TRACE_ID_KEY.toUpperCase()) : traceId.toString(),
(SPAN_ID_KEY.toUpperCase()) : spanId.toString(),
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
@ -38,19 +38,19 @@ class DatadogHttpExtractorTest extends DDSpecification {
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context.traceId == traceId
context.spanId == spanId
context.traceId == new BigInteger(traceId)
context.spanId == new BigInteger(spanId)
context.baggage == ["k1": "v1", "k2": "v2"]
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == samplingPriority
context.origin == origin
where:
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | "saipan"
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | "saipan"
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | "saipan"
TRACE_ID_MAX.toString() | (TRACE_ID_MAX - 1).toString() | PrioritySampling.UNSET | "saipan"
(TRACE_ID_MAX - 1).toString() | TRACE_ID_MAX.toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
}
def "extract header tags with no propagation"() {
@ -94,7 +94,7 @@ class DatadogHttpExtractorTest extends DDSpecification {
def "extract http headers with out of range trace ID"() {
setup:
String outOfRangeTraceId = UINT64_MAX.add(BigInteger.ONE).toString()
String outOfRangeTraceId = TRACE_ID_MAX.add(BigInteger.ONE).toString()
def headers = [
(TRACE_ID_KEY.toUpperCase()) : outOfRangeTraceId,
(SPAN_ID_KEY.toUpperCase()) : "0",
@ -146,15 +146,15 @@ class DatadogHttpExtractorTest extends DDSpecification {
}
where:
gtTraceId | gSpanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | "0"
"1" | "-1" | null | "0"
"0" | "1" | null | "0"
"1" | "0" | "1" | "0"
"$UINT64_MAX" | "1" | "$UINT64_MAX" | "1"
"${UINT64_MAX.plus(1)}" | "1" | null | "1"
"1" | "$UINT64_MAX" | "1" | "$UINT64_MAX"
"1" | "${UINT64_MAX.plus(1)}" | null | "0"
gtTraceId | gSpanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | BigInteger.ZERO
"1" | "-1" | null | BigInteger.ZERO
"0" | "1" | null | BigInteger.ZERO
"1" | "0" | BigInteger.ONE | BigInteger.ZERO
"$TRACE_ID_MAX" | "1" | TRACE_ID_MAX | BigInteger.ONE
"${TRACE_ID_MAX + 1}" | "1" | null | BigInteger.ONE
"1" | "$TRACE_ID_MAX" | BigInteger.ONE | TRACE_ID_MAX
"1" | "${TRACE_ID_MAX + 1}" | null | BigInteger.ZERO
traceId = gtTraceId.toString()
spanId = gSpanId.toString()

View File

@ -8,12 +8,12 @@ import datadog.trace.common.writer.ListWriter
import datadog.trace.util.test.DDSpecification
import io.opentracing.propagation.TextMapInjectAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class DatadogHttpInjectorTest extends DDSpecification {
@ -27,7 +27,7 @@ class DatadogHttpInjectorTest extends DDSpecification {
new DDSpanContext(
traceId,
spanId,
"0",
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -42,7 +42,7 @@ class DatadogHttpInjectorTest extends DDSpecification {
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
final Map<String, String> carrier = Mock()
@ -51,8 +51,8 @@ class DatadogHttpInjectorTest extends DDSpecification {
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
1 * carrier.put(TRACE_ID_KEY, traceId)
1 * carrier.put(SPAN_ID_KEY, spanId)
1 * carrier.put(TRACE_ID_KEY, traceId.toString())
1 * carrier.put(SPAN_ID_KEY, spanId.toString())
1 * carrier.put(OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(OT_BAGGAGE_PREFIX + "k2", "v2")
if (samplingPriority != PrioritySampling.UNSET) {
@ -64,10 +64,10 @@ class DatadogHttpInjectorTest extends DDSpecification {
0 * _
where:
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"1" | "2" | PrioritySampling.SAMPLER_KEEP | "saipan"
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | "saipan"
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | null
traceId | spanId | samplingPriority | origin
BigInteger.ONE | BigInteger.valueOf(2) | PrioritySampling.UNSET | null
BigInteger.ONE | BigInteger.valueOf(2) | PrioritySampling.SAMPLER_KEEP | "saipan"
TRACE_ID_MAX | TRACE_ID_MAX - 1 | PrioritySampling.UNSET | "saipan"
TRACE_ID_MAX - 1 | TRACE_ID_MAX | PrioritySampling.SAMPLER_KEEP | null
}
}

View File

@ -5,10 +5,10 @@ import datadog.trace.util.test.DDSpecification
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.HaystackHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.HaystackHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.HaystackHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class HaystackHttpExtractorTest extends DDSpecification {
@ -28,19 +28,19 @@ class HaystackHttpExtractorTest extends DDSpecification {
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context.traceId == traceId
context.spanId == spanId
context.traceId == new BigInteger(traceId)
context.spanId == new BigInteger(spanId)
context.baggage == ["k1": "v1", "k2": "v2"]
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == samplingPriority
context.origin == origin
where:
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.SAMPLER_KEEP | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | null
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.SAMPLER_KEEP | null
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | null
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.SAMPLER_KEEP | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | null
TRACE_ID_MAX.toString() | (TRACE_ID_MAX - 1).toString() | PrioritySampling.SAMPLER_KEEP | null
(TRACE_ID_MAX - 1).toString() | TRACE_ID_MAX.toString() | PrioritySampling.SAMPLER_KEEP | null
}
def "extract header tags with no propagation"() {
@ -81,7 +81,7 @@ class HaystackHttpExtractorTest extends DDSpecification {
def "extract http headers with out of range trace ID"() {
setup:
String outOfRangeTraceId = UINT64_MAX.add(BigInteger.ONE).toString()
String outOfRangeTraceId = (TRACE_ID_MAX + 1).toString()
def headers = [
(TRACE_ID_KEY.toUpperCase()) : outOfRangeTraceId,
(SPAN_ID_KEY.toUpperCase()) : "0",
@ -133,15 +133,15 @@ class HaystackHttpExtractorTest extends DDSpecification {
}
where:
gtTraceId | gSpanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | "0"
"1" | "-1" | null | "0"
"0" | "1" | null | "0"
"1" | "0" | "1" | "0"
"$UINT64_MAX" | "1" | "$UINT64_MAX" | "1"
"${UINT64_MAX.plus(1)}" | "1" | null | "1"
"1" | "$UINT64_MAX" | "1" | "$UINT64_MAX"
"1" | "${UINT64_MAX.plus(1)}" | null | "0"
gtTraceId | gSpanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | BigInteger.ZERO
"1" | "-1" | null | BigInteger.ZERO
"0" | "1" | null | BigInteger.ZERO
"1" | "0" | BigInteger.ONE | BigInteger.ZERO
"$TRACE_ID_MAX" | "1" | TRACE_ID_MAX | BigInteger.ONE
"${TRACE_ID_MAX + 1}" | "1" | null | BigInteger.ONE
"1" | "$TRACE_ID_MAX" | BigInteger.ONE | TRACE_ID_MAX
"1" | "${TRACE_ID_MAX + 1}" | null | BigInteger.ZERO
traceId = gtTraceId.toString()
spanId = gSpanId.toString()

View File

@ -8,10 +8,10 @@ import datadog.trace.common.writer.ListWriter
import datadog.trace.util.test.DDSpecification
import io.opentracing.propagation.TextMapInjectAdapter
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.opentracing.propagation.HaystackHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.HaystackHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.HaystackHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class HaystackHttpInjectorTest extends DDSpecification {
@ -25,7 +25,7 @@ class HaystackHttpInjectorTest extends DDSpecification {
new DDSpanContext(
traceId,
spanId,
"0",
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -40,7 +40,7 @@ class HaystackHttpInjectorTest extends DDSpecification {
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
final Map<String, String> carrier = Mock()
@ -49,17 +49,17 @@ class HaystackHttpInjectorTest extends DDSpecification {
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
1 * carrier.put(TRACE_ID_KEY, traceId)
1 * carrier.put(SPAN_ID_KEY, spanId)
1 * carrier.put(TRACE_ID_KEY, traceId.toString())
1 * carrier.put(SPAN_ID_KEY, spanId.toString())
1 * carrier.put(OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(OT_BAGGAGE_PREFIX + "k2", "v2")
where:
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.SAMPLER_KEEP | null
"1" | "2" | PrioritySampling.SAMPLER_KEEP | null
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.SAMPLER_KEEP | null
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | null
traceId | spanId | samplingPriority | origin
BigInteger.ONE | BigInteger.valueOf(2) | PrioritySampling.SAMPLER_KEEP | null
BigInteger.ONE | BigInteger.valueOf(2) | PrioritySampling.SAMPLER_KEEP | null
TRACE_ID_MAX | TRACE_ID_MAX - 1 | PrioritySampling.SAMPLER_KEEP | null
TRACE_ID_MAX - 1 | TRACE_ID_MAX | PrioritySampling.SAMPLER_KEEP | null
}
}

View File

@ -1,20 +1,19 @@
package datadog.opentracing.propagation
import datadog.trace.api.Config
import datadog.trace.util.test.DDSpecification
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Shared
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
import static datadog.opentracing.DDTracer.TRACE_ID_MAX
import static datadog.trace.api.Config.PropagationStyle.B3
import static datadog.trace.api.Config.PropagationStyle.DATADOG
class HttpExtractorTest extends DDSpecification {
@Shared
String outOfRangeTraceId = UINT64_MAX.add(BigInteger.ONE)
String outOfRangeTraceId = (TRACE_ID_MAX + 1).toString()
def "extract http headers"() {
setup:
@ -61,23 +60,22 @@ class HttpExtractorTest extends DDSpecification {
}
where:
styles | datadogTraceId | datadogSpanId | b3TraceId | b3SpanId | expectedTraceId | expectedSpanId | putDatadogFields | expectDatadogFields | tagContext
[DATADOG, B3] | "1" | "2" | "a" | "b" | "1" | "2" | true | true | false
[DATADOG, B3] | null | null | "a" | "b" | "a" | "b" | false | false | true
[DATADOG, B3] | null | null | "a" | "b" | null | null | true | true | true
[DATADOG] | "1" | "2" | "a" | "b" | "1" | "2" | true | true | false
[B3] | "1" | "2" | "a" | "b" | "10" | "11" | false | false | false
[B3, DATADOG] | "1" | "2" | "a" | "b" | "10" | "11" | false | false | false
[] | "1" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "abc" | "2" | "a" | "b" | "10" | "11" | false | false | false
[DATADOG] | "abc" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | outOfRangeTraceId.toString() | "2" | "a" | "b" | "10" | "11" | false | false | false
[DATADOG, B3] | "1" | outOfRangeTraceId.toString() | "a" | "b" | "10" | "11" | false | false | false
[DATADOG] | outOfRangeTraceId.toString() | "2" | "a" | "b" | null | null | false | false | false
[DATADOG] | "1" | outOfRangeTraceId.toString() | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "1" | "2" | outOfRangeTraceId.toString() | "b" | "1" | "2" | true | false | false
[DATADOG, B3] | "1" | "2" | "a" | outOfRangeTraceId.toString() | "1" | "2" | true | false | false
styles | datadogTraceId | datadogSpanId | b3TraceId | b3SpanId | expectedTraceId | expectedSpanId | putDatadogFields | expectDatadogFields | tagContext
[DATADOG, B3] | "1" | "2" | "a" | "b" | BigInteger.ONE | BigDecimal.valueOf(2) | true | true | false
[DATADOG, B3] | null | null | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | true
[DATADOG, B3] | null | null | "a" | "b" | null | null | true | true | true
[DATADOG] | "1" | "2" | "a" | "b" | BigInteger.ONE | BigDecimal.valueOf(2) | true | true | false
[B3] | "1" | "2" | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | false
[B3, DATADOG] | "1" | "2" | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | false
[] | "1" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "abc" | "2" | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | false
[DATADOG] | "abc" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | outOfRangeTraceId | "2" | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | false
[DATADOG, B3] | "1" | outOfRangeTraceId | "a" | "b" | BigInteger.TEN | BigDecimal.valueOf(11) | false | false | false
[DATADOG] | outOfRangeTraceId | "2" | "a" | "b" | null | null | false | false | false
[DATADOG] | "1" | outOfRangeTraceId | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "1" | "2" | outOfRangeTraceId | "b" | BigInteger.ONE | BigDecimal.valueOf(2) | true | false | false
[DATADOG, B3] | "1" | "2" | "a" | outOfRangeTraceId | BigInteger.ONE | BigDecimal.valueOf(2) | true | false | false
}
}

View File

@ -21,8 +21,8 @@ class HttpInjectorTest extends DDSpecification {
}
HttpCodec.Injector injector = HttpCodec.createInjector(config)
def traceId = "1"
def spanId = "2"
def traceId = BigInteger.ONE
def spanId = BigInteger.valueOf(2)
def writer = new ListWriter()
def tracer = new DDTracer(writer)
@ -30,7 +30,7 @@ class HttpInjectorTest extends DDSpecification {
new DDSpanContext(
traceId,
spanId,
"0",
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -45,7 +45,7 @@ class HttpInjectorTest extends DDSpecification {
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
new PendingTrace(tracer, BigInteger.ONE, [:]),
tracer)
final Map<String, String> carrier = Mock()
@ -55,8 +55,8 @@ class HttpInjectorTest extends DDSpecification {
then:
if (styles.contains(DATADOG)) {
1 * carrier.put(DatadogHttpCodec.TRACE_ID_KEY, traceId)
1 * carrier.put(DatadogHttpCodec.SPAN_ID_KEY, spanId)
1 * carrier.put(DatadogHttpCodec.TRACE_ID_KEY, traceId.toString())
1 * carrier.put(DatadogHttpCodec.SPAN_ID_KEY, spanId.toString())
1 * carrier.put(DatadogHttpCodec.OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(DatadogHttpCodec.OT_BAGGAGE_PREFIX + "k2", "v2")
if (samplingPriority != PrioritySampling.UNSET) {
@ -67,8 +67,8 @@ class HttpInjectorTest extends DDSpecification {
}
}
if (styles.contains(B3)) {
1 * carrier.put(B3HttpCodec.TRACE_ID_KEY, traceId)
1 * carrier.put(B3HttpCodec.SPAN_ID_KEY, spanId)
1 * carrier.put(B3HttpCodec.TRACE_ID_KEY, traceId.toString())
1 * carrier.put(B3HttpCodec.SPAN_ID_KEY, spanId.toString())
if (samplingPriority != PrioritySampling.UNSET) {
1 * carrier.put(B3HttpCodec.SAMPLING_PRIORITY_KEY, "1")
}

View File

@ -161,9 +161,9 @@ class DDAgentWriterTest extends DDSpecification {
where:
minimalContext = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"",
"",
"",
@ -199,9 +199,9 @@ class DDAgentWriterTest extends DDSpecification {
def createMinimalTrace() {
def minimalContext = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"",
"",
"",
@ -318,8 +318,7 @@ class DDAgentWriterTest extends DDSpecification {
DDApi.Response sendSerializedTraces(
int representativeCount,
Integer sizeInBytes,
List<byte[]> traces)
{
List<byte[]> traces) {
// simulating a communication failure to a server
return DDApi.Response.failed(new IOException("comm error"))
}
@ -547,8 +546,7 @@ class DDAgentWriterTest extends DDSpecification {
DDApi.Response sendSerializedTraces(
int representativeCount,
Integer sizeInBytes,
List<byte[]> traces)
{
List<byte[]> traces) {
// simulating a communication failure to a server
return DDApi.Response.failed(new IOException("comm error"))
}

View File

@ -24,9 +24,9 @@ class DDApiIntegrationTest {
static final WRITER = new ListWriter()
static final TRACER = new DDTracer(WRITER)
static final CONTEXT = new DDSpanContext(
"1",
"1",
"0",
BigInteger.ONE,
BigInteger.ONE,
BigInteger.ZERO,
"fakeService",
"fakeOperation",
"fakeResource",
@ -36,7 +36,7 @@ class DDApiIntegrationTest {
false,
"fakeType",
Collections.emptyMap(),
new PendingTrace(TRACER, "1", [:]),
new PendingTrace(TRACER, BigInteger.ONE, [:]),
TRACER)
// Looks like okHttp needs to resolve this, even for connection over socket