Merge pull request #756 from DataDog/mar-kolya/b3-headers

Implement B3 headers extraction and injection
Tyler Benson 2019-03-21 10:35:33 -07:00 committed by GitHub
commit 18c7916338
14 changed files with 1085 additions and 198 deletions
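
Reviewer note: below is a minimal sketch of how the new propagation-style settings introduced in this PR are consumed, following the Config.get(Properties) path exercised in ConfigTest further down. The property keys, the getters, and the parsing behaviour (comma- or space-separated, case-insensitive, unknown values skipped) come from the diff; the demo class and its main() scaffolding are illustrative only.

import datadog.trace.api.Config;
import java.util.Properties;

public class PropagationStyleDemo {
  public static void main(final String[] args) {
    final Properties props = new Properties();
    // Keys added in this PR: "propagation.style.extract" / "propagation.style.inject".
    props.setProperty(Config.PROPAGATION_STYLE_EXTRACT, "Datadog, B3");
    props.setProperty(Config.PROPAGATION_STYLE_INJECT, "B3");

    // Values are split on commas or whitespace, matched case-insensitively against
    // Config.PropagationStyle, and unrecognized tokens are logged at debug and skipped.
    final Config config = Config.get(props);
    System.out.println(config.getPropagationStylesToExtract()); // [DATADOG, B3]
    System.out.println(config.getPropagationStylesToInject());  // [B3]
  }
}

The same settings can also be supplied as system properties or environment variables (e.g. dd.propagation.style.extract / DD_PROPAGATION_STYLE_EXTRACT), as the tests below do; both extraction and injection default to the Datadog style.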

View File

@ -1,6 +1,8 @@
apply from: "${rootDir}/gradle/java.gradle"
apply from: "${rootDir}/gradle/publish.gradle"
minimumBranchCoverage = 0.8
// These are tested outside of this module since this module mainly just defines 'API'
excludedClassesCoverage += [
'datadog.trace.api.DDSpanTypes',

View File

@ -4,6 +4,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
@ -55,6 +56,8 @@ public class Config {
public static final String PARTIAL_FLUSH_MIN_SPANS = "trace.partial.flush.min.spans";
public static final String RUNTIME_CONTEXT_FIELD_INJECTION =
"trace.runtime.context.field.injection";
public static final String PROPAGATION_STYLE_EXTRACT = "propagation.style.extract";
public static final String PROPAGATION_STYLE_INJECT = "propagation.style.inject";
public static final String JMX_FETCH_ENABLED = "jmxfetch.enabled";
public static final String JMX_FETCH_METRICS_CONFIGS = "jmxfetch.metrics-configs";
public static final String JMX_FETCH_CHECK_PERIOD = "jmxfetch.check-period";
@ -89,14 +92,20 @@ public class Config {
parseIntegerRangeSet("400-499", "default");
private static final boolean DEFAULT_HTTP_CLIENT_SPLIT_BY_DOMAIN = false;
private static final int DEFAULT_PARTIAL_FLUSH_MIN_SPANS = 0;
private static final String DEFAULT_PROPAGATION_STYLE_EXTRACT = PropagationStyle.DATADOG.name();
private static final String DEFAULT_PROPAGATION_STYLE_INJECT = PropagationStyle.DATADOG.name();
private static final boolean DEFAULT_JMX_FETCH_ENABLED = false;
public static final int DEFAULT_JMX_FETCH_STATSD_PORT = 8125;
private static final boolean DEFAULT_APP_CUSTOM_LOG_MANAGER = false;
// Must be defined last to allow above defaults to be properly initialized.
private static final Config INSTANCE = new Config();
private static final String SPLIT_BY_SPACE_OR_COMMA_REGEX = "[,\\s]+";
public enum PropagationStyle {
DATADOG,
B3
}
/**
* this is a random UUID that gets generated on JVM start up and is attached to every root span
@ -121,6 +130,8 @@ public class Config {
@Getter private final boolean httpClientSplitByDomain;
@Getter private final Integer partialFlushMinSpans;
@Getter private final boolean runtimeContextFieldInjection;
@Getter private final Set<PropagationStyle> propagationStylesToExtract;
@Getter private final Set<PropagationStyle> propagationStylesToInject;
@Getter private final boolean jmxFetchEnabled;
@Getter private final List<String> jmxFetchMetricsConfigs;
@Getter private final Integer jmxFetchCheckPeriod;
@ -174,6 +185,19 @@ public class Config {
getBooleanSettingFromEnvironment(
RUNTIME_CONTEXT_FIELD_INJECTION, DEFAULT_RUNTIME_CONTEXT_FIELD_INJECTION);
propagationStylesToExtract =
getEnumSetSettingFromEnvironment(
PROPAGATION_STYLE_EXTRACT,
DEFAULT_PROPAGATION_STYLE_EXTRACT,
PropagationStyle.class,
true);
propagationStylesToInject =
getEnumSetSettingFromEnvironment(
PROPAGATION_STYLE_INJECT,
DEFAULT_PROPAGATION_STYLE_INJECT,
PropagationStyle.class,
true);
jmxFetchEnabled =
getBooleanSettingFromEnvironment(JMX_FETCH_ENABLED, DEFAULT_JMX_FETCH_ENABLED);
jmxFetchMetricsConfigs = getListSettingFromEnvironment(JMX_FETCH_METRICS_CONFIGS, null);
@ -235,6 +259,19 @@ public class Config {
getPropertyBooleanValue(
properties, RUNTIME_CONTEXT_FIELD_INJECTION, parent.runtimeContextFieldInjection);
final Set<PropagationStyle> parsedPropagationStylesToExtract =
getPropertySetValue(properties, PROPAGATION_STYLE_EXTRACT, PropagationStyle.class);
propagationStylesToExtract =
parsedPropagationStylesToExtract == null
? parent.propagationStylesToExtract
: parsedPropagationStylesToExtract;
final Set<PropagationStyle> parsedPropagationStylesToInject =
getPropertySetValue(properties, PROPAGATION_STYLE_INJECT, PropagationStyle.class);
propagationStylesToInject =
parsedPropagationStylesToInject == null
? parent.propagationStylesToInject
: parsedPropagationStylesToInject;
jmxFetchEnabled =
getPropertyBooleanValue(properties, JMX_FETCH_ENABLED, parent.jmxFetchEnabled);
jmxFetchMetricsConfigs =
@ -357,10 +394,6 @@ public class Config {
/**
* Calls {@link #getSettingFromEnvironment(String, String)} and converts the result to a Boolean.
*
* @param name
* @param defaultValue
* @return
*/
public static Boolean getBooleanSettingFromEnvironment(
final String name, final Boolean defaultValue) {
@ -370,10 +403,6 @@ public class Config {
/**
* Calls {@link #getSettingFromEnvironment(String, String)} and converts the result to a Float.
*
* @param name
* @param defaultValue
* @return
*/
public static Float getFloatSettingFromEnvironment(final String name, final Float defaultValue) {
final String value = getSettingFromEnvironment(name, null);
@ -385,6 +414,9 @@ public class Config {
}
}
/**
* Calls {@link #getSettingFromEnvironment(String, String)} and converts the result to an Integer.
*/
private static Integer getIntegerSettingFromEnvironment(
final String name, final Integer defaultValue) {
final String value = getSettingFromEnvironment(name, null);
@ -396,6 +428,31 @@ public class Config {
}
}
/**
* Calls {@link #getSettingFromEnvironment(String, String)} and converts the result to a set of
* enum values, splitting the setting by space or comma.
*/
private static <T extends Enum<T>> Set<T> getEnumSetSettingFromEnvironment(
final String name,
final String defaultValue,
final Class<T> clazz,
final boolean emptyResultMeansUseDefault) {
final String value = getSettingFromEnvironment(name, defaultValue);
Set<T> result =
convertStringSetToEnumSet(
parseStringIntoSetOfNonEmptyStrings(value, SPLIT_BY_SPACE_OR_COMMA_REGEX), clazz);
if (emptyResultMeansUseDefault && result.isEmpty()) {
// Treat empty parsing result as no value and use default instead
result =
convertStringSetToEnumSet(
parseStringIntoSetOfNonEmptyStrings(defaultValue, SPLIT_BY_SPACE_OR_COMMA_REGEX),
clazz);
}
return result;
}
private Set<Integer> getIntegerRangeSettingFromEnvironment(
final String name, final Set<Integer> defaultValue) {
final String value = getSettingFromEnvironment(name, null);
@ -435,6 +492,21 @@ public class Config {
return value == null || value.trim().isEmpty() ? defaultValue : Integer.valueOf(value);
}
private static <T extends Enum<T>> Set<T> getPropertySetValue(
final Properties properties, final String name, final Class<T> clazz) {
final String value = properties.getProperty(name);
if (value != null) {
final Set<T> result =
convertStringSetToEnumSet(
parseStringIntoSetOfNonEmptyStrings(value, SPLIT_BY_SPACE_OR_COMMA_REGEX), clazz);
if (!result.isEmpty()) {
return result;
}
}
// null means parent value should be used
return null;
}
private Set<Integer> getPropertyIntegerRangeValue(
final Properties properties, final String name, final Set<Integer> defaultValue) {
final String value = properties.getProperty(name);
@ -477,9 +549,7 @@ public class Config {
private static Set<Integer> parseIntegerRangeSet(String str, final String settingName)
throws NumberFormatException {
if (str == null) {
str = "";
}
assert str != null;
str = str.replaceAll("\\s", "");
if (!str.matches("\\d{3}(?:-\\d{3})?(?:,\\d{3}(?:-\\d{3})?)*")) {
log.warn(
@ -522,6 +592,37 @@ public class Config {
return Collections.unmodifiableList(Arrays.asList(tokens));
}
private static Set<String> parseStringIntoSetOfNonEmptyStrings(
final String str, final String regex) {
// Using LinkedHashSet to preserve original string order
final Set<String> result = new LinkedHashSet<>();
// Java returns a single value when splitting an empty string. We do not need that value, so
// we throw it out.
for (final String value : str.split(regex)) {
if (!value.isEmpty()) {
result.add(value);
}
}
return Collections.unmodifiableSet(result);
}
private static <V extends Enum<V>> Set<V> convertStringSetToEnumSet(
final Set<String> input, final Class<V> clazz) {
// Using LinkedHashSet to preserve original string order
final Set<V> result = new LinkedHashSet<>();
for (final String value : input) {
try {
result.add(Enum.valueOf(clazz, value.toUpperCase()));
} catch (final IllegalArgumentException e) {
log.debug("Cannot recognize config string value: {}, {}", value, clazz);
}
}
return Collections.unmodifiableSet(result);
}
// This has to be placed after all other static fields to give them a chance to initialize
private static final Config INSTANCE = new Config();
public static Config get() {
return INSTANCE;
}

View File

@ -26,6 +26,8 @@ import static datadog.trace.api.Config.LANGUAGE_TAG_VALUE
import static datadog.trace.api.Config.PARTIAL_FLUSH_MIN_SPANS
import static datadog.trace.api.Config.PREFIX
import static datadog.trace.api.Config.PRIORITY_SAMPLING
import static datadog.trace.api.Config.PROPAGATION_STYLE_EXTRACT
import static datadog.trace.api.Config.PROPAGATION_STYLE_INJECT
import static datadog.trace.api.Config.RUNTIME_CONTEXT_FIELD_INJECTION
import static datadog.trace.api.Config.RUNTIME_ID_TAG
import static datadog.trace.api.Config.SERVICE
@ -47,13 +49,15 @@ class ConfigTest extends Specification {
private static final DD_SERVICE_MAPPING_ENV = "DD_SERVICE_MAPPING"
private static final DD_SPAN_TAGS_ENV = "DD_SPAN_TAGS"
private static final DD_HEADER_TAGS_ENV = "DD_HEADER_TAGS"
private static final DD_PROPAGATION_STYLE_EXTRACT = "DD_PROPAGATION_STYLE_EXTRACT"
private static final DD_PROPAGATION_STYLE_INJECT = "DD_PROPAGATION_STYLE_INJECT"
private static final DD_JMXFETCH_METRICS_CONFIGS_ENV = "DD_JMXFETCH_METRICS_CONFIGS"
private static final DD_TRACE_AGENT_PORT_ENV = "DD_TRACE_AGENT_PORT"
private static final DD_AGENT_PORT_LEGACY_ENV = "DD_AGENT_PORT"
def "verify defaults"() {
when:
def config = Config.get()
Config config = provider()
then:
config.serviceName == "unnamed-java-app"
@ -72,6 +76,8 @@ class ConfigTest extends Specification {
config.httpClientSplitByDomain == false
config.partialFlushMinSpans == 0
config.runtimeContextFieldInjection == true
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.DATADOG]
config.jmxFetchEnabled == false
config.jmxFetchMetricsConfigs == []
config.jmxFetchCheckPeriod == null
@ -79,6 +85,73 @@ class ConfigTest extends Specification {
config.jmxFetchStatsdHost == null
config.jmxFetchStatsdPort == DEFAULT_JMX_FETCH_STATSD_PORT
config.toString().contains("unnamed-java-app")
where:
provider << [{ new Config() }, { Config.get() }, {
def props = new Properties()
props.setProperty("something", "unused")
Config.get(props)
}]
}
def "specify overrides via properties"() {
setup:
def prop = new Properties()
prop.setProperty(SERVICE_NAME, "something else")
prop.setProperty(WRITER_TYPE, "LoggingWriter")
prop.setProperty(AGENT_HOST, "somehost")
prop.setProperty(TRACE_AGENT_PORT, "123")
prop.setProperty(AGENT_UNIX_DOMAIN_SOCKET, "somepath")
prop.setProperty(AGENT_PORT_LEGACY, "456")
prop.setProperty(PRIORITY_SAMPLING, "false")
prop.setProperty(TRACE_RESOLVER_ENABLED, "false")
prop.setProperty(SERVICE_MAPPING, "a:1")
prop.setProperty(GLOBAL_TAGS, "b:2")
prop.setProperty(SPAN_TAGS, "c:3")
prop.setProperty(JMX_TAGS, "d:4")
prop.setProperty(HEADER_TAGS, "e:5")
prop.setProperty(HTTP_SERVER_ERROR_STATUSES, "123-456,457,124-125,122")
prop.setProperty(HTTP_CLIENT_ERROR_STATUSES, "111")
prop.setProperty(HTTP_CLIENT_HOST_SPLIT_BY_DOMAIN, "true")
prop.setProperty(PARTIAL_FLUSH_MIN_SPANS, "15")
prop.setProperty(RUNTIME_CONTEXT_FIELD_INJECTION, "false")
prop.setProperty(PROPAGATION_STYLE_EXTRACT, "Datadog, B3")
prop.setProperty(PROPAGATION_STYLE_INJECT, "B3, Datadog")
prop.setProperty(JMX_FETCH_ENABLED, "true")
prop.setProperty(JMX_FETCH_METRICS_CONFIGS, "/foo.yaml,/bar.yaml")
prop.setProperty(JMX_FETCH_CHECK_PERIOD, "100")
prop.setProperty(JMX_FETCH_REFRESH_BEANS_PERIOD, "200")
prop.setProperty(JMX_FETCH_STATSD_HOST, "statsd host")
prop.setProperty(JMX_FETCH_STATSD_PORT, "321")
when:
Config config = Config.get(prop)
then:
config.serviceName == "something else"
config.writerType == "LoggingWriter"
config.agentHost == "somehost"
config.agentPort == 123
config.agentUnixDomainSocket == "somepath"
config.prioritySamplingEnabled == false
config.traceResolverEnabled == false
config.serviceMapping == [a: "1"]
config.mergedSpanTags == [b: "2", c: "3"]
config.mergedJmxTags == [b: "2", d: "4", (RUNTIME_ID_TAG): config.getRuntimeId(), (SERVICE): config.serviceName, (LANGUAGE_TAG_KEY): LANGUAGE_TAG_VALUE]
config.headerTags == [e: "5"]
config.httpServerErrorStatuses == (122..457).toSet()
config.httpClientErrorStatuses == (111..111).toSet()
config.httpClientSplitByDomain == true
config.partialFlushMinSpans == 15
config.runtimeContextFieldInjection == false
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
config.jmxFetchEnabled == true
config.jmxFetchMetricsConfigs == ["/foo.yaml", "/bar.yaml"]
config.jmxFetchCheckPeriod == 100
config.jmxFetchRefreshBeansPeriod == 200
config.jmxFetchStatsdHost == "statsd host"
config.jmxFetchStatsdPort == 321
}
def "specify overrides via system properties"() {
@ -101,6 +174,8 @@ class ConfigTest extends Specification {
System.setProperty(PREFIX + HTTP_CLIENT_HOST_SPLIT_BY_DOMAIN, "true")
System.setProperty(PREFIX + PARTIAL_FLUSH_MIN_SPANS, "15")
System.setProperty(PREFIX + RUNTIME_CONTEXT_FIELD_INJECTION, "false")
System.setProperty(PREFIX + PROPAGATION_STYLE_EXTRACT, "Datadog, B3")
System.setProperty(PREFIX + PROPAGATION_STYLE_INJECT, "B3, Datadog")
System.setProperty(PREFIX + JMX_FETCH_ENABLED, "true")
System.setProperty(PREFIX + JMX_FETCH_METRICS_CONFIGS, "/foo.yaml,/bar.yaml")
System.setProperty(PREFIX + JMX_FETCH_CHECK_PERIOD, "100")
@ -109,7 +184,7 @@ class ConfigTest extends Specification {
System.setProperty(PREFIX + JMX_FETCH_STATSD_PORT, "321")
when:
def config = new Config()
Config config = new Config()
then:
config.serviceName == "something else"
@ -128,6 +203,8 @@ class ConfigTest extends Specification {
config.httpClientSplitByDomain == true
config.partialFlushMinSpans == 15
config.runtimeContextFieldInjection == false
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
config.jmxFetchEnabled == true
config.jmxFetchMetricsConfigs == ["/foo.yaml", "/bar.yaml"]
config.jmxFetchCheckPeriod == 100
@ -140,6 +217,8 @@ class ConfigTest extends Specification {
setup:
environmentVariables.set(DD_SERVICE_NAME_ENV, "still something else")
environmentVariables.set(DD_WRITER_TYPE_ENV, "LoggingWriter")
environmentVariables.set(DD_PROPAGATION_STYLE_EXTRACT, "B3 Datadog")
environmentVariables.set(DD_PROPAGATION_STYLE_INJECT, "Datadog B3")
environmentVariables.set(DD_JMXFETCH_METRICS_CONFIGS_ENV, "some/file")
when:
@ -148,6 +227,8 @@ class ConfigTest extends Specification {
then:
config.serviceName == "still something else"
config.writerType == "LoggingWriter"
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
config.jmxFetchMetricsConfigs == ["some/file"]
}
@ -187,6 +268,8 @@ class ConfigTest extends Specification {
System.setProperty(PREFIX + HTTP_SERVER_ERROR_STATUSES, "1111")
System.setProperty(PREFIX + HTTP_CLIENT_ERROR_STATUSES, "1:1")
System.setProperty(PREFIX + HTTP_CLIENT_HOST_SPLIT_BY_DOMAIN, "invalid")
System.setProperty(PREFIX + PROPAGATION_STYLE_EXTRACT, "some garbage")
System.setProperty(PREFIX + PROPAGATION_STYLE_INJECT, " ")
when:
def config = new Config()
@ -204,6 +287,8 @@ class ConfigTest extends Specification {
config.httpServerErrorStatuses == (500..599).toSet()
config.httpClientErrorStatuses == (400..499).toSet()
config.httpClientSplitByDomain == false
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.DATADOG]
}
def "sys props and env vars overrides for trace_agent_port and agent_port_legacy as expected"() {
@ -267,6 +352,8 @@ class ConfigTest extends Specification {
properties.setProperty(HTTP_CLIENT_ERROR_STATUSES, "111")
properties.setProperty(HTTP_CLIENT_HOST_SPLIT_BY_DOMAIN, "true")
properties.setProperty(PARTIAL_FLUSH_MIN_SPANS, "15")
properties.setProperty(PROPAGATION_STYLE_EXTRACT, "B3 Datadog")
properties.setProperty(PROPAGATION_STYLE_INJECT, "Datadog B3")
properties.setProperty(JMX_FETCH_METRICS_CONFIGS, "/foo.yaml,/bar.yaml")
properties.setProperty(JMX_FETCH_CHECK_PERIOD, "100")
properties.setProperty(JMX_FETCH_REFRESH_BEANS_PERIOD, "200")
@ -292,6 +379,8 @@ class ConfigTest extends Specification {
config.httpClientErrorStatuses == (111..111).toSet()
config.httpClientSplitByDomain == true
config.partialFlushMinSpans == 15
config.propagationStylesToExtract.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
config.propagationStylesToInject.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
config.jmxFetchMetricsConfigs == ["/foo.yaml", "/bar.yaml"]
config.jmxFetchCheckPeriod == 100
config.jmxFetchRefreshBeansPeriod == 200
@ -410,6 +499,7 @@ class ConfigTest extends Specification {
System.setProperty("dd.prop.zero.test", "0")
System.setProperty("dd.prop.float.test", "0.3")
System.setProperty("dd.float.test", "0.4")
System.setProperty("dd.garbage.test", "garbage")
System.setProperty("dd.negative.test", "-1")
expect:
@ -423,6 +513,7 @@ class ConfigTest extends Specification {
"prop.float.test" | 0.3
"float.test" | 0.4
"negative.test" | -1.0
"garbage.test" | 10.0
"default.test" | 10.0
defaultValue = 10.0
@ -433,14 +524,22 @@ class ConfigTest extends Specification {
System.setProperty(PREFIX + SERVICE_MAPPING, mapString)
System.setProperty(PREFIX + SPAN_TAGS, mapString)
System.setProperty(PREFIX + HEADER_TAGS, mapString)
def props = new Properties()
props.setProperty(SERVICE_MAPPING, mapString)
props.setProperty(SPAN_TAGS, mapString)
props.setProperty(HEADER_TAGS, mapString)
when:
def config = new Config()
def propConfig = Config.get(props)
then:
config.serviceMapping == map
config.spanTags == map
config.headerTags == map
propConfig.serviceMapping == map
propConfig.spanTags == map
propConfig.headerTags == map
where:
mapString | map
@ -468,17 +567,25 @@ class ConfigTest extends Specification {
setup:
System.setProperty(PREFIX + HTTP_SERVER_ERROR_STATUSES, value)
System.setProperty(PREFIX + HTTP_CLIENT_ERROR_STATUSES, value)
def props = new Properties()
props.setProperty(HTTP_CLIENT_ERROR_STATUSES, value)
props.setProperty(HTTP_SERVER_ERROR_STATUSES, value)
when:
def config = new Config()
def propConfig = Config.get(props)
then:
if (expected) {
assert config.httpServerErrorStatuses == expected.toSet()
assert config.httpClientErrorStatuses == expected.toSet()
assert propConfig.httpServerErrorStatuses == expected.toSet()
assert propConfig.httpClientErrorStatuses == expected.toSet()
} else {
assert config.httpServerErrorStatuses == Config.DEFAULT_HTTP_SERVER_ERROR_STATUSES
assert config.httpClientErrorStatuses == Config.DEFAULT_HTTP_CLIENT_ERROR_STATUSES
assert propConfig.httpServerErrorStatuses == Config.DEFAULT_HTTP_SERVER_ERROR_STATUSES
assert propConfig.httpClientErrorStatuses == Config.DEFAULT_HTTP_CLIENT_ERROR_STATUSES
}
where:

View File

@ -2,8 +2,8 @@ package datadog.opentracing;
import datadog.opentracing.decorators.AbstractDecorator;
import datadog.opentracing.decorators.DDDecoratorsFactory;
import datadog.opentracing.propagation.DatadogHttpCodec;
import datadog.opentracing.propagation.ExtractedContext;
import datadog.opentracing.propagation.HttpCodec;
import datadog.opentracing.propagation.TagContext;
import datadog.opentracing.scopemanager.ContextualScopeManager;
import datadog.opentracing.scopemanager.ScopeContext;
@ -85,8 +85,8 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
}
});
private final DatadogHttpCodec.Injector injector;
private final DatadogHttpCodec.Extractor extractor;
private final HttpCodec.Injector injector;
private final HttpCodec.Extractor extractor;
/** By default, report to local agent and collect all traces. */
public DDTracer() {
@ -232,8 +232,9 @@ public class DDTracer implements io.opentracing.Tracer, Closeable, datadog.trace
// The JVM is already shutting down.
}
injector = new DatadogHttpCodec.Injector();
extractor = new DatadogHttpCodec.Extractor(taggedHeaders);
// TODO: we have too many constructors, we should move to some sort of builder approach
injector = HttpCodec.createInjector(Config.get());
extractor = HttpCodec.createExtractor(Config.get(), taggedHeaders);
if (this.writer instanceof DDAgentWriter) {
final DDApi api = ((DDAgentWriter) this.writer).getApi();

View File

@ -0,0 +1,149 @@
package datadog.opentracing.propagation;
import static datadog.opentracing.propagation.HttpCodec.ZERO;
import static datadog.opentracing.propagation.HttpCodec.validateUInt64BitsID;
import datadog.opentracing.DDSpanContext;
import datadog.trace.api.sampling.PrioritySampling;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMap;
import java.math.BigInteger;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
/**
* A codec designed for HTTP transport via B3 headers
*
* <p>TODO: there is a fair amount of code duplication between DatadogHttpCodec and this class,
* especially in the part where TagContext is handled. We may want to refactor that and avoid special
* handling of TagContext in other places (i.e. CompoundExtractor).
*/
@Slf4j
class B3HttpCodec {
private static final String TRACE_ID_KEY = "X-B3-TraceId";
private static final String SPAN_ID_KEY = "X-B3-SpanId";
private static final String SAMPLING_PRIORITY_KEY = "X-B3-Sampled";
private static final String SAMPLING_PRIORITY_ACCEPT = String.valueOf(1);
private static final String SAMPLING_PRIORITY_DROP = String.valueOf(0);
private static final int HEX_RADIX = 16;
private B3HttpCodec() {
// This class should not be created. This also makes code coverage checks happy.
}
public static class Injector implements HttpCodec.Injector {
@Override
public void inject(final DDSpanContext context, final TextMap carrier) {
try {
// TODO: should we rather store ids as BigInteger in the context to avoid parsing them twice?
final BigInteger traceId = new BigInteger(context.getTraceId());
final BigInteger spanId = new BigInteger(context.getSpanId());
carrier.put(TRACE_ID_KEY, traceId.toString(HEX_RADIX).toLowerCase());
carrier.put(SPAN_ID_KEY, spanId.toString(HEX_RADIX).toLowerCase());
if (context.lockSamplingPriority()) {
carrier.put(
SAMPLING_PRIORITY_KEY, convertSamplingPriority(context.getSamplingPriority()));
}
log.debug("{} - B3 parent context injected", context.getTraceId());
} catch (final NumberFormatException e) {
log.debug(
"Cannot parse context id(s): {} {}", context.getTraceId(), context.getSpanId(), e);
}
}
private String convertSamplingPriority(final int samplingPriority) {
return samplingPriority > 0 ? SAMPLING_PRIORITY_ACCEPT : SAMPLING_PRIORITY_DROP;
}
}
public static class Extractor implements HttpCodec.Extractor {
private final Map<String, String> taggedHeaders;
public Extractor(final Map<String, String> taggedHeaders) {
this.taggedHeaders = new HashMap<>();
for (final Map.Entry<String, String> mapping : taggedHeaders.entrySet()) {
this.taggedHeaders.put(mapping.getKey().trim().toLowerCase(), mapping.getValue());
}
}
@Override
public SpanContext extract(final TextMap carrier) {
try {
Map<String, String> tags = Collections.emptyMap();
String traceId = ZERO;
String spanId = ZERO;
int samplingPriority = PrioritySampling.UNSET;
for (final Map.Entry<String, String> entry : carrier) {
final String key = entry.getKey().toLowerCase();
final String value = entry.getValue();
if (value == null) {
continue;
}
if (TRACE_ID_KEY.equalsIgnoreCase(key)) {
final String trimmedValue;
final int length = value.length();
if (length > 32) {
log.debug("Header {} exceeded max length of 32: {}", TRACE_ID_KEY, value);
traceId = "0";
continue;
} else if (length > 16) {
trimmedValue = value.substring(length - 16);
} else {
trimmedValue = value;
}
traceId = validateUInt64BitsID(trimmedValue, HEX_RADIX);
} else if (SPAN_ID_KEY.equalsIgnoreCase(key)) {
spanId = validateUInt64BitsID(value, HEX_RADIX);
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = convertSamplingPriority(value);
}
if (taggedHeaders.containsKey(key)) {
if (tags.isEmpty()) {
tags = new HashMap<>();
}
tags.put(taggedHeaders.get(key), HttpCodec.decode(value));
}
}
if (!ZERO.equals(traceId)) {
final ExtractedContext context =
new ExtractedContext(
traceId,
spanId,
samplingPriority,
null,
Collections.<String, String>emptyMap(),
tags);
context.lockSamplingPriority();
log.debug("{} - Parent context extracted", context.getTraceId());
return context;
} else if (!tags.isEmpty()) {
log.debug("Tags context extracted");
return new TagContext(null, tags);
}
} catch (final RuntimeException e) {
log.debug("Exception when extracting context", e);
}
return null;
}
private int convertSamplingPriority(final String samplingPriority) {
return Integer.parseInt(samplingPriority) == 1
? PrioritySampling.SAMPLER_KEEP
: PrioritySampling.SAMPLER_DROP;
}
}
}
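
A side note on the ID formats the two codecs use: the Datadog codec keeps trace and span IDs as decimal strings, while the B3 injector above writes them as lowercase hex and the B3 extractor truncates 128-bit trace IDs to their low 64 bits. Here is a small standalone sketch of that mapping, using the same BigInteger calls as the codec and the values from the extractor test below; the demo class itself is not part of the PR.

import java.math.BigInteger;

public class B3IdFormatDemo {
  private static final int HEX_RADIX = 16;

  public static void main(final String[] args) {
    // Injection: the decimal trace id stored in DDSpanContext becomes a lowercase hex header value.
    final BigInteger traceId = new BigInteger("5060571933882717101");
    System.out.println(traceId.toString(HEX_RADIX)); // 463ac35c9f6413ad

    // Extraction: a 128-bit B3 trace id is truncated to its low 16 hex digits (64 bits)
    // and parsed back into the decimal form used internally.
    final String b3TraceId = "463ac35c9f6413ad48485a3953bb6124";
    final String low64 = b3TraceId.substring(b3TraceId.length() - 16);
    System.out.println(new BigInteger(low64, HEX_RADIX)); // 5208512171318403364
  }
}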

View File

@ -1,25 +1,20 @@
package datadog.opentracing.propagation;
import static datadog.opentracing.propagation.HttpCodec.ZERO;
import static datadog.opentracing.propagation.HttpCodec.validateUInt64BitsID;
import datadog.opentracing.DDSpanContext;
import datadog.trace.api.sampling.PrioritySampling;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMap;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
/** A codec designed for HTTP transport via headers */
/** A codec designed for HTTP transport via Datadog headers */
@Slf4j
public class DatadogHttpCodec {
// uint 64 bits max value, 2^64 - 1
static final BigInteger BIG_INTEGER_UINT64_MAX =
new BigInteger("2").pow(64).subtract(BigInteger.ONE);
class DatadogHttpCodec {
private static final String OT_BAGGAGE_PREFIX = "ot-baggage-";
private static final String TRACE_ID_KEY = "x-datadog-trace-id";
@ -27,11 +22,16 @@ public class DatadogHttpCodec {
private static final String SAMPLING_PRIORITY_KEY = "x-datadog-sampling-priority";
private static final String ORIGIN_KEY = "x-datadog-origin";
public static class Injector {
private DatadogHttpCodec() {
// This class should not be created. This also makes code coverage checks happy.
}
public static class Injector implements HttpCodec.Injector {
@Override
public void inject(final DDSpanContext context, final TextMap carrier) {
carrier.put(TRACE_ID_KEY, String.valueOf(context.getTraceId()));
carrier.put(SPAN_ID_KEY, String.valueOf(context.getSpanId()));
carrier.put(TRACE_ID_KEY, context.getTraceId());
carrier.put(SPAN_ID_KEY, context.getSpanId());
if (context.lockSamplingPriority()) {
carrier.put(SAMPLING_PRIORITY_KEY, String.valueOf(context.getSamplingPriority()));
}
@ -41,23 +41,13 @@ public class DatadogHttpCodec {
}
for (final Map.Entry<String, String> entry : context.baggageItems()) {
carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), encode(entry.getValue()));
carrier.put(OT_BAGGAGE_PREFIX + entry.getKey(), HttpCodec.encode(entry.getValue()));
}
log.debug("{} - Parent context injected", context.getTraceId());
}
private String encode(final String value) {
String encoded = value;
try {
encoded = URLEncoder.encode(value, "UTF-8");
} catch (final UnsupportedEncodingException e) {
log.info("Failed to encode value - {}", value);
}
return encoded;
log.debug("{} - Datadog parent context injected", context.getTraceId());
}
}
public static class Extractor {
public static class Extractor implements HttpCodec.Extractor {
private final Map<String, String> taggedHeaders;
public Extractor(final Map<String, String> taggedHeaders) {
@ -67,92 +57,63 @@ public class DatadogHttpCodec {
}
}
@Override
public SpanContext extract(final TextMap carrier) {
Map<String, String> baggage = Collections.emptyMap();
Map<String, String> tags = Collections.emptyMap();
String traceId = "0";
String spanId = "0";
int samplingPriority = PrioritySampling.UNSET;
String origin = null;
for (final Map.Entry<String, String> entry : carrier) {
final String key = entry.getKey().toLowerCase();
final String val = entry.getValue();
if (val == null) {
continue;
}
if (TRACE_ID_KEY.equalsIgnoreCase(key)) {
traceId = validateUInt64BitsID(val);
} else if (SPAN_ID_KEY.equalsIgnoreCase(key)) {
spanId = validateUInt64BitsID(val);
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(val);
} else if (ORIGIN_KEY.equalsIgnoreCase(key)) {
origin = val;
} else if (key.startsWith(OT_BAGGAGE_PREFIX)) {
if (baggage.isEmpty()) {
baggage = new HashMap<>();
}
baggage.put(key.replace(OT_BAGGAGE_PREFIX, ""), decode(val));
}
if (taggedHeaders.containsKey(key)) {
if (tags.isEmpty()) {
tags = new HashMap<>();
}
tags.put(taggedHeaders.get(key), decode(val));
}
}
SpanContext context = null;
if (!"0".equals(traceId)) {
final ExtractedContext ctx =
new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
ctx.lockSamplingPriority();
log.debug("{} - Parent context extracted", ctx.getTraceId());
context = ctx;
} else if (origin != null || !tags.isEmpty()) {
context = new TagContext(origin, tags);
}
return context;
}
private String decode(final String value) {
String decoded = value;
try {
decoded = URLDecoder.decode(value, "UTF-8");
} catch (final UnsupportedEncodingException e) {
log.info("Failed to decode value - {}", value);
}
return decoded;
}
Map<String, String> baggage = Collections.emptyMap();
Map<String, String> tags = Collections.emptyMap();
String traceId = ZERO;
String spanId = ZERO;
int samplingPriority = PrioritySampling.UNSET;
String origin = null;
/**
* Helper method to validate an ID String to verify that it is an unsigned 64 bits number and is
* within range.
*
* @param val the String that contains the ID
* @return the ID in String format if it passes validations
* @throws IllegalArgumentException if val is not a number or if the number is out of range
*/
private String validateUInt64BitsID(final String val) throws IllegalArgumentException {
try {
final BigInteger validate = new BigInteger(val);
if (validate.compareTo(BigInteger.ZERO) == -1
|| validate.compareTo(BIG_INTEGER_UINT64_MAX) == 1) {
throw new IllegalArgumentException(
"ID out of range, must be between 0 and 2^64-1, got: " + val);
for (final Map.Entry<String, String> entry : carrier) {
final String key = entry.getKey().toLowerCase();
final String value = entry.getValue();
if (value == null) {
continue;
}
if (TRACE_ID_KEY.equalsIgnoreCase(key)) {
traceId = validateUInt64BitsID(value, 10);
} else if (SPAN_ID_KEY.equalsIgnoreCase(key)) {
spanId = validateUInt64BitsID(value, 10);
} else if (SAMPLING_PRIORITY_KEY.equalsIgnoreCase(key)) {
samplingPriority = Integer.parseInt(value);
} else if (ORIGIN_KEY.equalsIgnoreCase(key)) {
origin = value;
} else if (key.startsWith(OT_BAGGAGE_PREFIX)) {
if (baggage.isEmpty()) {
baggage = new HashMap<>();
}
baggage.put(key.replace(OT_BAGGAGE_PREFIX, ""), HttpCodec.decode(value));
}
if (taggedHeaders.containsKey(key)) {
if (tags.isEmpty()) {
tags = new HashMap<>();
}
tags.put(taggedHeaders.get(key), HttpCodec.decode(value));
}
}
return val;
} catch (final NumberFormatException nfe) {
throw new IllegalArgumentException(
"Expecting a number for trace ID or span ID, but got: " + val, nfe);
if (!ZERO.equals(traceId)) {
final ExtractedContext context =
new ExtractedContext(traceId, spanId, samplingPriority, origin, baggage, tags);
context.lockSamplingPriority();
log.debug("{} - Parent context extracted", context.getTraceId());
return context;
} else if (origin != null || !tags.isEmpty()) {
log.debug("Tags context extracted");
return new TagContext(origin, tags);
}
} catch (final RuntimeException e) {
log.debug("Exception when extracting context", e);
}
return null;
}
}
}

View File

@ -0,0 +1,146 @@
package datadog.opentracing.propagation;
import datadog.opentracing.DDSpanContext;
import datadog.trace.api.Config;
import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMap;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class HttpCodec {
// uint 64 bits max value, 2^64 - 1
static final BigInteger UINT64_MAX = new BigInteger("2").pow(64).subtract(BigInteger.ONE);
static final String ZERO = "0";
public interface Injector {
void inject(final DDSpanContext context, final TextMap carrier);
}
public interface Extractor {
SpanContext extract(final TextMap carrier);
}
public static Injector createInjector(final Config config) {
final List<Injector> injectors = new ArrayList<>();
for (final Config.PropagationStyle style : config.getPropagationStylesToInject()) {
if (style == Config.PropagationStyle.DATADOG) {
injectors.add(new DatadogHttpCodec.Injector());
continue;
}
if (style == Config.PropagationStyle.B3) {
injectors.add(new B3HttpCodec.Injector());
continue;
}
log.debug("No implementation found to inject propagation style: {}", style);
}
return new CompoundInjector(injectors);
}
public static Extractor createExtractor(
final Config config, final Map<String, String> taggedHeaders) {
final List<Extractor> extractors = new ArrayList<>();
for (final Config.PropagationStyle style : config.getPropagationStylesToExtract()) {
if (style == Config.PropagationStyle.DATADOG) {
extractors.add(new DatadogHttpCodec.Extractor(taggedHeaders));
continue;
}
if (style == Config.PropagationStyle.B3) {
extractors.add(new B3HttpCodec.Extractor(taggedHeaders));
continue;
}
log.debug("No implementation found to extract propagation style: {}", style);
}
return new CompoundExtractor(extractors);
}
public static class CompoundInjector implements Injector {
private final List<Injector> injectors;
public CompoundInjector(final List<Injector> injectors) {
this.injectors = injectors;
}
@Override
public void inject(final DDSpanContext context, final TextMap carrier) {
for (final Injector injector : injectors) {
injector.inject(context, carrier);
}
}
}
public static class CompoundExtractor implements Extractor {
private final List<Extractor> extractors;
public CompoundExtractor(final List<Extractor> extractors) {
this.extractors = extractors;
}
@Override
public SpanContext extract(final TextMap carrier) {
SpanContext context = null;
for (final Extractor extractor : extractors) {
context = extractor.extract(carrier);
// Use incomplete TagContext only as last resort
if (context != null && (context instanceof ExtractedContext)) {
return context;
}
}
return context;
}
}
/**
* Helper method to validate an ID String, verifying that it is an unsigned 64-bit number and is
* within range.
*
* @param value the String that contains the ID
* @param radix radix to use to parse the ID
* @return the ID as a decimal String if it passes validation
* @throws IllegalArgumentException if the value cannot be parsed as a number in the given radix
*     or is out of range
*/
static String validateUInt64BitsID(final String value, final int radix)
throws IllegalArgumentException {
final BigInteger parsedValue = new BigInteger(value, radix);
if (parsedValue.compareTo(BigInteger.ZERO) == -1 || parsedValue.compareTo(UINT64_MAX) == 1) {
throw new IllegalArgumentException(
"ID out of range, must be between 0 and 2^64-1, got: " + value);
}
// We use decimals
return parsedValue.toString();
}
/** URL encode value */
static String encode(final String value) {
String encoded = value;
try {
encoded = URLEncoder.encode(value, "UTF-8");
} catch (final UnsupportedEncodingException e) {
log.info("Failed to encode value - {}", value);
}
return encoded;
}
/** URL decode value */
static String decode(final String value) {
String decoded = value;
try {
decoded = URLDecoder.decode(value, "UTF-8");
} catch (final UnsupportedEncodingException e) {
log.info("Failed to decode value - {}", value);
}
return decoded;
}
}
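
To illustrate how the compound codecs built by createInjector/createExtractor behave when both styles are enabled: extractors run in configured order and the first one that yields a full ExtractedContext wins, with a bare TagContext used only as a last resort. A rough sketch follows; the demo class is illustrative and sits in the propagation package purely so the package-private codecs are visible, and the header names and expected precedence match HttpExtractorTest below.

package datadog.opentracing.propagation;

import io.opentracing.SpanContext;
import io.opentracing.propagation.TextMapExtractAdapter;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class CompoundExtractorDemo {
  public static void main(final String[] args) {
    final Map<String, String> taggedHeaders = Collections.emptyMap();
    // Equivalent to what createExtractor builds for propagation styles "Datadog, B3".
    final HttpCodec.Extractor extractor =
        new HttpCodec.CompoundExtractor(
            Arrays.<HttpCodec.Extractor>asList(
                new DatadogHttpCodec.Extractor(taggedHeaders),
                new B3HttpCodec.Extractor(taggedHeaders)));

    final Map<String, String> headers = new HashMap<>();
    headers.put("x-datadog-trace-id", "1");
    headers.put("x-datadog-span-id", "2");
    headers.put("X-B3-TraceId", "a"); // hex "a" would parse to decimal 10
    headers.put("X-B3-SpanId", "b");  // hex "b" would parse to decimal 11

    // The Datadog extractor runs first and returns a full ExtractedContext with
    // traceId "1" / spanId "2", so the B3 values are never consulted.
    final SpanContext context = extractor.extract(new TextMapExtractAdapter(headers));
    System.out.println(context instanceof ExtractedContext); // true
  }
}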

View File

@ -0,0 +1,132 @@
package datadog.opentracing.propagation
import datadog.trace.api.sampling.PrioritySampling
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Specification
import static datadog.opentracing.propagation.B3HttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.B3HttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.B3HttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class B3HttpExtractorTest extends Specification {
HttpCodec.Extractor extractor = new B3HttpCodec.Extractor(["SOME_HEADER": "some-tag"])
def "extract http headers"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()): traceId.toString(16).toLowerCase(),
(SPAN_ID_KEY.toUpperCase()) : spanId.toString(16).toLowerCase(),
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != null) {
headers.put(SAMPLING_PRIORITY_KEY, "$samplingPriority".toString())
}
when:
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context.traceId == traceId.toString()
context.spanId == spanId.toString()
context.baggage == [:]
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == expectedSamplingPriority
context.origin == null
where:
traceId | spanId | samplingPriority | expectedSamplingPriority
1G | 2G | null | PrioritySampling.UNSET
2G | 3G | 1 | PrioritySampling.SAMPLER_KEEP
3G | 4G | 0 | PrioritySampling.SAMPLER_DROP
UINT64_MAX | UINT64_MAX.minus(1) | 0 | PrioritySampling.SAMPLER_DROP
UINT64_MAX.minus(1) | UINT64_MAX | 1 | PrioritySampling.SAMPLER_KEEP
}
def "extract 128 bit id truncates id to 64 bit"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()): traceId,
(SPAN_ID_KEY.toUpperCase()) : spanId,
]
when:
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
if (expectedTraceId) {
assert context.traceId == expectedTraceId
assert context.spanId == expectedSpanId
} else {
assert context == null
}
where:
traceId | spanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | "0"
"1" | "-1" | null | "0"
"0" | "1" | null | "0"
"00001" | "00001" | "1" | "1"
"463ac35c9f6413ad" | "463ac35c9f6413ad" | "5060571933882717101" | "5060571933882717101"
"463ac35c9f6413ad48485a3953bb6124" | "1" | "5208512171318403364" | "1"
"f".multiply(16) | "1" | "$UINT64_MAX" | "1"
"a".multiply(16) + "f".multiply(16) | "1" | "$UINT64_MAX" | "1"
"1" + "f".multiply(32) | "1" | null | "1"
"0" + "f".multiply(32) | "1" | null | "1"
"1" | "f".multiply(16) | "1" | "$UINT64_MAX"
"1" | "1" + "f".multiply(16) | null | "0"
"1" | "000" + "f".multiply(16) | "1" | "$UINT64_MAX"
}
def "extract header tags with no propagation"() {
when:
TagContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
!(context instanceof ExtractedContext)
context.getTags() == ["some-tag": "my-interesting-info"]
where:
headers | _
[SOME_HEADER: "my-interesting-info"] | _
}
def "extract empty headers returns null"() {
expect:
extractor.extract(new TextMapExtractAdapter(["ignored-header": "ignored-value"])) == null
}
def "extract http headers with invalid non-numeric ID"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()): "traceId",
(SPAN_ID_KEY.toUpperCase()) : "spanId",
SOME_HEADER : "my-interesting-info",
]
when:
SpanContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context == null
}
def "extract http headers with out of range span ID"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()): "0",
(SPAN_ID_KEY.toUpperCase()) : "-1",
SOME_HEADER : "my-interesting-info",
]
when:
SpanContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
context == null
}
}

View File

@ -0,0 +1,109 @@
package datadog.opentracing.propagation
import datadog.opentracing.DDSpanContext
import datadog.opentracing.DDTracer
import datadog.opentracing.PendingTrace
import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.writer.ListWriter
import io.opentracing.propagation.TextMapInjectAdapter
import spock.lang.Specification
import static datadog.opentracing.propagation.B3HttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.B3HttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.B3HttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class B3HttpInjectorTest extends Specification {
HttpCodec.Injector injector = new B3HttpCodec.Injector()
def "inject http headers"() {
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
traceId,
spanId,
"0",
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
"fakeOrigin",
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
tracer)
final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
1 * carrier.put(TRACE_ID_KEY, new BigInteger(traceId).toString(16).toLowerCase())
1 * carrier.put(SPAN_ID_KEY, new BigInteger(spanId).toString(16).toLowerCase())
if (expectedSamplingPriority != null) {
1 * carrier.put(SAMPLING_PRIORITY_KEY, "$expectedSamplingPriority")
}
0 * _
where:
traceId | spanId | samplingPriority | expectedSamplingPriority
"1" | "2" | PrioritySampling.UNSET | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | 1
"4" | "5" | PrioritySampling.SAMPLER_DROP | 0
"5" | "6" | PrioritySampling.USER_KEEP | 1
"6" | "7" | PrioritySampling.USER_DROP | 0
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | null
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | 1
}
def "unparseable ids"() {
setup:
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
traceId,
spanId,
"0",
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
"fakeOrigin",
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
tracer)
final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
0 * _
where:
traceId | spanId | samplingPriority
"abc" | "1" | PrioritySampling.UNSET
"1" | "cbd" | PrioritySampling.SAMPLER_KEEP
}
}

View File

@ -1,57 +1,56 @@
package datadog.opentracing.propagation
import datadog.trace.api.sampling.PrioritySampling
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Specification
import static datadog.opentracing.propagation.DatadogHttpCodec.BIG_INTEGER_UINT64_MAX
import static datadog.opentracing.propagation.DatadogHttpCodec.ORIGIN_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class DatadogHttpExtractorTest extends Specification {
DatadogHttpCodec.Extractor extractor = new DatadogHttpCodec.Extractor(["SOME_HEADER": "some-tag"])
HttpCodec.Extractor extractor = new DatadogHttpCodec.Extractor(["SOME_HEADER": "some-tag"])
def "extract http headers"() {
setup:
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : traceID,
(SPAN_ID_KEY.toUpperCase()) : spanID,
def headers = [
(TRACE_ID_KEY.toUpperCase()) : traceId,
(SPAN_ID_KEY.toUpperCase()) : spanId,
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, "$samplingPriority".toString())
headers.put(SAMPLING_PRIORITY_KEY, "$samplingPriority".toString())
}
if (origin) {
actual.put(ORIGIN_KEY, origin)
headers.put(ORIGIN_KEY, origin)
}
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(actual))
when:
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
expect:
context.traceId == traceID
context.spanId == spanID
context.baggage.get("k1") == "v1"
context.baggage.get("k2") == "v2"
then:
context.traceId == traceId
context.spanId == spanId
context.baggage == ["k1": "v1", "k2": "v2"]
context.tags == ["some-tag": "my-interesting-info"]
context.samplingPriority == samplingPriority
context.origin == origin
where:
traceID | spanID | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"1" | "2" | PrioritySampling.SAMPLER_KEEP | "saipan"
// Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | PrioritySampling.UNSET | "saipan"
"18446744073709551615" | "18446744073709551614" | PrioritySampling.SAMPLER_KEEP | null
BIG_INTEGER_UINT64_MAX.toString() | BIG_INTEGER_UINT64_MAX.minus(1).toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"2" | "3" | PrioritySampling.SAMPLER_KEEP | "saipan"
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | "saipan"
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | "saipan"
}
def "extract header tags with no propagation"() {
@ -62,7 +61,7 @@ class DatadogHttpExtractorTest extends Specification {
!(context instanceof ExtractedContext)
context.getTags() == ["some-tag": "my-interesting-info"]
if (headers.containsKey(ORIGIN_KEY)) {
((TagContext) context).origin == "my-origin"
assert ((TagContext) context).origin == "my-origin"
}
where:
@ -78,35 +77,25 @@ class DatadogHttpExtractorTest extends Specification {
def "extract http headers with invalid non-numeric ID"() {
setup:
final Map<String, String> actual = [
(TRACE_ID_KEY.toUpperCase()) : "traceID",
(SPAN_ID_KEY.toUpperCase()) : "spanID",
def headers = [
(TRACE_ID_KEY.toUpperCase()) : "traceId",
(SPAN_ID_KEY.toUpperCase()) : "spanId",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k2"): "v2",
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
when:
extractor.extract(new TextMapExtractAdapter(actual))
SpanContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
def iae = thrown(IllegalArgumentException)
assert iae.cause instanceof NumberFormatException
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
context == null
}
def "extract http headers with out of range trace ID"() {
setup:
String outOfRangeTraceId = BIG_INTEGER_UINT64_MAX.add(BigInteger.ONE).toString()
final Map<String, String> actual = [
String outOfRangeTraceId = UINT64_MAX.add(BigInteger.ONE).toString()
def headers = [
(TRACE_ID_KEY.toUpperCase()) : outOfRangeTraceId,
(SPAN_ID_KEY.toUpperCase()) : "0",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
@ -114,25 +103,16 @@ class DatadogHttpExtractorTest extends Specification {
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
when:
extractor.extract(new TextMapExtractAdapter(actual))
SpanContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
thrown(IllegalArgumentException)
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
context == null
}
def "extract http headers with out of range span ID"() {
setup:
final Map<String, String> actual = [
def headers = [
(TRACE_ID_KEY.toUpperCase()) : "0",
(SPAN_ID_KEY.toUpperCase()) : "-1",
(OT_BAGGAGE_PREFIX.toUpperCase() + "k1"): "v1",
@ -140,19 +120,43 @@ class DatadogHttpExtractorTest extends Specification {
SOME_HEADER : "my-interesting-info",
]
if (samplingPriority != PrioritySampling.UNSET) {
actual.put(SAMPLING_PRIORITY_KEY, String.valueOf(samplingPriority))
}
when:
extractor.extract(new TextMapExtractAdapter(actual))
SpanContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
thrown(IllegalArgumentException)
context == null
}
def "more ID range validation"() {
setup:
def headers = [
(TRACE_ID_KEY.toUpperCase()): traceId,
(SPAN_ID_KEY.toUpperCase()) : spanId,
]
when:
final ExtractedContext context = extractor.extract(new TextMapExtractAdapter(headers))
then:
if (expectedTraceId) {
assert context.traceId == expectedTraceId
assert context.spanId == expectedSpanId
} else {
assert context == null
}
where:
samplingPriority | _
PrioritySampling.UNSET | _
PrioritySampling.SAMPLER_KEEP | _
gtTraceId | gSpanId | expectedTraceId | expectedSpanId
"-1" | "1" | null | "0"
"1" | "-1" | null | "0"
"0" | "1" | null | "0"
"1" | "0" | "1" | "0"
"$UINT64_MAX" | "1" | "$UINT64_MAX" | "1"
"${UINT64_MAX.plus(1)}" | "1" | null | "1"
"1" | "$UINT64_MAX" | "1" | "$UINT64_MAX"
"1" | "${UINT64_MAX.plus(1)}" | null | "0"
traceId = gtTraceId.toString()
spanId = gSpanId.toString()
}
}

View File

@ -13,10 +13,11 @@ import static datadog.opentracing.propagation.DatadogHttpCodec.OT_BAGGAGE_PREFIX
import static datadog.opentracing.propagation.DatadogHttpCodec.SAMPLING_PRIORITY_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.SPAN_ID_KEY
import static datadog.opentracing.propagation.DatadogHttpCodec.TRACE_ID_KEY
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
class DatadogHttpInjectorTest extends Specification {
DatadogHttpCodec.Injector injector = new DatadogHttpCodec.Injector()
HttpCodec.Injector injector = new DatadogHttpCodec.Injector()
def "inject http headers"() {
setup:
@ -24,8 +25,8 @@ class DatadogHttpInjectorTest extends Specification {
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
traceID,
spanID,
traceId,
spanId,
"0",
"fakeService",
"fakeOperation",
@ -50,8 +51,8 @@ class DatadogHttpInjectorTest extends Specification {
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
1 * carrier.put(TRACE_ID_KEY, traceID)
1 * carrier.put(SPAN_ID_KEY, spanID)
1 * carrier.put(TRACE_ID_KEY, traceId)
1 * carrier.put(SPAN_ID_KEY, spanId)
1 * carrier.put(OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(OT_BAGGAGE_PREFIX + "k2", "v2")
if (samplingPriority != PrioritySampling.UNSET) {
@ -63,11 +64,10 @@ class DatadogHttpInjectorTest extends Specification {
0 * _
where:
traceID | spanID | parentID | samplingPriority | origin
"1" | "2" | "0" | PrioritySampling.UNSET | null
"1" | "2" | "0" | PrioritySampling.SAMPLER_KEEP | "saipan"
// Test with numbers exceeding Long.MAX_VALUE (uint64)
"9523372036854775807" | "15815582334751494918" | "15815582334751494914" | PrioritySampling.UNSET | "saipan"
"18446744073709551615" | "18446744073709551614" | "18446744073709551613" | PrioritySampling.SAMPLER_KEEP | null
traceId | spanId | samplingPriority | origin
"1" | "2" | PrioritySampling.UNSET | null
"1" | "2" | PrioritySampling.SAMPLER_KEEP | "saipan"
UINT64_MAX.toString() | UINT64_MAX.minus(1).toString() | PrioritySampling.UNSET | "saipan"
UINT64_MAX.minus(1).toString() | UINT64_MAX.toString() | PrioritySampling.SAMPLER_KEEP | null
}
}

View File

@ -0,0 +1,82 @@
package datadog.opentracing.propagation
import datadog.trace.api.Config
import io.opentracing.SpanContext
import io.opentracing.propagation.TextMapExtractAdapter
import spock.lang.Shared
import spock.lang.Specification
import static datadog.opentracing.propagation.HttpCodec.UINT64_MAX
import static datadog.trace.api.Config.PropagationStyle.B3
import static datadog.trace.api.Config.PropagationStyle.DATADOG
class HttpExtractorTest extends Specification {
@Shared
String outOfRangeTraceId = UINT64_MAX.add(BigInteger.ONE)
def "extract http headers"() {
setup:
Config config = Mock(Config) {
getPropagationStylesToExtract() >> styles
}
HttpCodec.Extractor extractor = HttpCodec.createExtractor(config, ["SOME_HEADER": "some-tag"])
final Map<String, String> actual = [:]
if (datadogTraceId != null) {
actual.put(DatadogHttpCodec.TRACE_ID_KEY.toUpperCase(), datadogTraceId)
}
if (datadogSpanId != null) {
actual.put(DatadogHttpCodec.SPAN_ID_KEY.toUpperCase(), datadogSpanId)
}
if (b3TraceId != null) {
actual.put(B3HttpCodec.TRACE_ID_KEY.toUpperCase(), b3TraceId)
}
if (b3SpanId != null) {
actual.put(B3HttpCodec.SPAN_ID_KEY.toUpperCase(), b3SpanId)
}
if (putDatadogFields) {
actual.put("SOME_HEADER", "my-interesting-info")
}
when:
final SpanContext context = extractor.extract(new TextMapExtractAdapter(actual))
then:
if (tagContext) {
assert context instanceof TagContext
} else {
if (expectedTraceId == null) {
assert context == null
} else {
assert context.traceId == expectedTraceId
assert context.spanId == expectedSpanId
}
}
if (expectDatadogFields) {
assert context.tags == ["some-tag": "my-interesting-info"]
}
where:
styles | datadogTraceId | datadogSpanId | b3TraceId | b3SpanId | expectedTraceId | expectedSpanId | putDatadogFields | expectDatadogFields | tagContext
[DATADOG, B3] | "1" | "2" | "a" | "b" | "1" | "2" | true | true | false
[DATADOG, B3] | null | null | "a" | "b" | "a" | "b" | false | false | true
[DATADOG, B3] | null | null | "a" | "b" | null | null | true | true | true
[DATADOG] | "1" | "2" | "a" | "b" | "1" | "2" | true | true | false
[B3] | "1" | "2" | "a" | "b" | "10" | "11" | false | false | false
[B3, DATADOG] | "1" | "2" | "a" | "b" | "10" | "11" | false | false | false
[] | "1" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "abc" | "2" | "a" | "b" | "10" | "11" | false | false | false
[DATADOG] | "abc" | "2" | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | outOfRangeTraceId.toString() | "2" | "a" | "b" | "10" | "11" | false | false | false
[DATADOG, B3] | "1" | outOfRangeTraceId.toString() | "a" | "b" | "10" | "11" | false | false | false
[DATADOG] | outOfRangeTraceId.toString() | "2" | "a" | "b" | null | null | false | false | false
[DATADOG] | "1" | outOfRangeTraceId.toString() | "a" | "b" | null | null | false | false | false
[DATADOG, B3] | "1" | "2" | outOfRangeTraceId.toString() | "b" | "1" | "2" | true | false | false
[DATADOG, B3] | "1" | "2" | "a" | outOfRangeTraceId.toString() | "1" | "2" | true | false | false
}
}

View File

@ -0,0 +1,88 @@
package datadog.opentracing.propagation
import datadog.opentracing.DDSpanContext
import datadog.opentracing.DDTracer
import datadog.opentracing.PendingTrace
import datadog.trace.api.Config
import datadog.trace.api.sampling.PrioritySampling
import datadog.trace.common.writer.ListWriter
import io.opentracing.propagation.TextMapInjectAdapter
import spock.lang.Specification
import static datadog.trace.api.Config.PropagationStyle.B3
import static datadog.trace.api.Config.PropagationStyle.DATADOG
class HttpInjectorTest extends Specification {
def "inject http headers"() {
setup:
Config config = Mock(Config) {
getPropagationStylesToInject() >> styles
}
HttpCodec.Injector injector = HttpCodec.createInjector(config)
def traceId = "1"
def spanId = "2"
def writer = new ListWriter()
def tracer = new DDTracer(writer)
final DDSpanContext mockedContext =
new DDSpanContext(
traceId,
spanId,
"0",
"fakeService",
"fakeOperation",
"fakeResource",
samplingPriority,
origin,
new HashMap<String, String>() {
{
put("k1", "v1")
put("k2", "v2")
}
},
false,
"fakeType",
null,
new PendingTrace(tracer, "1", [:]),
tracer)
final Map<String, String> carrier = Mock()
when:
injector.inject(mockedContext, new TextMapInjectAdapter(carrier))
then:
if (styles.contains(DATADOG)) {
1 * carrier.put(DatadogHttpCodec.TRACE_ID_KEY, traceId)
1 * carrier.put(DatadogHttpCodec.SPAN_ID_KEY, spanId)
1 * carrier.put(DatadogHttpCodec.OT_BAGGAGE_PREFIX + "k1", "v1")
1 * carrier.put(DatadogHttpCodec.OT_BAGGAGE_PREFIX + "k2", "v2")
if (samplingPriority != PrioritySampling.UNSET) {
1 * carrier.put(DatadogHttpCodec.SAMPLING_PRIORITY_KEY, "$samplingPriority")
}
if (origin) {
1 * carrier.put(DatadogHttpCodec.ORIGIN_KEY, origin)
}
}
if (styles.contains(B3)) {
1 * carrier.put(B3HttpCodec.TRACE_ID_KEY, traceId)
1 * carrier.put(B3HttpCodec.SPAN_ID_KEY, spanId)
if (samplingPriority != PrioritySampling.UNSET) {
1 * carrier.put(B3HttpCodec.SAMPLING_PRIORITY_KEY, "1")
}
}
0 * _
where:
styles | samplingPriority | origin
[DATADOG, B3] | PrioritySampling.UNSET | null
[DATADOG, B3] | PrioritySampling.SAMPLER_KEEP | "saipan"
[DATADOG] | PrioritySampling.UNSET | null
[DATADOG] | PrioritySampling.SAMPLER_KEEP | "saipan"
[B3] | PrioritySampling.UNSET | null
[B3] | PrioritySampling.SAMPLER_KEEP | "saipan"
[B3, DATADOG] | PrioritySampling.SAMPLER_KEEP | "saipan"
}
}

View File

@ -1,6 +1,7 @@
package datadog.trace
import datadog.opentracing.DDTracer
import datadog.opentracing.propagation.HttpCodec
import datadog.trace.api.Config
import datadog.trace.common.sampling.AllSampler
import datadog.trace.common.sampling.RateByServiceSampler
@ -46,6 +47,9 @@ class DDTracerTest extends Specification {
tracer.writer.toString() == "DDAgentWriter { api=DDApi { tracesUrl=http://localhost:8126/v0.3/traces } }"
tracer.spanContextDecorators.size() == 13
tracer.injector instanceof HttpCodec.CompoundInjector
tracer.extractor instanceof HttpCodec.CompoundExtractor
}
@ -78,7 +82,8 @@ class DDTracerTest extends Specification {
when:
def config = new Config()
def tracer = new DDTracer(config)
def taggedHeaders = tracer.extractor.taggedHeaders
// Datadog extractor gets placed first
def taggedHeaders = tracer.extractor.extractors[0].taggedHeaders
then:
tracer.defaultSpanTags == map