Merge branch 'master' of github.com:DataDog/dd-trace-java into labbati/jdbc-error-glasshfish

Luca Abbati 2019-05-27 15:18:18 +02:00
commit 13cf53827d
GPG Key ID: C901DDA2FFE14529
24 changed files with 328 additions and 64 deletions

.github/CODEOWNERS (new file)

@@ -0,0 +1,4 @@
+# Automatically assign the team as a reviewer.
+# https://help.github.com/en/articles/about-code-owners
+* @DataDog/apm-java


@@ -4,7 +4,10 @@ plugins {
 apply from: "${rootDir}/gradle/java.gradle"
 
 dependencies {
-  compile 'com.datadoghq:jmxfetch:0.27.0'
+  compile('com.datadoghq:jmxfetch:0.29.0') {
+    exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+    exclude group: 'log4j', module: 'log4j'
+  }
   compile deps.slf4j
   compile project(':dd-trace-api')
 }
@@ -32,12 +35,23 @@ tasks.register("submodulesUpdate", Exec) {
   group 'Build Setup'
   description 'Initializes and updates integrations-core git submodule'
   commandLine 'git', 'submodule', 'update', '--init', 'integrations-core'
+  def submoduleHead = file("${project.rootDir}/.git/modules/dd-java-agent/agent-jmxfetch/integrations-core/HEAD")
+  if (submoduleHead.exists()) {
+    inputs.file "${project.rootDir}/.git/modules/dd-java-agent/agent-jmxfetch/integrations-core/HEAD"
+  }
+  def integrationsCore = file("$projectDir/integrations-core")
+  outputs.dir integrationsCore
+  if (integrationsCore.list().length == 0) {
+    outputs.upToDateWhen { false }
+  }
 }
 
 tasks.register("copyMetricConfigs", Exec) {
   group 'Build Setup'
   description 'Copy metrics.yaml files from integrations-core into resources'
   commandLine './copy-metric-configs.sh', 'integrations-core', sourceSets.main.output.resourcesDir
+  inputs.dir file("$projectDir/integrations-core")
+  outputs.dir sourceSets.main.output.resourcesDir
   doFirst {
     // Ensure the resources directory is available.
     file(sourceSets.main.output.resourcesDir).mkdirs()

@@ -1 +1 @@
-Subproject commit 3e38b4e75edcee3ca84f022ea50240b0fc0537f2
+Subproject commit e6a01f9e885ac9b71c0ffec8c28dc75668570b15


@@ -1,5 +1,7 @@
 package datadog.trace.agent.jmxfetch;
 
+import static org.datadog.jmxfetch.AppConfig.ACTION_COLLECT;
+
 import com.google.common.collect.ImmutableList;
 import datadog.trace.api.Config;
 import java.io.IOException;
@@ -16,6 +18,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.io.IOUtils;
 import org.datadog.jmxfetch.App;
 import org.datadog.jmxfetch.AppConfig;
+import org.datadog.jmxfetch.reporter.ReporterFactory;
 
 @Slf4j
 public class JMXFetch {
@@ -38,6 +41,14 @@ public class JMXFetch {
       return;
     }
 
+    if (!log.isDebugEnabled()
+        && System.getProperty("org.slf4j.simpleLogger.log.org.datadog.jmxfetch") == null) {
+      // Reduce noisiness of jmxfetch logging.
+      System.setProperty("org.slf4j.simpleLogger.log.org.datadog.jmxfetch", "warn");
+    }
+
+    final String jmxFetchConfigDir = config.getJmxFetchConfigDir();
+    final List<String> jmxFetchConfigs = config.getJmxFetchConfigs();
     final List<String> internalMetricsConfigs = getInternalMetricFiles();
     final List<String> metricsConfigs = config.getJmxFetchMetricsConfigs();
     final Integer checkPeriod = config.getJmxFetchCheckPeriod();
@@ -48,7 +59,9 @@ public class JMXFetch {
     final String logLevel = getLogLevel();
 
     log.info(
-        "JMXFetch config: {} {} {} {} {} {} {} {}",
+        "JMXFetch config: {} {} {} {} {} {} {} {} {} {}",
+        jmxFetchConfigDir,
+        jmxFetchConfigs,
         internalMetricsConfigs,
         metricsConfigs,
         checkPeriod,
@@ -57,17 +70,24 @@ public class JMXFetch {
         reporter,
         logLocation,
         logLevel);
 
-    final AppConfig appConfig =
-        AppConfig.create(
-            DEFAULT_CONFIGS,
-            internalMetricsConfigs,
-            metricsConfigs,
-            checkPeriod,
-            refreshBeansPeriod,
-            globalTags,
-            reporter,
-            logLocation,
-            logLevel);
+    final AppConfig.AppConfigBuilder configBuilder =
+        AppConfig.builder()
+            .action(ImmutableList.of(ACTION_COLLECT))
+            .confdDirectory(jmxFetchConfigDir)
+            .yamlFileList(jmxFetchConfigs)
+            .targetDirectInstances(true)
+            .instanceConfigResources(DEFAULT_CONFIGS)
+            .metricConfigResources(internalMetricsConfigs)
+            .metricConfigFiles(metricsConfigs)
+            .refreshBeansPeriod(refreshBeansPeriod)
+            .globalTags(globalTags)
+            .reporter(ReporterFactory.getReporter(reporter));
+
+    if (checkPeriod != null) {
+      configBuilder.checkPeriod(checkPeriod);
+    }
+    final AppConfig appConfig = configBuilder.build();
 
     final Thread thread =
         new Thread(
@@ -131,7 +151,7 @@ public class JMXFetch {
     for (final String config : split) {
       integrationName.clear();
      integrationName.add(config.replace(".yaml", ""));
-      if (Config.integrationEnabled(integrationName, false)) {
+      if (Config.jmxFetchIntegrationEnabled(integrationName, false)) {
        final URL resource = JMXFetch.class.getResource("metricconfigs/" + config);
        result.add(resource.getPath().split("\\.jar!/")[1]);
       }
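
The logging guard added above only supplies a default when debug logging is off and the user has not already picked a level for jmxfetch. A minimal standalone sketch of that pattern, with the property name taken from the diff; the class and method names here are illustrative only, not part of the agent:

public class JmxFetchLogLevelDefault {
  private static final String JMXFETCH_LOG_PROPERTY =
      "org.slf4j.simpleLogger.log.org.datadog.jmxfetch";

  /** Applies a "warn" default for jmxfetch logging unless the user already chose a level. */
  static void applyDefaultLogLevel(final boolean debugEnabled) {
    if (!debugEnabled && System.getProperty(JMXFETCH_LOG_PROPERTY) == null) {
      // Reduce noisiness of jmxfetch logging, but never override an explicit setting.
      System.setProperty(JMXFETCH_LOG_PROPERTY, "warn");
    }
  }

  public static void main(final String[] args) {
    applyDefaultLogLevel(false);
    System.out.println(JMXFETCH_LOG_PROPERTY + "=" + System.getProperty(JMXFETCH_LOG_PROPERTY));
  }
}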


@@ -3,6 +3,6 @@ init_config:
   new_gc_metrics: true
 
 instances:
-  - jmx_url: service:jmx:local:///
-    conf:
-      # Intentionally left empty for now
+  - jvm_direct: true
+    name: dd-java-agent default
+    conf: [] # Intentionally left empty for now


@@ -121,6 +121,16 @@ public class AgentInstaller {
         .or(nameStartsWith("com.intellij.rt.debugger."))
         .or(nameStartsWith("com.p6spy."))
         .or(nameStartsWith("com.newrelic."))
+        .or(nameStartsWith("com.dynatrace."))
+        .or(nameStartsWith("com.jloadtrace."))
+        .or(nameStartsWith("com.appdynamics."))
+        .or(nameStartsWith("com.singularity."))
+        .or(nameStartsWith("com.jinspired."))
+        .or(nameStartsWith("org.jinspired."))
+        .or(nameStartsWith("org.apache.log4j."))
+        .or(nameStartsWith("org.slf4j.").and(not(named("org.slf4j.MDC"))))
+        .or(nameContains("$JaxbAccessor"))
+        .or(nameContains("CGLIB$$"))
         .or(nameContains("javassist"))
         .or(nameContains(".asm."))
         .or(nameMatches("com\\.mchange\\.v2\\.c3p0\\..*Proxy"))


@@ -53,6 +53,7 @@ public class ClassLoaderMatcher {
     classesToSkip.add("sun.reflect.DelegatingClassLoader");
     classesToSkip.add("jdk.internal.reflect.DelegatingClassLoader");
     classesToSkip.add("clojure.lang.DynamicClassLoader");
+    classesToSkip.add("org.apache.cxf.common.util.ASMHelper$TypeHelperClassLoader");
     classesToSkip.add(DatadogClassLoader.class.getName());
     CLASSLOADER_CLASSES_TO_SKIP = Collections.unmodifiableSet(classesToSkip);
   }


@@ -107,9 +107,11 @@ dependencies {
 }
 
 tasks.withType(Test).configureEach {
-  jvmArgs "-Ddd.writer.type=LogWriter", "-Ddd.service.name=java-app"
-  jvmArgs "-Ddatadog.slf4j.simpleLogger.defaultLogLevel=debug"
-  jvmArgs "-Dorg.slf4j.simpleLogger.defaultLogLevel=debug"
+  jvmArgs "-Ddd.service.name=java-agent-tests"
+  jvmArgs "-Ddd.writer.type=LoggingWriter"
+  // Multi-threaded logging seems to be causing deadlocks with Gradle's log capture.
+  // jvmArgs "-Ddatadog.slf4j.simpleLogger.defaultLogLevel=debug"
+  // jvmArgs "-Dorg.slf4j.simpleLogger.defaultLogLevel=debug"
 
   doFirst {
     // Defining here to allow jacoco to be first on the command line.


@@ -0,0 +1,89 @@
+import datadog.opentracing.DDSpan
+import datadog.opentracing.scopemanager.ContinuableScope
+import datadog.trace.agent.test.AgentTestRunner
+import datadog.trace.api.Trace
+import io.opentracing.util.GlobalTracer
+
+import java.util.concurrent.ArrayBlockingQueue
+import java.util.concurrent.CompletableFuture
+import java.util.concurrent.ThreadPoolExecutor
+import java.util.concurrent.TimeUnit
+import java.util.function.Function
+import java.util.function.Supplier
+
+/**
+ * Note: ideally this should live with the rest of ExecutorInstrumentationTest,
+ * but this code needs java8 so we put it here for now.
+ */
+class CompletableFutureTest extends AgentTestRunner {
+
+  def "CompletableFuture test"() {
+    setup:
+    def pool = new ThreadPoolExecutor(1, 1, 1000, TimeUnit.NANOSECONDS, new ArrayBlockingQueue<Runnable>(1))
+    def differentPool = new ThreadPoolExecutor(1, 1, 1000, TimeUnit.NANOSECONDS, new ArrayBlockingQueue<Runnable>(1))
+    def supplier = new Supplier<String>() {
+      @Override
+      @Trace(operationName = "supplier")
+      String get() {
+        sleep(1000)
+        return "a"
+      }
+    }
+
+    def function = new Function<String, String>() {
+      @Override
+      @Trace(operationName = "function")
+      String apply(String s) {
+        return s + "c"
+      }
+    }
+
+    def future = new Supplier<CompletableFuture<String>>() {
+      @Override
+      @Trace(operationName = "parent")
+      CompletableFuture<String> get() {
+        ((ContinuableScope) GlobalTracer.get().scopeManager().active()).setAsyncPropagation(true)
+        return CompletableFuture.supplyAsync(supplier, pool)
+          .thenCompose({ s -> CompletableFuture.supplyAsync(new AppendingSupplier(s), differentPool) })
+          .thenApply(function)
+      }
+    }.get()
+
+    def result = future.get()
+
+    TEST_WRITER.waitForTraces(1)
+    List<DDSpan> trace = TEST_WRITER.get(0)
+
+    expect:
+    result == "abc"
+    TEST_WRITER.size() == 1
+    trace.size() == 4
+    trace.get(0).operationName == "parent"
+    trace.get(1).operationName == "function"
+    trace.get(1).parentId == trace.get(0).spanId
+    trace.get(2).operationName == "appendingSupplier"
+    trace.get(2).parentId == trace.get(0).spanId
+    trace.get(3).operationName == "supplier"
+    trace.get(3).parentId == trace.get(0).spanId
+
+    cleanup:
+    pool?.shutdown()
+    differentPool?.shutdown()
+  }
+
+  class AppendingSupplier implements Supplier<String> {
+    String letter
+
+    AppendingSupplier(String letter) {
+      this.letter = letter
+    }
+
+    @Override
+    @Trace(operationName = "appendingSupplier")
+    String get() {
+      return letter + "b"
+    }
+  }
+}


@@ -12,6 +12,11 @@ import org.apache.kafka.clients.producer.ProducerRecord;
 public abstract class KafkaDecorator extends ClientDecorator {
   public static final KafkaDecorator PRODUCER_DECORATE =
       new KafkaDecorator() {
+        @Override
+        protected String service() {
+          return "kafka";
+        }
+
         @Override
         protected String spanKind() {
           return Tags.SPAN_KIND_PRODUCER;
@@ -25,6 +30,16 @@ public abstract class KafkaDecorator extends ClientDecorator {
 
   public static final KafkaDecorator CONSUMER_DECORATE =
       new KafkaDecorator() {
+        @Override
+        protected String service() {
+          /*
+           Use default service name. Common use-case here is to have consumer span parent
+           children spans in instrumented application. Since service name is inherited it makes
+           sense to default that to application service name rather than 'kafka'.
+          */
+          return null;
+        }
+
         @Override
         protected String spanKind() {
           return Tags.SPAN_KIND_CONSUMER;
@@ -41,11 +56,6 @@ public abstract class KafkaDecorator extends ClientDecorator {
           return new String[] {"kafka"};
         }
 
-        @Override
-        protected String service() {
-          return "kafka";
-        }
-
         @Override
         protected String component() {
           return "java-kafka";


@@ -10,6 +10,7 @@ import static net.bytebuddy.matcher.ElementMatchers.takesArgument;
 import com.google.auto.service.AutoService;
 import datadog.trace.agent.tooling.Instrumenter;
 import io.opentracing.Scope;
+import io.opentracing.Span;
 import io.opentracing.propagation.Format;
 import io.opentracing.util.GlobalTracer;
 import java.util.Map;
@@ -70,7 +71,7 @@ public final class KafkaProducerInstrumentation extends Instrumenter.Default {
       PRODUCER_DECORATE.afterStart(scope);
       PRODUCER_DECORATE.onProduce(scope, record);
 
-      callback = new ProducerCallback(callback, scope);
+      callback = new ProducerCallback(callback, scope.span());
 
       // Do not inject headers for batch versions below 2
       // This is how similar check is being done in Kafka client itself:
@@ -115,24 +116,25 @@ public final class KafkaProducerInstrumentation extends Instrumenter.Default {
   public static class ProducerCallback implements Callback {
     private final Callback callback;
-    private final Scope scope;
+    private final Span span;
 
-    public ProducerCallback(final Callback callback, final Scope scope) {
+    public ProducerCallback(final Callback callback, final Span span) {
       this.callback = callback;
-      this.scope = scope;
+      this.span = span;
     }
 
     @Override
     public void onCompletion(final RecordMetadata metadata, final Exception exception) {
-      PRODUCER_DECORATE.onError(scope, exception);
-      try {
-        if (callback != null) {
-          callback.onCompletion(metadata, exception);
-        }
-      } finally {
-        PRODUCER_DECORATE.beforeFinish(scope);
-        scope.span().finish();
-        scope.close();
-      }
+      try (final Scope scope = GlobalTracer.get().scopeManager().activate(span, false)) {
+        PRODUCER_DECORATE.onError(span, exception);
+        try {
+          if (callback != null) {
+            callback.onCompletion(metadata, exception);
+          }
+        } finally {
+          PRODUCER_DECORATE.beforeFinish(span);
+          span.finish();
+        }
+      }
     }
   }
 }
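
The rewritten callback stops carrying a Scope, which is bound to the thread that created it, and instead captures the Span, re-activating it when onCompletion later runs on a producer I/O thread. A self-contained sketch of the same capture-and-reactivate pattern against the OpenTracing 0.31 API, using a plain executor in place of the Kafka producer:

import io.opentracing.Scope;
import io.opentracing.Span;
import io.opentracing.util.GlobalTracer;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class CaptureSpanForCallback {
  public static void main(final String[] args) throws Exception {
    final ExecutorService ioThread = Executors.newSingleThreadExecutor();
    try (final Scope parent = GlobalTracer.get().buildSpan("kafka.produce").startActive(false)) {
      // Capture the span (not the scope): scopes are thread-local and must not cross threads.
      final Span span = parent.span();
      ioThread.submit(() -> {
        // Re-activate the captured span on the callback thread, then finish it there.
        try (final Scope scope = GlobalTracer.get().scopeManager().activate(span, false)) {
          // ... report success/failure on the span here ...
        } finally {
          span.finish();
        }
      }).get();
    } finally {
      ioThread.shutdown();
    }
  }
}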


@@ -33,6 +33,10 @@ public class TracingIterable implements Iterable<ConsumerRecord> {
     private final String operationName;
     private final KafkaDecorator decorator;
 
+    /**
+     * Note: this may potentially create problems if this iterator is used from different threads.
+     * But at the moment we cannot do much about this.
+     */
     private Scope currentScope;
 
     public TracingIterator(


@@ -94,7 +94,7 @@ class KafkaClientTest extends AgentTestRunner {
       trace(1, 1) {
         // CONSUMER span 0
         span(0) {
-          serviceName "kafka"
+          serviceName "unnamed-java-app"
           operationName "kafka.consume"
           resourceName "Consume Topic $SHARED_TOPIC"
           spanType "queue"


@@ -80,13 +80,13 @@ class KafkaStreamsTest extends AgentTestRunner {
     KStream<String, String> textLines = builder.stream(STREAM_PENDING)
     def values = textLines
       .mapValues(new ValueMapper<String, String>() {
         @Override
         String apply(String textLine) {
           TEST_WRITER.waitForTraces(1) // ensure consistent ordering of traces
           getTestTracer().activeSpan().setTag("asdf", "testing")
           return textLine.toLowerCase()
         }
       })
 
     KafkaStreams streams
     try {
@@ -172,7 +172,7 @@ class KafkaStreamsTest extends AgentTestRunner {
       trace(2, 1) {
         // CONSUMER span 0
         span(0) {
-          serviceName "kafka"
+          serviceName "unnamed-java-app"
           operationName "kafka.consume"
           resourceName "Consume Topic $STREAM_PROCESSED"
           spanType "queue"


@@ -27,6 +27,16 @@ public class RabbitDecorator extends ClientDecorator {
 
   public static final RabbitDecorator CONSUMER_DECORATE =
       new RabbitDecorator() {
+        @Override
+        protected String service() {
+          /*
+           Use default service name. Common use-case here is to have consumer span parent
+           children spans in instrumented application. Since service name is inherited it makes
+           sense to default that to application service name rather than 'rabbitmq'.
+          */
+          return null;
+        }
+
         @Override
         protected String spanKind() {
           return Tags.SPAN_KIND_CONSUMER;


@@ -341,7 +341,14 @@ class RabbitMQTest extends AgentTestRunner {
     String errorMsg = null
   ) {
     trace.span(index) {
-      serviceName "rabbitmq"
+      switch (span.tags["amqp.command"]) {
+        case "basic.get":
+        case "basic.deliver":
+          serviceName "unnamed-java-app"
+          break
+        default:
+          serviceName "rabbitmq"
+      }
       operationName "amqp.command"
       resourceName resource
       switch (span.tags["amqp.command"]) {


@@ -14,6 +14,7 @@ import static net.bytebuddy.matcher.ElementMatchers.takesArguments;
 import com.google.auto.service.AutoService;
 import datadog.trace.agent.tooling.Instrumenter;
+import datadog.trace.context.TraceScope;
 import io.opentracing.Scope;
 import io.opentracing.util.GlobalTracer;
 import java.lang.reflect.Method;
@@ -107,6 +108,9 @@ public final class HandlerAdapterInstrumentation extends Instrumenter.Default {
       final String operationName = DECORATE.spanNameForClass(clazz) + "." + methodName;
       final Scope scope = GlobalTracer.get().buildSpan(operationName).startActive(true);
+      if (scope instanceof TraceScope) {
+        ((TraceScope) scope).setAsyncPropagation(true);
+      }
       DECORATE.afterStart(scope);
       return scope;
     }


@@ -52,7 +52,7 @@ class JMXFetchTest extends Specification {
   def "test jmxfetch config"() {
     setup:
     names.each {
-      System.setProperty("dd.integration.${it}.enabled", "$enable")
+      System.setProperty("dd.jmxfetch.${it}.enabled", "$enable")
     }
     def classLoader = IntegrationTestUtils.getJmxFetchClassLoader()
     // Have to set this so JMXFetch knows where to find resources


@@ -17,6 +17,8 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -201,7 +203,8 @@ public class IntegrationTestUtils {
     final ProcessBuilder processBuilder = new ProcessBuilder(commands.toArray(new String[0]));
     processBuilder.environment().putAll(envVars);
     final Process process = processBuilder.start();
-    final int result = process.waitFor();
+
+    waitFor(process, 30, TimeUnit.SECONDS);
 
     if (printOutputStreams) {
       final BufferedReader stdInput =
@@ -221,6 +224,25 @@ public class IntegrationTestUtils {
       }
       System.out.println("--- stderr end ---");
     }
-    return result;
+    return process.exitValue();
   }
+
+  private static void waitFor(final Process process, final long timeout, final TimeUnit unit)
+      throws InterruptedException, TimeoutException {
+    final long startTime = System.nanoTime();
+    long rem = unit.toNanos(timeout);
+
+    do {
+      try {
+        process.exitValue();
+        return;
+      } catch (final IllegalThreadStateException ex) {
+        if (rem > 0) {
+          Thread.sleep(Math.min(TimeUnit.NANOSECONDS.toMillis(rem) + 1, 100));
+        }
+      }
+      rem = unit.toNanos(timeout) - (System.nanoTime() - startTime);
+    } while (rem > 0);
+    throw new TimeoutException();
+  }
 }
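
The hand-rolled waitFor above polls Process.exitValue() until a deadline, presumably so the helper also works on older Java runtimes. On Java 8 and later the JDK offers the same behaviour directly via Process.waitFor(long, TimeUnit); a brief sketch of the built-in variant (the command line is illustrative only):

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

public class ProcessTimeoutExample {
  public static void main(final String[] args) throws Exception {
    // Hypothetical command; any external process is handled the same way.
    final Process process = new ProcessBuilder("java", "-version").inheritIO().start();

    // Java 8+: returns true if the process exited before the timeout elapsed.
    if (!process.waitFor(30, TimeUnit.SECONDS)) {
      process.destroyForcibly();
      throw new TimeoutException("process did not exit within 30 seconds");
    }
    System.out.println("exit code: " + process.exitValue());
  }
}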


@@ -65,6 +65,8 @@ public class Config {
   public static final String PROPAGATION_STYLE_INJECT = "propagation.style.inject";
 
   public static final String JMX_FETCH_ENABLED = "jmxfetch.enabled";
+  public static final String JMX_FETCH_CONFIG_DIR = "jmxfetch.config.dir";
+  public static final String JMX_FETCH_CONFIG = "jmxfetch.config";
   public static final String JMX_FETCH_METRICS_CONFIGS = "jmxfetch.metrics-configs";
   public static final String JMX_FETCH_CHECK_PERIOD = "jmxfetch.check-period";
   public static final String JMX_FETCH_REFRESH_BEANS_PERIOD = "jmxfetch.refresh-beans-period";
@@ -100,7 +102,7 @@ public class Config {
   private static final int DEFAULT_PARTIAL_FLUSH_MIN_SPANS = 1000;
   private static final String DEFAULT_PROPAGATION_STYLE_EXTRACT = PropagationStyle.DATADOG.name();
   private static final String DEFAULT_PROPAGATION_STYLE_INJECT = PropagationStyle.DATADOG.name();
-  private static final boolean DEFAULT_JMX_FETCH_ENABLED = false;
+  private static final boolean DEFAULT_JMX_FETCH_ENABLED = true;
 
   public static final int DEFAULT_JMX_FETCH_STATSD_PORT = 8125;
@@ -147,7 +149,9 @@ public class Config {
   @Getter private final Set<PropagationStyle> propagationStylesToInject;
 
   @Getter private final boolean jmxFetchEnabled;
-  @Getter private final List<String> jmxFetchMetricsConfigs;
+  @Getter private final String jmxFetchConfigDir;
+  @Getter private final List<String> jmxFetchConfigs;
+  @Deprecated @Getter private final List<String> jmxFetchMetricsConfigs;
   @Getter private final Integer jmxFetchCheckPeriod;
   @Getter private final Integer jmxFetchRefreshBeansPeriod;
   @Getter private final String jmxFetchStatsdHost;
@@ -220,6 +224,8 @@ public class Config {
 
     jmxFetchEnabled =
         getBooleanSettingFromEnvironment(JMX_FETCH_ENABLED, DEFAULT_JMX_FETCH_ENABLED);
+    jmxFetchConfigDir = getSettingFromEnvironment(JMX_FETCH_CONFIG_DIR, null);
+    jmxFetchConfigs = getListSettingFromEnvironment(JMX_FETCH_CONFIG, null);
     jmxFetchMetricsConfigs = getListSettingFromEnvironment(JMX_FETCH_METRICS_CONFIGS, null);
     jmxFetchCheckPeriod = getIntegerSettingFromEnvironment(JMX_FETCH_CHECK_PERIOD, null);
     jmxFetchRefreshBeansPeriod =
@@ -300,6 +306,8 @@ public class Config {
 
     jmxFetchEnabled =
        getPropertyBooleanValue(properties, JMX_FETCH_ENABLED, parent.jmxFetchEnabled);
+    jmxFetchConfigDir = properties.getProperty(JMX_FETCH_CONFIG_DIR, parent.jmxFetchConfigDir);
+    jmxFetchConfigs = getPropertyListValue(properties, JMX_FETCH_CONFIG, parent.jmxFetchConfigs);
     jmxFetchMetricsConfigs =
         getPropertyListValue(properties, JMX_FETCH_METRICS_CONFIGS, parent.jmxFetchMetricsConfigs);
     jmxFetchCheckPeriod =
@@ -326,7 +334,7 @@ public class Config {
     final Map<String, String> result = new HashMap<>(runtimeTags);
 
     if (reportHostName) {
-      String hostName = getHostName();
+      final String hostName = getHostName();
       if (null != hostName && !hostName.isEmpty()) {
         result.put(INTERNAL_HOST_NAME, hostName);
       }
@@ -391,6 +399,23 @@ public class Config {
     return anyEnabled;
   }
 
+  public static boolean jmxFetchIntegrationEnabled(
+      final SortedSet<String> integrationNames, final boolean defaultEnabled) {
+    // If default is enabled, we want to enable individually,
+    // if default is disabled, we want to disable individually.
+    boolean anyEnabled = defaultEnabled;
+    for (final String name : integrationNames) {
+      final boolean configEnabled =
+          getBooleanSettingFromEnvironment("jmxfetch." + name + ".enabled", defaultEnabled);
+      if (defaultEnabled) {
+        anyEnabled &= configEnabled;
+      } else {
+        anyEnabled |= configEnabled;
+      }
+    }
+    return anyEnabled;
+  }
+
   public static boolean traceAnalyticsIntegrationEnabled(
       final SortedSet<String> integrationNames, final boolean defaultEnabled) {
     // If default is enabled, we want to enable individually,
@@ -674,7 +699,7 @@ public class Config {
   private String getHostName() {
     try {
       return InetAddress.getLocalHost().getHostName();
-    } catch (UnknownHostException e) {
+    } catch (final UnknownHostException e) {
      // If we are not able to detect the hostname we do not throw an exception.
     }
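
jmxFetchIntegrationEnabled mirrors the existing integrationEnabled helper: with a true default, a single explicit dd.jmxfetch.<name>.enabled=false switches the listed group off; with a false default, a single explicit true switches it on. A short usage sketch against the method added above; the integration name "tomcat" is only an example:

import datadog.trace.api.Config;
import java.util.Arrays;
import java.util.TreeSet;

public class JmxFetchToggleExample {
  public static void main(final String[] args) {
    // Explicitly disable the example "tomcat" metric config via system property.
    System.setProperty("dd.jmxfetch.tomcat.enabled", "false");

    final TreeSet<String> names = new TreeSet<>(Arrays.asList("tomcat"));
    // Default enabled: the explicit "false" wins and the config is skipped.
    System.out.println(Config.jmxFetchIntegrationEnabled(names, true));  // -> false
    // Default disabled: nothing opted in, so it stays disabled.
    System.out.println(Config.jmxFetchIntegrationEnabled(names, false)); // -> false
  }
}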

View File

@@ -35,8 +35,8 @@ import static datadog.trace.api.Config.SERVICE_MAPPING
 import static datadog.trace.api.Config.SERVICE_NAME
 import static datadog.trace.api.Config.SPAN_TAGS
 import static datadog.trace.api.Config.TRACE_AGENT_PORT
-import static datadog.trace.api.Config.TRACE_REPORT_HOSTNAME
 import static datadog.trace.api.Config.TRACE_ENABLED
+import static datadog.trace.api.Config.TRACE_REPORT_HOSTNAME
 import static datadog.trace.api.Config.TRACE_RESOLVER_ENABLED
 import static datadog.trace.api.Config.WRITER_TYPE
@@ -84,7 +84,7 @@ class ConfigTest extends Specification {
     config.runtimeContextFieldInjection == true
     config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG]
     config.propagationStylesToInject.toList() == [Config.PropagationStyle.DATADOG]
-    config.jmxFetchEnabled == false
+    config.jmxFetchEnabled == true
     config.jmxFetchMetricsConfigs == []
     config.jmxFetchCheckPeriod == null
     config.jmxFetchRefreshBeansPeriod == null
@@ -125,7 +125,7 @@ class ConfigTest extends Specification {
     prop.setProperty(RUNTIME_CONTEXT_FIELD_INJECTION, "false")
     prop.setProperty(PROPAGATION_STYLE_EXTRACT, "Datadog, B3")
     prop.setProperty(PROPAGATION_STYLE_INJECT, "B3, Datadog")
-    prop.setProperty(JMX_FETCH_ENABLED, "true")
+    prop.setProperty(JMX_FETCH_ENABLED, "false")
     prop.setProperty(JMX_FETCH_METRICS_CONFIGS, "/foo.yaml,/bar.yaml")
     prop.setProperty(JMX_FETCH_CHECK_PERIOD, "100")
     prop.setProperty(JMX_FETCH_REFRESH_BEANS_PERIOD, "200")
@@ -156,7 +156,7 @@ class ConfigTest extends Specification {
     config.runtimeContextFieldInjection == false
     config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
     config.propagationStylesToInject.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
-    config.jmxFetchEnabled == true
+    config.jmxFetchEnabled == false
     config.jmxFetchMetricsConfigs == ["/foo.yaml", "/bar.yaml"]
     config.jmxFetchCheckPeriod == 100
     config.jmxFetchRefreshBeansPeriod == 200
@@ -188,7 +188,7 @@ class ConfigTest extends Specification {
     System.setProperty(PREFIX + RUNTIME_CONTEXT_FIELD_INJECTION, "false")
     System.setProperty(PREFIX + PROPAGATION_STYLE_EXTRACT, "Datadog, B3")
     System.setProperty(PREFIX + PROPAGATION_STYLE_INJECT, "B3, Datadog")
-    System.setProperty(PREFIX + JMX_FETCH_ENABLED, "true")
+    System.setProperty(PREFIX + JMX_FETCH_ENABLED, "false")
     System.setProperty(PREFIX + JMX_FETCH_METRICS_CONFIGS, "/foo.yaml,/bar.yaml")
     System.setProperty(PREFIX + JMX_FETCH_CHECK_PERIOD, "100")
     System.setProperty(PREFIX + JMX_FETCH_REFRESH_BEANS_PERIOD, "200")
@@ -219,7 +219,7 @@ class ConfigTest extends Specification {
     config.runtimeContextFieldInjection == false
     config.propagationStylesToExtract.toList() == [Config.PropagationStyle.DATADOG, Config.PropagationStyle.B3]
     config.propagationStylesToInject.toList() == [Config.PropagationStyle.B3, Config.PropagationStyle.DATADOG]
-    config.jmxFetchEnabled == true
+    config.jmxFetchEnabled == false
     config.jmxFetchMetricsConfigs == ["/foo.yaml", "/bar.yaml"]
     config.jmxFetchCheckPeriod == 100
     config.jmxFetchRefreshBeansPeriod == 200
@@ -478,6 +478,40 @@ class ConfigTest extends Specification {
     integrationNames = new TreeSet<>(names)
   }
 
+  def "verify integration jmxfetch config"() {
+    setup:
+    environmentVariables.set("DD_JMXFETCH_ORDER_ENABLED", "false")
+    environmentVariables.set("DD_JMXFETCH_TEST_ENV_ENABLED", "true")
+    environmentVariables.set("DD_JMXFETCH_DISABLED_ENV_ENABLED", "false")
+
+    System.setProperty("dd.jmxfetch.order.enabled", "true")
+    System.setProperty("dd.jmxfetch.test-prop.enabled", "true")
+    System.setProperty("dd.jmxfetch.disabled-prop.enabled", "false")
+
+    expect:
+    Config.jmxFetchIntegrationEnabled(integrationNames, defaultEnabled) == expected
+
+    where:
+    names                          | defaultEnabled | expected
+    []                             | true           | true
+    []                             | false          | false
+    ["invalid"]                    | true           | true
+    ["invalid"]                    | false          | false
+    ["test-prop"]                  | false          | true
+    ["test-env"]                   | false          | true
+    ["disabled-prop"]              | true           | false
+    ["disabled-env"]               | true           | false
+    ["other", "test-prop"]         | false          | true
+    ["other", "test-env"]          | false          | true
+    ["order"]                      | false          | true
+    ["test-prop", "disabled-prop"] | false          | true
+    ["disabled-env", "test-env"]   | false          | true
+    ["test-prop", "disabled-prop"] | true           | false
+    ["disabled-env", "test-env"]   | true           | false
+
+    integrationNames = new TreeSet<>(names)
+  }
+
   def "verify integration trace analytics config"() {
     setup:
     environmentVariables.set("DD_ORDER_ANALYTICS_ENABLED", "false")


@@ -3,6 +3,7 @@ plugins {
   id 'com.jfrog.artifactory' version '4.8.1'
   id 'com.jfrog.bintray' version '1.8.4'
   id 'org.unbroken-dome.test-sets' version '2.1.1'
+  id 'com.github.ben-manes.versions' version '0.21.0'
   id 'com.gradle.build-scan' version '2.2.1'
 
   // Not applying google java format by default because it gets confused by stray java build


@@ -159,7 +159,7 @@ public class DDApi {
       } else if (nextAllowedLogTime < System.currentTimeMillis()) {
         nextAllowedLogTime = System.currentTimeMillis() + MILLISECONDS_BETWEEN_ERROR_LOG;
         log.warn(
-            "Error while sending {} of {} traces to the DD agent. Status: {} (going silent for {} minutes)",
+            "Error while sending {} of {} traces to the DD agent. Status: {} {} (going silent for {} minutes)",
             traces.size(),
             representativeCount,
             response.code(),


@@ -1,3 +1,5 @@
+import java.time.Duration
+
 apply plugin: 'java'
 apply plugin: 'groovy'
 
@@ -301,8 +303,11 @@ for (def env : System.getenv().entrySet()) {
   }
 }
 
-// Disable all tests if skipTests property was specified
 tasks.withType(Test).configureEach {
+  // All tests must complete within 2 minutes.
+  timeout = Duration.ofMinutes(2)
+
+  // Disable all tests if skipTests property was specified
   onlyIf { !project.rootProject.hasProperty("skipTests") }
 }