Initialize file configuration (#5399)
This commit is contained in:
parent
c5c9fdd882
commit
90ab3665dc
|
|
@ -47,6 +47,7 @@ dependencyCheck {
|
|||
"annotationProcessor",
|
||||
"animalsniffer",
|
||||
"spotless-1972451482", // spotless-1972451482 is a weird configuration that's only added in jaeger-proto
|
||||
"js2p",
|
||||
"jmhAnnotationProcessor",
|
||||
"jmhCompileClasspath",
|
||||
"jmhRuntimeClasspath",
|
||||
|
|
|
|||
|
|
@ -1,8 +1,13 @@
|
|||
import de.undercouch.gradle.tasks.download.Download
|
||||
|
||||
plugins {
  id("otel.java-conventions")
  id("otel.publish-conventions")

  id("otel.animalsniffer-conventions")

  // Plugins used by the configuration-schema code generation pipeline defined below:
  // de.undercouch.download fetches the schema archive, org.jsonschema2pojo generates POJOs from it.
  id("de.undercouch.download")
  id("org.jsonschema2pojo")
}
|
||||
|
||||
// SDK modules that are still being developed.
|
||||
|
|
@ -19,8 +24,112 @@ dependencies {
|
|||
implementation(project(":sdk-extensions:autoconfigure-spi"))
|
||||
implementation("org.snakeyaml:snakeyaml-engine")
|
||||
|
||||
// io.opentelemetry.sdk.extension.incubator.fileconfig
|
||||
implementation("com.fasterxml.jackson.core:jackson-databind")
|
||||
implementation("com.fasterxml.jackson.dataformat:jackson-dataformat-yaml")
|
||||
implementation("org.yaml:snakeyaml:1.31")
|
||||
|
||||
testImplementation(project(":sdk:testing"))
|
||||
testImplementation(project(":sdk-extensions:autoconfigure"))
|
||||
|
||||
testImplementation("com.google.guava:guava-testlib")
|
||||
}
|
||||
|
||||
// The following tasks download the JSON Schema files from open-telemetry/opentelemetry-configuration and generate classes from the type definitions which are used with jackson-databind to parse JSON / YAML to the configuration schema.
|
||||
// The sequence of tasks is:
|
||||
// 1. downloadConfigurationSchema - download configuration schema from open-telemetry/opentelemetry-configuration
|
||||
// 2. unzipConfigurationSchema - unzip the configuration schema archive contents to $buildDir/configuration/
|
||||
// 3. generateJsonSchema2Pojo - generate java POJOs from the configuration schema
|
||||
// 4. jsonSchema2PojoPostProcessing - perform various post processing on the generated POJOs, e.g. replace javax.annotation.processing.Generated with javax.annotation.Generated, add @SuppressWarning("rawtypes") annotation
|
||||
// 5. overwriteJs2p - overwrite original generated classes with versions containing updated @Generated annotation
|
||||
// 6. deleteJs2pTmp - delete tmp directory
|
||||
// ... proceed with normal sourcesJar, compileJava, etc
|
||||
|
||||
// TODO(jack-berg): update ref to be released version when available
// Pinned commit SHA in open-telemetry/opentelemetry-configuration; pinning keeps the downloaded
// schema (and therefore the generated sources) stable across builds.
val configurationRef = "2107dbb6f2a6c99fe2f55d550796ee7e2286fd1d"
val configurationRepoZip = "https://github.com/open-telemetry/opentelemetry-configuration/archive/$configurationRef.zip"
|
||||
|
||||
// Step 1: download the configuration schema archive from open-telemetry/opentelemetry-configuration.
val downloadConfigurationSchema by tasks.registering(Download::class) {
  src(configurationRepoZip)
  dest("$buildDir/configuration/opentelemetry-configuration.zip")
  // Don't re-download if the archive is already present; the ref is a pinned SHA, so the
  // remote content for this URL never changes.
  overwrite(false)
}
|
||||
|
||||
// Step 2: unzip the configuration schema archive contents to $buildDir/configuration/.
val unzipConfigurationSchema by tasks.registering(Copy::class) {
  dependsOn(downloadConfigurationSchema)

  // NOTE(review): downloadConfigurationSchema.get() realizes the task eagerly at configuration
  // time; a lazy mapping over the provider would avoid that — confirm before changing.
  from(zipTree(downloadConfigurationSchema.get().dest))
  eachFile(closureOf<FileCopyDetails> {
    // Remove the top level folder "/opentelemetry-configuration-$configurationRef" so files land
    // directly under $buildDir/configuration/.
    val pathParts = path.split("/")
    path = pathParts.subList(1, pathParts.size).joinToString("/")
  })
  into("$buildDir/configuration/")
}
|
||||
|
||||
// Step 3: configure jsonschema2pojo to generate the model classes from the unzipped schema.
jsonSchema2Pojo {
  sourceFiles = setOf(file("$buildDir/configuration/schema"))
  targetDirectory = file("$buildDir/generated/sources/js2p/java/main")
  // Generated classes live in an .internal.model package to signal they are not public API.
  targetPackage = "io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model"

  // Clear old source files to avoid contaminated source dir when updating
  removeOldOutput = true

  // Prefer builders to setters
  includeSetters = false
  generateBuilders = true

  // Use title field to generate class name, instead of default which is based on filename / propertynames
  useTitleAsClassname = true

  // Force java 9+ @Generated annotation, since java 8 @Generated annotation isn't detected by
  // jsonSchema2Pojo and annotation is skipped altogether
  targetVersion = "1.9"
}
|
||||
|
||||
// Wire code generation into the schema pipeline: POJO generation must wait for the schema to be
// downloaded and unzipped. Use lazy tasks.named {} rather than eager tasks.getByName so the task
// is not realized at configuration time — consistent with the tasks.named<Checkstyle>(...) usage
// elsewhere in this script. The resulting TaskProvider is accepted by dependsOn(...) downstream.
val generateJsonSchema2Pojo = tasks.named("generateJsonSchema2Pojo") {
  dependsOn(unzipConfigurationSchema)
}
|
||||
|
||||
// Step 4: copy the generated sources to a tmp directory, rewriting file contents along the way.
val jsonSchema2PojoPostProcessing by tasks.registering(Copy::class) {
  dependsOn(generateJsonSchema2Pojo)

  from("$buildDir/generated/sources/js2p")
  into("$buildDir/generated/sources/js2p-tmp")
  // filter {} applies this transformation line-by-line to every copied file.
  filter {
    it
      // Replace java 9+ @Generated annotation with java 8 version
      .replace("import javax.annotation.processing.Generated", "import javax.annotation.Generated")
      // Add @SuppressWarnings("rawtypes") annotation to address raw types used in jsonschema2pojo builders
      .replace("@Generated(\"jsonschema2pojo\")", "@Generated(\"jsonschema2pojo\")\n@SuppressWarnings(\"rawtypes\")")
  }
}
|
||||
// Step 5: overwrite the original generated classes with the post-processed versions.
val overwriteJs2p by tasks.registering(Copy::class) {
  dependsOn(jsonSchema2PojoPostProcessing)

  from("$buildDir/generated/sources/js2p-tmp")
  into("$buildDir/generated/sources/js2p")
}
|
||||
// Step 6: delete the tmp directory now that its contents have been copied back.
val deleteJs2pTmp by tasks.registering(Delete::class) {
  dependsOn(overwriteJs2p)

  delete("$buildDir/generated/sources/js2p-tmp/")
}
|
||||
|
||||
// Ensure the full generation/post-processing pipeline has run before compilation and source
// packaging. Use lazy tasks.named {} rather than eager tasks.getByName so these tasks are not
// realized at configuration time — consistent with the tasks.named<Checkstyle>(...) usage below.
tasks.named("compileJava") { dependsOn(deleteJs2pTmp) }
tasks.named("sourcesJar") { dependsOn(deleteJs2pTmp) }
|
||||
|
||||
// Exclude jsonschema2pojo generated sources from checkstyle (generated code does not follow the
// project's style rules).
tasks.named<Checkstyle>("checkstyleMain") {
  exclude("**/fileconfig/internal/model/**")
}
|
||||
|
||||
tasks {
  withType<Test>().configureEach {
    environment(
      mapOf(
        // Expose the kitchen sink example file to tests
        "CONFIG_EXAMPLE_DIR" to "$buildDir/configuration/examples/"
      )
    )
  }
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
package io.opentelemetry.sdk.extension.incubator.fileconfig;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration;
|
||||
import java.io.InputStream;
|
||||
import org.yaml.snakeyaml.Yaml;
|
||||
|
||||
class ConfigurationReader {
|
||||
|
||||
private static final ObjectMapper MAPPER = new ObjectMapper();
|
||||
private static final Yaml YAML = new Yaml();
|
||||
|
||||
private ConfigurationReader() {}
|
||||
|
||||
/** Parse the {@code configuration} YAML and return the {@link OpenTelemetryConfiguration}. */
|
||||
static OpenTelemetryConfiguration parse(InputStream configuration) {
|
||||
Object yamlObj = YAML.load(configuration);
|
||||
return MAPPER.convertValue(yamlObj, OpenTelemetryConfiguration.class);
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,264 @@
|
|||
/*
|
||||
* Copyright The OpenTelemetry Authors
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
package io.opentelemetry.sdk.extension.incubator.fileconfig;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Aggregation;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOff;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AlwaysOn;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.AttributeLimits;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Attributes;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchLogRecordProcessor;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.BatchSpanProcessor;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Console;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ExplicitBucketHistogram;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Headers;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordExporter;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordLimits;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LogRecordProcessor;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.LoggerProvider;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MeterProvider;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricExporter;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.MetricReader;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OpenTelemetryConfiguration;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Otlp;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.OtlpMetric;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.ParentBased;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PeriodicMetricReader;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Prometheus;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.PullMetricReader;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Resource;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Sampler;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Selector;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SimpleSpanProcessor;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanExporter;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanLimits;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.SpanProcessor;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.Stream;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TraceIdRatioBased;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.TracerProvider;
|
||||
import io.opentelemetry.sdk.extension.incubator.fileconfig.internal.model.View;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
/**
 * Verifies that {@link ConfigurationReader#parse} maps the kitchen-sink example file from
 * open-telemetry/opentelemetry-configuration onto the generated model. The example file location
 * is supplied via the CONFIG_EXAMPLE_DIR environment variable (set by the build script).
 */
class ConfigurationReaderTest {

  @Test
  void read_KitchenSinkExampleFile() throws IOException {
    // Build the expected model object graph first, then parse the example file and compare
    // section by section (and finally the whole configuration).
    OpenTelemetryConfiguration expected = new OpenTelemetryConfiguration();

    expected.withFileFormat("0.1");

    // General config
    Resource resource =
        new Resource().withAttributes(new Attributes().withServiceName("unknown_service"));
    expected.withResource(resource);

    AttributeLimits attributeLimits =
        new AttributeLimits().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128);
    expected.withAttributeLimits(attributeLimits);

    List<String> propagators =
        Arrays.asList("tracecontext", "baggage", "b3", "b3multi", "jaeger", "xray", "ottrace");
    expected.withPropagators(propagators);

    // TracerProvider config
    TracerProvider tracerProvider = new TracerProvider();

    SpanLimits spanLimits =
        new SpanLimits()
            .withAttributeValueLengthLimit(4096)
            .withAttributeCountLimit(128)
            .withEventCountLimit(128)
            .withLinkCountLimit(128)
            .withEventAttributeCountLimit(128)
            .withLinkAttributeCountLimit(128);
    tracerProvider.withLimits(spanLimits);

    Sampler sampler =
        new Sampler()
            .withParentBased(
                new ParentBased()
                    .withRoot(
                        new Sampler()
                            .withTraceIdRatioBased(new TraceIdRatioBased().withRatio(0.0001)))
                    .withRemoteParentSampled(new Sampler().withAlwaysOn(new AlwaysOn()))
                    .withRemoteParentNotSampled(new Sampler().withAlwaysOff(new AlwaysOff()))
                    .withLocalParentSampled(new Sampler().withAlwaysOn(new AlwaysOn()))
                    .withLocalParentNotSampled(new Sampler().withAlwaysOff(new AlwaysOff())));
    tracerProvider.withSampler(sampler);

    SpanProcessor spanProcessor1 =
        new SpanProcessor()
            .withBatch(
                new BatchSpanProcessor()
                    .withScheduleDelay(5_000)
                    .withExportTimeout(30_000)
                    .withMaxQueueSize(2048)
                    .withMaxExportBatchSize(512)
                    .withExporter(
                        new SpanExporter()
                            .withOtlp(
                                new Otlp()
                                    .withProtocol("http/protobuf")
                                    .withEndpoint("http://localhost:4318")
                                    .withCertificate("/app/cert.pem")
                                    .withClientKey("/app/cert.pem")
                                    .withClientCertificate("/app/cert.pem")
                                    .withHeaders(
                                        new Headers().withAdditionalProperty("api-key", "1234"))
                                    .withCompression("gzip")
                                    .withTimeout(10_000))));
    SpanProcessor spanProcessor2 =
        new SpanProcessor()
            .withSimple(
                new SimpleSpanProcessor()
                    .withExporter(new SpanExporter().withConsole(new Console())));
    tracerProvider.withProcessors(Arrays.asList(spanProcessor1, spanProcessor2));

    expected.withTracerProvider(tracerProvider);
    // end TracerProvider config

    // LoggerProvider config
    LoggerProvider loggerProvider = new LoggerProvider();

    LogRecordLimits logRecordLimits =
        new LogRecordLimits().withAttributeValueLengthLimit(4096).withAttributeCountLimit(128);
    loggerProvider.withLimits(logRecordLimits);

    LogRecordProcessor logRecordProcessor =
        new LogRecordProcessor()
            .withBatch(
                new BatchLogRecordProcessor()
                    .withScheduleDelay(5_000)
                    .withExportTimeout(30_000)
                    .withMaxQueueSize(2048)
                    .withMaxExportBatchSize(512)
                    .withExporter(
                        new LogRecordExporter()
                            .withOtlp(
                                new Otlp()
                                    .withProtocol("http/protobuf")
                                    .withEndpoint("http://localhost:4318")
                                    .withCertificate("/app/cert.pem")
                                    .withClientKey("/app/cert.pem")
                                    .withClientCertificate("/app/cert.pem")
                                    .withHeaders(
                                        new Headers().withAdditionalProperty("api-key", "1234"))
                                    .withCompression("gzip")
                                    .withTimeout(10_000))));
    loggerProvider.withProcessors(Collections.singletonList(logRecordProcessor));

    expected.withLoggerProvider(loggerProvider);
    // end LoggerProvider config

    // MeterProvider config
    MeterProvider meterProvider = new MeterProvider();

    MetricReader metricReader1 =
        new MetricReader()
            .withPull(
                new PullMetricReader()
                    .withExporter(
                        new MetricExporter()
                            .withPrometheus(
                                new Prometheus().withHost("localhost").withPort(9464))));
    MetricReader metricReader2 =
        new MetricReader()
            .withPeriodic(
                new PeriodicMetricReader()
                    .withInterval(5_000)
                    .withTimeout(30_000)
                    .withExporter(
                        new MetricExporter()
                            .withOtlp(
                                new OtlpMetric()
                                    .withProtocol("http/protobuf")
                                    .withEndpoint("http://localhost:4318")
                                    .withCertificate("/app/cert.pem")
                                    .withClientKey("/app/cert.pem")
                                    .withClientCertificate("/app/cert.pem")
                                    .withHeaders(
                                        new Headers().withAdditionalProperty("api-key", "1234"))
                                    .withCompression("gzip")
                                    .withTimeout(10_000)
                                    .withTemporalityPreference("delta")
                                    .withDefaultHistogramAggregation(
                                        "exponential_bucket_histogram"))));
    MetricReader metricReader3 =
        new MetricReader()
            .withPeriodic(
                new PeriodicMetricReader()
                    .withExporter(new MetricExporter().withConsole(new Console())));
    meterProvider.withReaders(Arrays.asList(metricReader1, metricReader2, metricReader3));

    View view =
        new View()
            .withSelector(
                new Selector()
                    .withInstrumentName("my-instrument")
                    .withInstrumentType("histogram")
                    .withMeterName("my-meter")
                    .withMeterVersion("1.0.0")
                    .withMeterSchemaUrl("https://opentelemetry.io/schemas/1.16.0"))
            .withStream(
                new Stream()
                    .withName("new_instrument_name")
                    .withDescription("new_description")
                    .withAggregation(
                        new Aggregation()
                            .withExplicitBucketHistogram(
                                new ExplicitBucketHistogram()
                                    .withBoundaries(
                                        Arrays.asList(
                                            0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0,
                                            750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0))
                                    .withRecordMinMax(true)))
                    .withAttributeKeys(Arrays.asList("key1", "key2")));
    meterProvider.withViews(Collections.singletonList(view));

    expected.withMeterProvider(meterProvider);
    // end MeterProvider config

    try (FileInputStream configExampleFile =
        new FileInputStream(System.getenv("CONFIG_EXAMPLE_DIR") + "/kitchen-sink.yaml")) {
      OpenTelemetryConfiguration config = ConfigurationReader.parse(configExampleFile);

      // Section-by-section assertions first, so a failure points at the offending section
      // rather than producing one huge whole-object diff.
      // General config
      assertThat(config.getFileFormat()).isEqualTo("0.1");
      assertThat(config.getResource()).isEqualTo(resource);
      assertThat(config.getAttributeLimits()).isEqualTo(attributeLimits);
      assertThat(config.getPropagators()).isEqualTo(propagators);

      // TracerProvider config
      TracerProvider configTracerProvider = config.getTracerProvider();
      assertThat(configTracerProvider.getLimits()).isEqualTo(spanLimits);
      assertThat(configTracerProvider.getSampler()).isEqualTo(sampler);
      assertThat(configTracerProvider.getProcessors())
          .isEqualTo(Arrays.asList(spanProcessor1, spanProcessor2));

      // LoggerProvider config
      LoggerProvider configLoggerProvider = config.getLoggerProvider();
      assertThat(configLoggerProvider.getLimits()).isEqualTo(logRecordLimits);
      assertThat(configLoggerProvider.getProcessors())
          .isEqualTo(Collections.singletonList(logRecordProcessor));

      // MeterProvider config
      MeterProvider configMeterProvider = config.getMeterProvider();
      assertThat(configMeterProvider.getReaders())
          .isEqualTo(Arrays.asList(metricReader1, metricReader2, metricReader3));
      assertThat(configMeterProvider.getViews()).isEqualTo(Collections.singletonList(view));

      // All configuration
      assertThat(config).isEqualTo(expected);
    }
  }
}
|
||||
|
|
@ -3,6 +3,8 @@ pluginManagement {
|
|||
id("com.github.ben-manes.versions") version "0.47.0"
|
||||
id("com.github.johnrengelman.shadow") version "8.1.1"
|
||||
id("com.gradle.enterprise") version "3.14.1"
|
||||
id("de.undercouch.download") version "5.4.0"
|
||||
id("org.jsonschema2pojo") version "1.2.1"
|
||||
id("io.github.gradle-nexus.publish-plugin") version "1.3.0"
|
||||
id("org.graalvm.buildtools.native") version "0.9.23"
|
||||
}
|
||||
|
|
|
|||
Loading…
Reference in New Issue