Exponential Histogram support to the Prometheus exporter (#6015)

Signed-off-by: Fabian Stäber <fabian@fstab.de>
Fabian Stäber authored 2024-01-03 16:51:35 +01:00, committed by GitHub
parent de65a4ba1e
commit d45fb3f5dc
19 changed files with 1897 additions and 2334 deletions

View File

@@ -69,6 +69,7 @@ val DEPENDENCIES = listOf(
"io.opentelemetry.contrib:opentelemetry-aws-xray-propagator:1.29.0-alpha",
"io.opentracing:opentracing-api:0.33.0",
"io.opentracing:opentracing-noop:0.33.0",
"io.prometheus:prometheus-metrics-exporter-httpserver:1.1.0",
"junit:junit:4.13.2",
"nl.jqno.equalsverifier:equalsverifier:3.15.5",
"org.awaitility:awaitility:4.2.0",

View File

@@ -12,14 +12,15 @@ dependencies {
api(project(":sdk:metrics"))
implementation(project(":sdk-extensions:autoconfigure-spi"))
implementation("io.prometheus:prometheus-metrics-exporter-httpserver")
compileOnly("com.sun.net.httpserver:http")
compileOnly("com.google.auto.value:auto-value-annotations")
annotationProcessor("com.google.auto.value:auto-value")
testImplementation(project(":sdk:testing"))
testImplementation("io.opentelemetry.proto:opentelemetry-proto")
testImplementation("com.sun.net.httpserver:http")
testImplementation("com.google.guava:guava")
testImplementation("com.linecorp.armeria:armeria")
testImplementation("com.linecorp.armeria:armeria-junit5")

View File

@@ -1,48 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import java.util.regex.Pattern;
/** Sanitizes a metric or label name. */
class NameSanitizer implements Function<String, String> {
static final NameSanitizer INSTANCE = new NameSanitizer();
static final Pattern SANITIZE_CONSECUTIVE_UNDERSCORES = Pattern.compile("[_]{2,}");
private static final Pattern SANITIZE_PREFIX_PATTERN = Pattern.compile("^[^a-zA-Z_:]");
private static final Pattern SANITIZE_BODY_PATTERN = Pattern.compile("[^a-zA-Z0-9_:]");
private final Function<String, String> delegate;
private final Map<String, String> cache = new ConcurrentHashMap<>();
NameSanitizer() {
this(NameSanitizer::sanitizeMetricName);
}
// visible for testing
NameSanitizer(Function<String, String> delegate) {
this.delegate = delegate;
}
@Override
public String apply(String labelName) {
return cache.computeIfAbsent(labelName, delegate);
}
private static String sanitizeMetricName(String metricName) {
return SANITIZE_CONSECUTIVE_UNDERSCORES
.matcher(
SANITIZE_BODY_PATTERN
.matcher(SANITIZE_PREFIX_PATTERN.matcher(metricName).replaceFirst("_"))
.replaceAll("_"))
.replaceAll("_");
}
}
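The removed NameSanitizer above is superseded by PrometheusNaming.sanitizeMetricName from the new prometheus-metrics library (used in the converter below). For comparison, a minimal, self-contained sketch of the regex pipeline it implemented; the class and method names here are illustrative:

import java.util.regex.Pattern;

class NameSanitizerDemo {
  private static final Pattern PREFIX = Pattern.compile("^[^a-zA-Z_:]");
  private static final Pattern BODY = Pattern.compile("[^a-zA-Z0-9_:]");
  private static final Pattern UNDERSCORES = Pattern.compile("[_]{2,}");

  static String sanitize(String name) {
    String s = PREFIX.matcher(name).replaceFirst("_"); // invalid first character -> '_'
    s = BODY.matcher(s).replaceAll("_");               // invalid body characters -> '_'
    return UNDERSCORES.matcher(s).replaceAll("_");     // collapse runs of underscores
  }

  public static void main(String[] args) {
    System.out.println(sanitize("my.metric/name")); // my_metric_name
    System.out.println(sanitize("0count"));         // _count
  }
}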

View File

@@ -0,0 +1,551 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import static io.prometheus.metrics.model.snapshots.PrometheusNaming.sanitizeLabelName;
import static io.prometheus.metrics.model.snapshots.PrometheusNaming.sanitizeMetricName;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.HistogramData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.SumData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.data.ValueAtQuantile;
import io.opentelemetry.sdk.resources.Resource;
import io.prometheus.metrics.model.snapshots.ClassicHistogramBuckets;
import io.prometheus.metrics.model.snapshots.CounterSnapshot;
import io.prometheus.metrics.model.snapshots.CounterSnapshot.CounterDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.Exemplar;
import io.prometheus.metrics.model.snapshots.Exemplars;
import io.prometheus.metrics.model.snapshots.GaugeSnapshot;
import io.prometheus.metrics.model.snapshots.GaugeSnapshot.GaugeDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.HistogramSnapshot;
import io.prometheus.metrics.model.snapshots.HistogramSnapshot.HistogramDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.InfoSnapshot;
import io.prometheus.metrics.model.snapshots.InfoSnapshot.InfoDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.Labels;
import io.prometheus.metrics.model.snapshots.MetricMetadata;
import io.prometheus.metrics.model.snapshots.MetricSnapshot;
import io.prometheus.metrics.model.snapshots.MetricSnapshots;
import io.prometheus.metrics.model.snapshots.NativeHistogramBuckets;
import io.prometheus.metrics.model.snapshots.Quantile;
import io.prometheus.metrics.model.snapshots.Quantiles;
import io.prometheus.metrics.model.snapshots.SummarySnapshot;
import io.prometheus.metrics.model.snapshots.SummarySnapshot.SummaryDataPointSnapshot;
import io.prometheus.metrics.model.snapshots.Unit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/** Convert OpenTelemetry {@link MetricData} to Prometheus {@link MetricSnapshots}. */
final class Otel2PrometheusConverter {
private static final Logger LOGGER = Logger.getLogger(Otel2PrometheusConverter.class.getName());
private static final ThrottlingLogger THROTTLING_LOGGER = new ThrottlingLogger(LOGGER);
private final boolean otelScopeEnabled;
private static final String OTEL_SCOPE_NAME = "otel_scope_name";
private static final String OTEL_SCOPE_VERSION = "otel_scope_version";
private static final long NANOS_PER_MILLISECOND = TimeUnit.MILLISECONDS.toNanos(1);
/**
* Constructor with feature flag parameter.
*
* @param otelScopeEnabled enable generation of the OpenTelemetry instrumentation scope info
* metric and labels.
*/
Otel2PrometheusConverter(boolean otelScopeEnabled) {
this.otelScopeEnabled = otelScopeEnabled;
}
MetricSnapshots convert(@Nullable Collection<MetricData> metricDataCollection) {
if (metricDataCollection == null || metricDataCollection.isEmpty()) {
return MetricSnapshots.of();
}
Map<String, MetricSnapshot> snapshotsByName = new HashMap<>(metricDataCollection.size());
Resource resource = null;
Set<InstrumentationScopeInfo> scopes = new LinkedHashSet<>();
for (MetricData metricData : metricDataCollection) {
MetricSnapshot snapshot = convert(metricData);
if (snapshot == null) {
continue;
}
putOrMerge(snapshotsByName, snapshot);
if (resource == null) {
resource = metricData.getResource();
}
if (otelScopeEnabled && !metricData.getInstrumentationScopeInfo().getAttributes().isEmpty()) {
scopes.add(metricData.getInstrumentationScopeInfo());
}
}
if (resource != null) {
putOrMerge(snapshotsByName, makeTargetInfo(resource));
}
if (otelScopeEnabled && !scopes.isEmpty()) {
putOrMerge(snapshotsByName, makeScopeInfo(scopes));
}
return new MetricSnapshots(snapshotsByName.values());
}
@Nullable
private MetricSnapshot convert(MetricData metricData) {
// Note that AggregationTemporality.DELTA should never happen
// because PrometheusMetricReader#getAggregationTemporality returns CUMULATIVE.
MetricMetadata metadata = convertMetadata(metricData);
InstrumentationScopeInfo scope = metricData.getInstrumentationScopeInfo();
switch (metricData.getType()) {
case LONG_GAUGE:
return convertLongGauge(metadata, scope, metricData.getLongGaugeData().getPoints());
case DOUBLE_GAUGE:
return convertDoubleGauge(metadata, scope, metricData.getDoubleGaugeData().getPoints());
case LONG_SUM:
SumData<LongPointData> longSumData = metricData.getLongSumData();
if (longSumData.getAggregationTemporality() == AggregationTemporality.DELTA) {
return null;
} else if (longSumData.isMonotonic()) {
return convertLongCounter(metadata, scope, longSumData.getPoints());
} else {
return convertLongGauge(metadata, scope, longSumData.getPoints());
}
case DOUBLE_SUM:
SumData<DoublePointData> doubleSumData = metricData.getDoubleSumData();
if (doubleSumData.getAggregationTemporality() == AggregationTemporality.DELTA) {
return null;
} else if (doubleSumData.isMonotonic()) {
return convertDoubleCounter(metadata, scope, doubleSumData.getPoints());
} else {
return convertDoubleGauge(metadata, scope, doubleSumData.getPoints());
}
case HISTOGRAM:
HistogramData histogramData = metricData.getHistogramData();
if (histogramData.getAggregationTemporality() == AggregationTemporality.DELTA) {
return null;
} else {
return convertHistogram(metadata, scope, histogramData.getPoints());
}
case EXPONENTIAL_HISTOGRAM:
ExponentialHistogramData exponentialHistogramData =
metricData.getExponentialHistogramData();
if (exponentialHistogramData.getAggregationTemporality() == AggregationTemporality.DELTA) {
return null;
} else {
return convertExponentialHistogram(metadata, scope, exponentialHistogramData.getPoints());
}
case SUMMARY:
return convertSummary(metadata, scope, metricData.getSummaryData().getPoints());
}
return null;
}
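// Summary of the mapping implemented above (illustrative note, not part of the source):
//   LONG_GAUGE / DOUBLE_GAUGE           -> GaugeSnapshot
//   monotonic LONG_SUM / DOUBLE_SUM     -> CounterSnapshot
//   non-monotonic LONG_SUM / DOUBLE_SUM -> GaugeSnapshot
//   HISTOGRAM                           -> HistogramSnapshot (classic buckets)
//   EXPONENTIAL_HISTOGRAM               -> HistogramSnapshot (native buckets)
//   SUMMARY                             -> SummarySnapshot
//   any DELTA temporality               -> dropped (returns null)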
private GaugeSnapshot convertLongGauge(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<LongPointData> dataPoints) {
List<GaugeDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (LongPointData longData : dataPoints) {
data.add(
new GaugeDataPointSnapshot(
(double) longData.getValue(),
convertAttributes(scope, longData.getAttributes()),
convertLongExemplar(longData.getExemplars())));
}
return new GaugeSnapshot(metadata, data);
}
private CounterSnapshot convertLongCounter(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<LongPointData> dataPoints) {
List<CounterDataPointSnapshot> data =
new ArrayList<CounterDataPointSnapshot>(dataPoints.size());
for (LongPointData longData : dataPoints) {
data.add(
new CounterDataPointSnapshot(
(double) longData.getValue(),
convertAttributes(scope, longData.getAttributes()),
convertLongExemplar(longData.getExemplars()),
longData.getStartEpochNanos() / NANOS_PER_MILLISECOND));
}
return new CounterSnapshot(metadata, data);
}
private GaugeSnapshot convertDoubleGauge(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<DoublePointData> dataPoints) {
List<GaugeDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (DoublePointData doubleData : dataPoints) {
data.add(
new GaugeDataPointSnapshot(
doubleData.getValue(),
convertAttributes(scope, doubleData.getAttributes()),
convertDoubleExemplar(doubleData.getExemplars())));
}
return new GaugeSnapshot(metadata, data);
}
private CounterSnapshot convertDoubleCounter(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<DoublePointData> dataPoints) {
List<CounterDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (DoublePointData doubleData : dataPoints) {
data.add(
new CounterDataPointSnapshot(
doubleData.getValue(),
convertAttributes(scope, doubleData.getAttributes()),
convertDoubleExemplar(doubleData.getExemplars()),
doubleData.getStartEpochNanos() / NANOS_PER_MILLISECOND));
}
return new CounterSnapshot(metadata, data);
}
private HistogramSnapshot convertHistogram(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<HistogramPointData> dataPoints) {
List<HistogramDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (HistogramPointData histogramData : dataPoints) {
List<Double> boundaries = new ArrayList<>(histogramData.getBoundaries().size() + 1);
boundaries.addAll(histogramData.getBoundaries());
boundaries.add(Double.POSITIVE_INFINITY);
data.add(
new HistogramDataPointSnapshot(
ClassicHistogramBuckets.of(boundaries, histogramData.getCounts()),
histogramData.getSum(),
convertAttributes(scope, histogramData.getAttributes()),
convertDoubleExemplars(histogramData.getExemplars()),
histogramData.getStartEpochNanos() / NANOS_PER_MILLISECOND));
}
return new HistogramSnapshot(metadata, data);
}
@Nullable
private HistogramSnapshot convertExponentialHistogram(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<ExponentialHistogramPointData> dataPoints) {
List<HistogramDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (ExponentialHistogramPointData histogramData : dataPoints) {
int scale = histogramData.getScale();
if (scale < -4) {
THROTTLING_LOGGER.log(
Level.WARNING,
"Dropping histogram "
+ metadata.getName()
+ " with attributes "
+ histogramData.getAttributes()
+ " because it has scale < -4 which is unsupported in Prometheus");
return null;
}
// Scales greater than 8 are not supported in Prometheus. Histograms with scale > 8 are scaled down to 8.
int scaleDown = scale > 8 ? scale - 8 : 0;
data.add(
new HistogramDataPointSnapshot(
scale - scaleDown,
histogramData.getZeroCount(),
0L,
convertExponentialHistogramBuckets(histogramData.getPositiveBuckets(), scaleDown),
convertExponentialHistogramBuckets(histogramData.getNegativeBuckets(), scaleDown),
histogramData.getSum(),
convertAttributes(scope, histogramData.getAttributes()),
convertDoubleExemplars(histogramData.getExemplars()),
histogramData.getStartEpochNanos() / NANOS_PER_MILLISECOND));
}
return new HistogramSnapshot(metadata, data);
}
private static NativeHistogramBuckets convertExponentialHistogramBuckets(
ExponentialHistogramBuckets buckets, int scaleDown) {
if (buckets.getBucketCounts().isEmpty()) {
return NativeHistogramBuckets.EMPTY;
}
List<Long> otelCounts = buckets.getBucketCounts();
List<Integer> indexes = new ArrayList<>(otelCounts.size());
List<Long> counts = new ArrayList<>(otelCounts.size());
int previousIndex = (buckets.getOffset() >> scaleDown) + 1;
long count = 0;
for (int i = 0; i < otelCounts.size(); i++) {
int index = ((buckets.getOffset() + i) >> scaleDown) + 1;
if (index > previousIndex) {
indexes.add(previousIndex);
counts.add(count);
previousIndex = index;
count = 0;
}
count += otelCounts.get(i);
}
indexes.add(previousIndex);
counts.add(count);
return NativeHistogramBuckets.of(indexes, counts);
}
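// Worked example for the downscaling above (illustrative): scaleDown = 1, offset = 3,
// otelCounts = [1, 2, 4, 8]. OTel indexes 3,4,5,6 map to ((index >> 1) + 1) = 2,3,3,4,
// so the merged native buckets are {2 -> 1, 3 -> 2 + 4 = 6, 4 -> 8}.
// The "+ 1" shift reflects that OTel bucket i covers (base^i, base^(i+1)] while
// Prometheus native histogram bucket j covers (base^(j-1), base^j].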
private SummarySnapshot convertSummary(
MetricMetadata metadata,
InstrumentationScopeInfo scope,
Collection<SummaryPointData> dataPoints) {
List<SummaryDataPointSnapshot> data = new ArrayList<>(dataPoints.size());
for (SummaryPointData summaryData : dataPoints) {
data.add(
new SummaryDataPointSnapshot(
summaryData.getCount(),
summaryData.getSum(),
convertQuantiles(summaryData.getValues()),
convertAttributes(scope, summaryData.getAttributes()),
Exemplars.EMPTY, // Exemplars for Summaries not implemented yet.
summaryData.getStartEpochNanos() / NANOS_PER_MILLISECOND));
}
return new SummarySnapshot(metadata, data);
}
private static Quantiles convertQuantiles(List<ValueAtQuantile> values) {
List<Quantile> result = new ArrayList<>(values.size());
for (ValueAtQuantile value : values) {
result.add(new Quantile(value.getQuantile(), value.getValue()));
}
return Quantiles.of(result);
}
@Nullable
private Exemplar convertLongExemplar(List<LongExemplarData> exemplars) {
if (exemplars.isEmpty()) {
return null;
} else {
LongExemplarData exemplar = exemplars.get(0);
return convertExemplar((double) exemplar.getValue(), exemplar);
}
}
/** Converts the first exemplar in the list if available, else returns {@code null}. */
@Nullable
private Exemplar convertDoubleExemplar(List<DoubleExemplarData> exemplars) {
if (exemplars.isEmpty()) {
return null;
} else {
DoubleExemplarData exemplar = exemplars.get(0);
return convertExemplar(exemplar.getValue(), exemplar);
}
}
/** Converts all exemplars in the list to Prometheus {@link Exemplars}. */
private Exemplars convertDoubleExemplars(List<DoubleExemplarData> exemplars) {
List<Exemplar> result = new ArrayList<>(exemplars.size());
for (DoubleExemplarData exemplar : exemplars) {
result.add(convertExemplar(exemplar.getValue(), exemplar));
}
return Exemplars.of(result);
}
private Exemplar convertExemplar(double value, ExemplarData exemplar) {
SpanContext spanContext = exemplar.getSpanContext();
if (spanContext.isValid()) {
return new Exemplar(
value,
convertAttributes(
null,
exemplar.getFilteredAttributes(),
"trace_id",
spanContext.getTraceId(),
"span_id",
spanContext.getSpanId()),
exemplar.getEpochNanos() / NANOS_PER_MILLISECOND);
} else {
return new Exemplar(
value,
convertAttributes(null, exemplar.getFilteredAttributes()),
exemplar.getEpochNanos() / NANOS_PER_MILLISECOND);
}
}
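// Example (illustrative): an exemplar with value 0.17 recorded within a sampled span is
// exported with trace_id and span_id as plain labels alongside its filtered attributes;
// without a valid span context only the filtered attributes are kept.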
private InfoSnapshot makeTargetInfo(Resource resource) {
return new InfoSnapshot(
new MetricMetadata("target"),
Collections.singletonList(
new InfoDataPointSnapshot(convertAttributes(null, resource.getAttributes()))));
}
private InfoSnapshot makeScopeInfo(Set<InstrumentationScopeInfo> scopes) {
List<InfoDataPointSnapshot> prometheusScopeInfos = new ArrayList<>(scopes.size());
for (InstrumentationScopeInfo scope : scopes) {
prometheusScopeInfos.add(
new InfoDataPointSnapshot(convertAttributes(scope, scope.getAttributes())));
}
return new InfoSnapshot(new MetricMetadata("otel_scope"), prometheusScopeInfos);
}
/**
* Convert OpenTelemetry attributes to Prometheus labels.
*
* @param scope will be converted to {@code otel_scope_*} labels if {@code otelScopeEnabled} is
* {@code true}.
* @param attributes the attributes to be converted.
* @param additionalAttributes optional list of key/value pairs, may be empty.
*/
private Labels convertAttributes(
@Nullable InstrumentationScopeInfo scope,
Attributes attributes,
String... additionalAttributes) {
int numberOfScopeAttributes = 0;
if (otelScopeEnabled && scope != null) {
numberOfScopeAttributes = scope.getVersion() == null ? 1 : 2;
}
String[] names =
new String[attributes.size() + numberOfScopeAttributes + additionalAttributes.length / 2];
String[] values = new String[names.length];
int[] pos = new int[] {0}; // using an array because we want to increment in a forEach() lambda.
attributes.forEach(
(key, value) -> {
names[pos[0]] = sanitizeLabelName(key.getKey());
values[pos[0]] = value.toString();
pos[0]++;
});
for (int i = 0; i < additionalAttributes.length; i += 2) {
names[pos[0]] = additionalAttributes[i];
values[pos[0]] = additionalAttributes[i + 1];
pos[0]++;
}
if (otelScopeEnabled && scope != null) {
names[pos[0]] = OTEL_SCOPE_NAME;
values[pos[0]] = scope.getName();
pos[0]++;
if (scope.getVersion() != null) {
names[pos[0]] = OTEL_SCOPE_VERSION;
values[pos[0]] = scope.getVersion();
pos[0]++;
}
}
return Labels.of(names, values);
}
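// Example (illustrative): attributes {http.method="GET"} under scope "my-lib" version
// "1.2.0" become the labels {http_method="GET", otel_scope_name="my-lib",
// otel_scope_version="1.2.0"} when otelScopeEnabled is true; sanitizeLabelName
// replaces the '.' with '_'.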
private static MetricMetadata convertMetadata(MetricData metricData) {
String name = sanitizeMetricName(metricData.getName());
String help = metricData.getDescription();
Unit unit = PrometheusUnitsHelper.convertUnit(metricData.getUnit());
if (unit != null && !name.endsWith(unit.toString())) {
name += "_" + unit;
}
return new MetricMetadata(name, help, unit);
}
private static void putOrMerge(
Map<String, MetricSnapshot> snapshotsByName, MetricSnapshot snapshot) {
String name = snapshot.getMetadata().getName();
if (snapshotsByName.containsKey(name)) {
MetricSnapshot merged = merge(snapshotsByName.get(name), snapshot);
if (merged != null) {
snapshotsByName.put(name, merged);
}
} else {
snapshotsByName.put(name, snapshot);
}
}
/**
* OpenTelemetry may use the same metric name multiple times but in different instrumentation
* scopes. In that case, we try to merge the metrics. They will have different {@code
* otel_scope_name} attributes. However, merging is only possible if the metrics have the same
* type. If the type differs, we log a message and drop one of them.
*/
@Nullable
private static MetricSnapshot merge(MetricSnapshot a, MetricSnapshot b) {
MetricMetadata metadata = mergeMetadata(a.getMetadata(), b.getMetadata());
if (metadata == null) {
return null;
}
int numberOfDataPoints = a.getDataPoints().size() + b.getDataPoints().size();
if (a instanceof GaugeSnapshot && b instanceof GaugeSnapshot) {
List<GaugeDataPointSnapshot> dataPoints = new ArrayList<>(numberOfDataPoints);
dataPoints.addAll(((GaugeSnapshot) a).getDataPoints());
dataPoints.addAll(((GaugeSnapshot) b).getDataPoints());
return new GaugeSnapshot(metadata, dataPoints);
} else if (a instanceof CounterSnapshot && b instanceof CounterSnapshot) {
List<CounterDataPointSnapshot> dataPoints = new ArrayList<>(numberOfDataPoints);
dataPoints.addAll(((CounterSnapshot) a).getDataPoints());
dataPoints.addAll(((CounterSnapshot) b).getDataPoints());
return new CounterSnapshot(metadata, dataPoints);
} else if (a instanceof HistogramSnapshot && b instanceof HistogramSnapshot) {
List<HistogramDataPointSnapshot> dataPoints = new ArrayList<>(numberOfDataPoints);
dataPoints.addAll(((HistogramSnapshot) a).getDataPoints());
dataPoints.addAll(((HistogramSnapshot) b).getDataPoints());
return new HistogramSnapshot(metadata, dataPoints);
} else if (a instanceof SummarySnapshot && b instanceof SummarySnapshot) {
List<SummaryDataPointSnapshot> dataPoints = new ArrayList<>(numberOfDataPoints);
dataPoints.addAll(((SummarySnapshot) a).getDataPoints());
dataPoints.addAll(((SummarySnapshot) b).getDataPoints());
return new SummarySnapshot(metadata, dataPoints);
} else if (a instanceof InfoSnapshot && b instanceof InfoSnapshot) {
List<InfoDataPointSnapshot> dataPoints = new ArrayList<>(numberOfDataPoints);
dataPoints.addAll(((InfoSnapshot) a).getDataPoints());
dataPoints.addAll(((InfoSnapshot) b).getDataPoints());
return new InfoSnapshot(metadata, dataPoints);
} else {
THROTTLING_LOGGER.log(
Level.WARNING,
"Conflicting metric name "
+ a.getMetadata().getPrometheusName()
+ ": Found one metric with type "
+ typeString(a)
+ " and one of type "
+ typeString(b)
+ ". Dropping the one with type "
+ typeString(b)
+ ".");
return null;
}
}
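// Example (illustrative): two gauges named "queue_size" from different instrumentation
// scopes merge into one GaugeSnapshot whose data points remain distinguishable by their
// otel_scope_name label; a gauge and a counter sharing a name cannot merge, so the
// second snapshot is dropped with a warning.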
@Nullable
private static MetricMetadata mergeMetadata(MetricMetadata a, MetricMetadata b) {
String name = a.getPrometheusName();
if (a.getName().equals(b.getName())) {
name = a.getName();
}
String help = null;
if (a.getHelp() != null && a.getHelp().equals(b.getHelp())) {
help = a.getHelp();
}
Unit unit = a.getUnit();
if (unit != null && !unit.equals(b.getUnit())) {
THROTTLING_LOGGER.log(
Level.WARNING,
"Conflicting metrics: Multiple metrics with name "
+ name
+ " but different units found. Dropping the one with unit "
+ b.getUnit());
return null;
}
return new MetricMetadata(name, help, unit);
}
private static String typeString(MetricSnapshot snapshot) {
// Simple helper for a log message.
return snapshot.getClass().getSimpleName().replace("Snapshot", "").toLowerCase(Locale.ENGLISH);
}
}

View File

@@ -10,56 +10,31 @@
package io.opentelemetry.exporter.prometheus;
import static java.util.stream.Collectors.joining;
import com.sun.net.httpserver.HttpExchange;
import com.sun.net.httpserver.HttpHandler;
import com.sun.net.httpserver.HttpServer;
import io.opentelemetry.sdk.common.CompletableResultCode;
import io.opentelemetry.sdk.internal.DaemonThreadFactory;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.export.CollectionRegistration;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.prometheus.metrics.exporter.httpserver.HTTPServer;
import io.prometheus.metrics.exporter.httpserver.MetricsHandler;
import io.prometheus.metrics.model.registry.PrometheusRegistry;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.zip.GZIPOutputStream;
import javax.annotation.Nullable;
/**
* A {@link MetricReader} that starts an HTTP server that will collect metrics and serialize to
* Prometheus text format on request.
*/
// Very similar to
// https://github.com/prometheus/client_java/blob/master/simpleclient_httpserver/src/main/java/io/prometheus/client/exporter/HTTPServer.java
public final class PrometheusHttpServer implements MetricReader {
private static final DaemonThreadFactory THREAD_FACTORY =
new DaemonThreadFactory("prometheus-http");
private static final Logger LOGGER = Logger.getLogger(PrometheusHttpServer.class.getName());
private final HttpServer server;
private final ExecutorService executor;
private volatile CollectionRegistration collectionRegistration = CollectionRegistration.noop();
private final HTTPServer httpServer;
private final PrometheusMetricReader prometheusMetricReader;
private final PrometheusRegistry prometheusRegistry;
private final String host;
/**
* Returns a new {@link PrometheusHttpServer} which can be registered to an {@link
@@ -75,87 +50,61 @@ public final class PrometheusHttpServer implements MetricReader {
return new PrometheusHttpServerBuilder();
}
PrometheusHttpServer(String host, int port, ExecutorService executor) {
PrometheusHttpServer(
String host,
int port,
@Nullable ExecutorService executor,
PrometheusRegistry prometheusRegistry,
boolean otelScopeEnabled) {
this.prometheusMetricReader = new PrometheusMetricReader(otelScopeEnabled);
this.host = host;
this.prometheusRegistry = prometheusRegistry;
prometheusRegistry.register(prometheusMetricReader);
try {
server = createServer(host, port);
this.httpServer =
HTTPServer.builder()
.hostname(host)
.port(port)
.executorService(executor)
.registry(prometheusRegistry)
.defaultHandler(new MetricsHandler(prometheusRegistry))
.buildAndStart();
} catch (IOException e) {
throw new UncheckedIOException("Could not create Prometheus HTTP server", e);
}
MetricsHandler metricsHandler =
new MetricsHandler(() -> collectionRegistration.collectAllMetrics());
server.createContext("/", metricsHandler);
server.createContext("/metrics", metricsHandler);
server.createContext("/-/healthy", HealthHandler.INSTANCE);
this.executor = executor;
server.setExecutor(executor);
start();
}
private static HttpServer createServer(String host, int port) throws IOException {
IOException exception = null;
for (InetAddress address : InetAddress.getAllByName(host)) {
try {
return HttpServer.create(new InetSocketAddress(address, port), 3);
} catch (IOException e) {
if (exception == null) {
exception = e;
} else {
exception.addSuppressed(e);
}
}
}
assert exception != null;
throw exception;
}
private void start() {
// server.start must be called from a daemon thread for it to be a daemon.
if (Thread.currentThread().isDaemon()) {
server.start();
return;
}
Thread thread = THREAD_FACTORY.newThread(server::start);
thread.start();
try {
thread.join();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@Override
public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
return AggregationTemporality.CUMULATIVE;
return prometheusMetricReader.getAggregationTemporality(instrumentType);
}
@Override
public void register(CollectionRegistration registration) {
this.collectionRegistration = registration;
prometheusMetricReader.register(registration);
}
@Override
public CompletableResultCode forceFlush() {
return CompletableResultCode.ofSuccess();
return prometheusMetricReader.forceFlush();
}
@Override
public CompletableResultCode shutdown() {
CompletableResultCode result = new CompletableResultCode();
Thread thread =
THREAD_FACTORY.newThread(
() -> {
try {
server.stop(10);
executor.shutdownNow();
} catch (Throwable t) {
result.fail();
return;
}
result.succeed();
});
thread.start();
Runnable shutdownFunction =
() -> {
try {
prometheusRegistry.unregister(prometheusMetricReader);
httpServer.stop();
prometheusMetricReader.shutdown().whenComplete(result::succeed);
} catch (Throwable t) {
result.fail();
}
};
Thread shutdownThread = new Thread(shutdownFunction, "prometheus-httpserver-shutdown");
shutdownThread.setDaemon(true);
shutdownThread.start();
return result;
}
@@ -166,112 +115,11 @@ public final class PrometheusHttpServer implements MetricReader {
@Override
public String toString() {
return "PrometheusHttpServer{address=" + server.getAddress() + "}";
return "PrometheusHttpServer{address=" + getAddress() + "}";
}
// Visible for testing.
InetSocketAddress getAddress() {
return server.getAddress();
}
private static class MetricsHandler implements HttpHandler {
private final Set<String> allConflictHeaderNames =
Collections.newSetFromMap(new ConcurrentHashMap<>());
private final Supplier<Collection<MetricData>> metricsSupplier;
private MetricsHandler(Supplier<Collection<MetricData>> metricsSupplier) {
this.metricsSupplier = metricsSupplier;
}
@Override
public void handle(HttpExchange exchange) throws IOException {
Collection<MetricData> metrics = metricsSupplier.get();
Set<String> requestedNames = parseQuery(exchange.getRequestURI().getRawQuery());
Predicate<String> filter =
requestedNames.isEmpty() ? unused -> true : requestedNames::contains;
Serializer serializer =
Serializer.create(exchange.getRequestHeaders().getFirst("Accept"), filter);
exchange.getResponseHeaders().set("Content-Type", serializer.contentType());
boolean compress = shouldUseCompression(exchange);
if (compress) {
exchange.getResponseHeaders().set("Content-Encoding", "gzip");
}
if (exchange.getRequestMethod().equals("HEAD")) {
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, -1);
} else {
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, 0);
OutputStream out;
if (compress) {
out = new GZIPOutputStream(exchange.getResponseBody());
} else {
out = exchange.getResponseBody();
}
Set<String> conflictHeaderNames = serializer.write(metrics, out);
conflictHeaderNames.removeAll(allConflictHeaderNames);
if (conflictHeaderNames.size() > 0 && LOGGER.isLoggable(Level.WARNING)) {
LOGGER.log(
Level.WARNING,
"Metric conflict(s) detected. Multiple metrics with same name but different type: "
+ conflictHeaderNames.stream().collect(joining(",", "[", "]")));
allConflictHeaderNames.addAll(conflictHeaderNames);
}
}
exchange.close();
}
}
private static boolean shouldUseCompression(HttpExchange exchange) {
List<String> encodingHeaders = exchange.getRequestHeaders().get("Accept-Encoding");
if (encodingHeaders == null) {
return false;
}
for (String encodingHeader : encodingHeaders) {
String[] encodings = encodingHeader.split(",");
for (String encoding : encodings) {
if (encoding.trim().equalsIgnoreCase("gzip")) {
return true;
}
}
}
return false;
}
private static Set<String> parseQuery(@Nullable String query) throws IOException {
if (query == null) {
return Collections.emptySet();
}
Set<String> names = new HashSet<>();
String[] pairs = query.split("&");
for (String pair : pairs) {
int idx = pair.indexOf("=");
if (idx != -1 && URLDecoder.decode(pair.substring(0, idx), "UTF-8").equals("name[]")) {
names.add(URLDecoder.decode(pair.substring(idx + 1), "UTF-8"));
}
}
return names;
}
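// Example (illustrative): a scrape of /metrics?name[]=foo_total&name[]=bar_bytes
// restricts the response to the two metrics named foo_total and bar_bytes.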
private enum HealthHandler implements HttpHandler {
INSTANCE;
private static final byte[] RESPONSE = "Exporter is Healthy.".getBytes(StandardCharsets.UTF_8);
private static final String CONTENT_LENGTH_VALUE = String.valueOf(RESPONSE.length);
@Override
public void handle(HttpExchange exchange) throws IOException {
exchange.getResponseHeaders().set("Content-Length", CONTENT_LENGTH_VALUE);
if (exchange.getRequestMethod().equals("HEAD")) {
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, -1);
} else {
exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, RESPONSE.length);
exchange.getResponseBody().write(RESPONSE);
}
exchange.close();
}
return new InetSocketAddress(host, httpServer.getPort());
}
}

View File

@@ -8,9 +8,8 @@ package io.opentelemetry.exporter.prometheus;
import static io.opentelemetry.api.internal.Utils.checkArgument;
import static java.util.Objects.requireNonNull;
import io.opentelemetry.sdk.internal.DaemonThreadFactory;
import io.prometheus.metrics.model.registry.PrometheusRegistry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.annotation.Nullable;
/** A builder for {@link PrometheusHttpServer}. */
@@ -21,6 +20,8 @@ public final class PrometheusHttpServerBuilder {
private String host = DEFAULT_HOST;
private int port = DEFAULT_PORT;
private PrometheusRegistry prometheusRegistry = new PrometheusRegistry();
private boolean otelScopeEnabled = true;
@Nullable private ExecutorService executor;
@@ -46,21 +47,26 @@ public final class PrometheusHttpServerBuilder {
return this;
}
/** Sets the {@link PrometheusRegistry} to be used for {@link PrometheusHttpServer}. */
public PrometheusHttpServerBuilder setPrometheusRegistry(PrometheusRegistry prometheusRegistry) {
requireNonNull(prometheusRegistry, "prometheusRegistry");
this.prometheusRegistry = prometheusRegistry;
return this;
}
/** Set if the {@code otel_scope_*} attributes are generated. Default is {@code true}. */
public PrometheusHttpServerBuilder setOtelScopeEnabled(boolean otelScopeEnabled) {
this.otelScopeEnabled = otelScopeEnabled;
return this;
}
/**
* Returns a new {@link PrometheusHttpServer} with the configuration of this builder which can be
* registered with a {@link io.opentelemetry.sdk.metrics.SdkMeterProvider}.
*/
public PrometheusHttpServer build() {
ExecutorService executorService = this.executor;
if (executorService == null) {
executorService = getDefaultExecutor();
}
return new PrometheusHttpServer(host, port, executorService);
return new PrometheusHttpServer(host, port, executor, prometheusRegistry, otelScopeEnabled);
}
PrometheusHttpServerBuilder() {}
private static ExecutorService getDefaultExecutor() {
return Executors.newFixedThreadPool(5, new DaemonThreadFactory("prometheus-http"));
}
}
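For reference, a minimal usage sketch of the builder above; the host and port values are illustrative, and setHost/setPort are the builder's existing setters:

import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.prometheus.metrics.model.registry.PrometheusRegistry;

PrometheusHttpServer server =
    PrometheusHttpServer.builder()
        .setHost("0.0.0.0")
        .setPort(9464)
        .setPrometheusRegistry(new PrometheusRegistry())
        .setOtelScopeEnabled(true)
        .build();
SdkMeterProvider meterProvider =
    SdkMeterProvider.builder().registerMetricReader(server).build();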

View File

@@ -1,104 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.internal.StringUtils;
import io.opentelemetry.sdk.metrics.data.MetricData;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;
import javax.annotation.concurrent.Immutable;
/** A class that maps a raw metric name to Prometheus equivalent name. */
class PrometheusMetricNameMapper implements BiFunction<MetricData, PrometheusType, String> {
private static final String TOTAL_SUFFIX = "_total";
static final PrometheusMetricNameMapper INSTANCE = new PrometheusMetricNameMapper();
private final Map<ImmutableMappingKey, String> cache = new ConcurrentHashMap<>();
private final BiFunction<MetricData, PrometheusType, String> delegate;
// private constructor - prevent external object initialization
private PrometheusMetricNameMapper() {
this(PrometheusMetricNameMapper::mapToPrometheusName);
}
// Visible for testing
PrometheusMetricNameMapper(BiFunction<MetricData, PrometheusType, String> delegate) {
this.delegate = delegate;
}
@Override
public String apply(MetricData rawMetric, PrometheusType prometheusType) {
return cache.computeIfAbsent(
createKeyForCacheMapping(rawMetric, prometheusType),
metricData -> delegate.apply(rawMetric, prometheusType));
}
private static String mapToPrometheusName(MetricData rawMetric, PrometheusType prometheusType) {
String name = NameSanitizer.INSTANCE.apply(rawMetric.getName());
String prometheusEquivalentUnit =
PrometheusUnitsHelper.getEquivalentPrometheusUnit(rawMetric.getUnit());
boolean shouldAppendUnit =
!StringUtils.isNullOrEmpty(prometheusEquivalentUnit)
&& !name.contains(prometheusEquivalentUnit);
// trim counter's _total suffix so the unit is placed before it.
if (prometheusType == PrometheusType.COUNTER && name.endsWith(TOTAL_SUFFIX)) {
name = name.substring(0, name.length() - TOTAL_SUFFIX.length());
}
// append prometheus unit if not null or empty.
if (shouldAppendUnit) {
name = name + "_" + prometheusEquivalentUnit;
}
// replace _total suffix, or add if it wasn't already present.
if (prometheusType == PrometheusType.COUNTER) {
name = name + TOTAL_SUFFIX;
}
// special case - gauge
if (rawMetric.getUnit().equals("1")
&& prometheusType == PrometheusType.GAUGE
&& !name.contains("ratio")) {
name = name + "_ratio";
}
return name;
}
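// Worked example for the mapping above (illustrative): a monotonic sum named
// "http.client.duration_total" with unit "ms" is sanitized to
// "http_client_duration_total", the "_total" suffix is trimmed, the unit is appended
// ("http_client_duration_milliseconds"), and the counter suffix is re-appended,
// yielding "http_client_duration_milliseconds_total".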
/**
* Creates a suitable mapping key to be used for maintaining mapping between raw metric and its
* equivalent Prometheus name.
*
* @param metricData the metric data for which the mapping is to be created.
* @param prometheusType the prometheus type to which the metric is to be mapped.
* @return an {@link ImmutableMappingKey} that can be used as a key for mapping between metric
* data and its prometheus equivalent name.
*/
private static ImmutableMappingKey createKeyForCacheMapping(
MetricData metricData, PrometheusType prometheusType) {
return ImmutableMappingKey.create(
metricData.getName(), metricData.getUnit(), prometheusType.name());
}
/**
* Objects of this class acts as mapping keys for Prometheus metric mapping cache used in {@link
* PrometheusMetricNameMapper}.
*/
@Immutable
@AutoValue
abstract static class ImmutableMappingKey {
static ImmutableMappingKey create(
String rawMetricName, String rawMetricUnit, String prometheusType) {
return new AutoValue_PrometheusMetricNameMapper_ImmutableMappingKey(
rawMetricName, rawMetricUnit, prometheusType);
}
abstract String rawMetricName();
abstract String rawMetricUnit();
abstract String prometheusType();
}
}

View File

@@ -0,0 +1,58 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import io.opentelemetry.sdk.common.CompletableResultCode;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.export.CollectionRegistration;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.prometheus.metrics.model.registry.MultiCollector;
import io.prometheus.metrics.model.snapshots.MetricSnapshots;
/**
* This is the bridge between Prometheus and OpenTelemetry.
*
* <p>The {@link PrometheusMetricReader} is a Prometheus {@link MultiCollector} and can be
* registered with the {@link io.prometheus.metrics.model.registry.PrometheusRegistry
* PrometheusRegistry}. It's also an OpenTelemetry {@link MetricReader} and can be registered with a
* {@link io.opentelemetry.sdk.metrics.SdkMeterProvider SdkMeterProvider}.
*/
public class PrometheusMetricReader implements MetricReader, MultiCollector {
private volatile CollectionRegistration collectionRegistration = CollectionRegistration.noop();
private final Otel2PrometheusConverter converter;
/** See {@link Otel2PrometheusConverter#Otel2PrometheusConverter(boolean)}. */
public PrometheusMetricReader(boolean otelScopeEnabled) {
this.converter = new Otel2PrometheusConverter(otelScopeEnabled);
}
@Override
public AggregationTemporality getAggregationTemporality(InstrumentType instrumentType) {
return AggregationTemporality.CUMULATIVE;
}
@Override
public void register(CollectionRegistration registration) {
this.collectionRegistration = registration;
}
@Override
public CompletableResultCode forceFlush() {
return CompletableResultCode.ofSuccess();
}
@Override
public CompletableResultCode shutdown() {
return CompletableResultCode.ofSuccess();
}
@Override
public MetricSnapshots collect() {
return converter.convert(collectionRegistration.collectAllMetrics());
}
}
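A minimal sketch of the dual registration described in the javadoc above (variable names are illustrative):

import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.prometheus.metrics.model.registry.PrometheusRegistry;

PrometheusMetricReader reader = new PrometheusMetricReader(/* otelScopeEnabled= */ true);
PrometheusRegistry registry = new PrometheusRegistry();
registry.register(reader); // Prometheus side: the reader is a MultiCollector
SdkMeterProvider meterProvider =
    SdkMeterProvider.builder().registerMetricReader(reader).build(); // OpenTelemetry side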

View File

@@ -1,57 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.SumData;
// The four types we use are the same in the Prometheus and OpenMetrics formats.
enum PrometheusType {
GAUGE("gauge"),
COUNTER("counter"),
SUMMARY("summary"),
HISTOGRAM("histogram");
private final String typeString;
PrometheusType(String typeString) {
this.typeString = typeString;
}
static PrometheusType forMetric(MetricData metric) {
switch (metric.getType()) {
case LONG_GAUGE:
case DOUBLE_GAUGE:
return GAUGE;
case LONG_SUM:
SumData<LongPointData> longSumData = metric.getLongSumData();
if (longSumData.isMonotonic()) {
return COUNTER;
}
return GAUGE;
case DOUBLE_SUM:
SumData<DoublePointData> doubleSumData = metric.getDoubleSumData();
if (doubleSumData.isMonotonic()) {
return COUNTER;
}
return GAUGE;
case SUMMARY:
return SUMMARY;
case HISTOGRAM:
case EXPONENTIAL_HISTOGRAM:
return HISTOGRAM;
}
throw new IllegalArgumentException(
"Unsupported metric type, this generally indicates version misalignment "
+ "among opentelemetry dependencies. Please make sure to use opentelemetry-bom.");
}
String getTypeString() {
return typeString;
}
}

View File

@@ -5,208 +5,94 @@
package io.opentelemetry.exporter.prometheus;
import static io.opentelemetry.exporter.prometheus.NameSanitizer.SANITIZE_CONSECUTIVE_UNDERSCORES;
import io.prometheus.metrics.model.snapshots.Unit;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nullable;
import io.opentelemetry.api.internal.StringUtils;
import java.util.regex.Pattern;
/** Convert OpenTelemetry unit names to Prometheus units. */
class PrometheusUnitsHelper {
/**
* A utility class that contains helper function(s) to aid conversion from OTLP to Prometheus units.
*
* @see <a
* href="https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#units-and-base-units">OpenMetrics
* specification for units</a>
* @see <a href="https://prometheus.io/docs/practices/naming/#base-units">Prometheus best practices
* for units</a>
*/
final class PrometheusUnitsHelper {
private static final Map<String, String> pluralNames = new ConcurrentHashMap<>();
private static final Map<String, String> singularNames = new ConcurrentHashMap<>();
private static final Map<String, Unit> predefinedUnits = new ConcurrentHashMap<>();
private static final Pattern INVALID_CHARACTERS_PATTERN = Pattern.compile("[^a-zA-Z0-9]");
private static final Pattern CHARACTERS_BETWEEN_BRACES_PATTERN = Pattern.compile("\\{(.*?)}");
private static final Pattern SANITIZE_LEADING_UNDERSCORES = Pattern.compile("^_+");
private static final Pattern SANITIZE_TRAILING_UNDERSCORES = Pattern.compile("_+$");
private PrometheusUnitsHelper() {
// Prevent object creation for utility classes
// See
// https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/c3b2997563106e11d39f66eec629fde25dce2bdd/pkg/translator/prometheus/normalize_name.go#L19-L19
static {
// Time
initUnit("a", "years", "year");
initUnit("mo", "months", "month");
initUnit("wk", "weeks", "week");
initUnit("d", "days", "day");
initUnit("h", "hours", "hour");
initUnit("min", "minutes", "minute");
initUnit("s", "seconds", "second");
initUnit("ms", "milliseconds", "millisecond");
initUnit("us", "microseconds", "microsecond");
initUnit("ns", "nanoseconds", "nanosecond");
// Bytes
initUnit("By", "bytes", "byte");
initUnit("KiBy", "kibibytes", "kibibyte");
initUnit("MiBy", "mebibytes", "mebibyte");
initUnit("GiBy", "gibibytes", "gibibyte");
initUnit("TiBy", "tibibytes", "tibibyte");
initUnit("KBy", "kilobytes", "kilobyte");
initUnit("MBy", "megabytes", "megabyte");
initUnit("GBy", "gigabytes", "gigabyte");
initUnit("TBy", "terabytes", "terabyte");
// SI
initUnit("m", "meters", "meter");
initUnit("V", "volts", "volt");
initUnit("A", "amperes", "ampere");
initUnit("J", "joules", "joule");
initUnit("W", "watts", "watt");
initUnit("g", "grams", "gram");
// Misc
initUnit("Cel", "celsius");
initUnit("Hz", "hertz");
initUnit("%", "percent");
initUnit("1", "ratio");
}
/**
* A utility function that returns the equivalent Prometheus name for the provided OTLP metric
* unit.
*
* @param rawMetricUnitName The raw metric unit for which Prometheus metric unit needs to be
* computed.
* @return the computed Prometheus metric unit equivalent of the OTLP metric unit.
*/
static String getEquivalentPrometheusUnit(String rawMetricUnitName) {
if (StringUtils.isNullOrEmpty(rawMetricUnitName)) {
return rawMetricUnitName;
private PrometheusUnitsHelper() {}
private static void initUnit(String otelName, String pluralName) {
pluralNames.put(otelName, pluralName);
predefinedUnits.put(otelName, new Unit(pluralName));
}
private static void initUnit(String otelName, String pluralName, String singularName) {
initUnit(otelName, pluralName);
singularNames.put(otelName, singularName);
}
@Nullable
static Unit convertUnit(String otelUnit) {
if (otelUnit.isEmpty() || otelUnit.equals("1")) {
// The spec says "1" should be translated to "ratio", but this is not implemented in the Java
// SDK.
return null;
}
// Drop units specified between curly braces
String convertedMetricUnitName = removeUnitPortionInBraces(rawMetricUnitName);
// Handling for the "per" unit(s), e.g. foo/bar -> foo_per_bar
convertedMetricUnitName = convertRateExpressedToPrometheusUnit(convertedMetricUnitName);
// Converting abbreviated unit names to full names
return cleanUpString(getPrometheusUnit(convertedMetricUnitName));
}
/**
* This method converts units expressed as a rate via the '/' symbol to their expanded
* text equivalent, e.g. km/h => km_per_hour. It splits the input into two parts, before
* and after the '/' symbol, and attempts to expand any known unit abbreviation in both
* parts. Unknown abbreviations and unsupported characters remain unchanged in the output.
*
* @param rateExpressedUnit The rate unit input that needs to be converted to its text equivalent.
* @return The text equivalent of unit expressed as rate. If the input does not contain '/', the
* function returns it as-is.
*/
private static String convertRateExpressedToPrometheusUnit(String rateExpressedUnit) {
if (!rateExpressedUnit.contains("/")) {
return rateExpressedUnit;
if (otelUnit.contains("{")) {
otelUnit = otelUnit.replaceAll("\\{[^}]*}", "").trim();
if (otelUnit.isEmpty() || otelUnit.equals("/")) {
return null;
}
}
String[] rateEntities = rateExpressedUnit.split("/", 2);
// Only convert rate expressed units if it's a valid expression
if (rateEntities[1].equals("")) {
return rateExpressedUnit;
if (predefinedUnits.containsKey(otelUnit)) {
return predefinedUnits.get(otelUnit);
}
return getPrometheusUnit(rateEntities[0]) + "_per_" + getPrometheusPerUnit(rateEntities[1]);
}
/**
* This method drops all characters enclosed within '{}' (including the curly braces) by replacing
* them with an empty string. Note that this method will not produce the intended effect if there
* are nested curly braces within the outer enclosure of '{}'.
*
* <p>For instance, {packet{s}s} => s}.
*
* @param unit The input unit from which text within curly braces needs to be removed.
* @return The resulting unit after removing the text within '{}'.
*/
private static String removeUnitPortionInBraces(String unit) {
return CHARACTERS_BETWEEN_BRACES_PATTERN.matcher(unit).replaceAll("");
}
/**
* Replaces all characters that are not a letter or a digit with '_' to make the resulting string
* Prometheus compliant. This method also removes leading and trailing underscores - this is done
* to keep the resulting unit similar to what is produced from the collector's implementation.
*
* @param string The string input that needs to be made Prometheus compliant.
* @return the cleaned-up Prometheus compliant string.
*/
private static String cleanUpString(String string) {
return SANITIZE_LEADING_UNDERSCORES
.matcher(
SANITIZE_TRAILING_UNDERSCORES
.matcher(
SANITIZE_CONSECUTIVE_UNDERSCORES
.matcher(INVALID_CHARACTERS_PATTERN.matcher(string).replaceAll("_"))
.replaceAll("_"))
.replaceAll(""))
.replaceAll("");
}
/**
* This method retrieves the expanded Prometheus unit name for known abbreviations. OTLP metrics
* use the c/s notation as specified at <a href="https://ucum.org/ucum.html">UCUM</a>. The list of
* mappings is adopted from <a
* href="https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/9a9d4778bbbf242dba233db28e2fbcfda3416959/pkg/translator/prometheus/normalize_name.go#L30">OpenTelemetry
* Collector Contrib</a>.
*
* @param unitAbbreviation The unit that name that needs to be expanded/converted to Prometheus
* units.
* @return The expanded/converted unit name if known, otherwise returns the input unit name as-is.
*/
private static String getPrometheusUnit(String unitAbbreviation) {
switch (unitAbbreviation) {
// Time
case "d":
return "days";
case "h":
return "hours";
case "min":
return "minutes";
case "s":
return "seconds";
case "ms":
return "milliseconds";
case "us":
return "microseconds";
case "ns":
return "nanoseconds";
// Bytes
case "By":
return "bytes";
case "KiBy":
return "kibibytes";
case "MiBy":
return "mebibytes";
case "GiBy":
return "gibibytes";
case "TiBy":
return "tibibytes";
case "KBy":
return "kilobytes";
case "MBy":
return "megabytes";
case "GBy":
return "gigabytes";
case "TBy":
return "terabytes";
// SI
case "m":
return "meters";
case "V":
return "volts";
case "A":
return "amperes";
case "J":
return "joules";
case "W":
return "watts";
case "g":
return "grams";
// Misc
case "Cel":
return "celsius";
case "Hz":
return "hertz";
case "1":
return "";
case "%":
return "percent";
default:
return unitAbbreviation;
}
}
/**
* This method retrieves the expanded Prometheus unit name to be used with "per" units for known
* units. For example: s => per second (singular)
*
* @param perUnitAbbreviation The unit abbreviation used in a 'per' unit.
* @return The expanded unit equivalent to be used in 'per' unit if the input is a known unit,
* otherwise returns the input as-is.
*/
private static String getPrometheusPerUnit(String perUnitAbbreviation) {
switch (perUnitAbbreviation) {
case "s":
return "second";
case "m":
return "minute";
case "h":
return "hour";
case "d":
return "day";
case "w":
return "week";
case "mo":
return "month";
case "y":
return "year";
default:
return perUnitAbbreviation;
if (otelUnit.contains("/")) {
String[] parts = otelUnit.split("/", 2);
String part1 = pluralNames.getOrDefault(parts[0], parts[0]).trim();
String part2 = singularNames.getOrDefault(parts[1], parts[1]).trim();
if (part1.isEmpty()) {
return new Unit("per_" + part2);
} else {
return new Unit(part1 + "_per_" + part2);
}
}
return new Unit(otelUnit);
}
}
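A few sample conversions through convertUnit above, assuming the mappings initialized in the static block (inputs are illustrative):

convertUnit("By")          // -> Unit("bytes")        predefined mapping
convertUnit("ms")          // -> Unit("milliseconds") predefined mapping
convertUnit("km/h")        // -> Unit("km_per_hour")  unknown numerator kept as-is
convertUnit("{packets}/s") // -> Unit("per_second")   braces dropped, empty numerator
convertUnit("1")           // -> null                 ratio not implemented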

View File

@@ -1,707 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
// Includes work from:
/*
* Prometheus instrumentation library for JVM applications
* Copyright 2012-2015 The Prometheus Authors
*
* This product includes software developed at
* Boxever Ltd. (http://www.boxever.com/).
*
* This product includes software developed at
* SoundCloud Ltd. (http://soundcloud.com/).
*
* This product includes software developed as part of the
* Ocelli project by Netflix Inc. (https://github.com/Netflix/ocelli/).
*/
package io.opentelemetry.exporter.prometheus;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.data.ValueAtQuantile;
import io.opentelemetry.sdk.resources.Resource;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import java.util.function.Predicate;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nullable;
/** Serializes metrics into Prometheus exposition formats. */
// Adapted from
// https://github.com/prometheus/client_java/blob/master/simpleclient_common/src/main/java/io/prometheus/client/exporter/common/TextFormat.java
abstract class Serializer {
private static final Logger LOGGER = Logger.getLogger(Serializer.class.getName());
private static final ThrottlingLogger THROTTLING_LOGGER = new ThrottlingLogger(LOGGER);
static Serializer create(@Nullable String acceptHeader, Predicate<String> filter) {
if (acceptHeader == null) {
return new Prometheus004Serializer(filter);
}
for (String accepts : acceptHeader.split(",")) {
if ("application/openmetrics-text".equals(accepts.split(";")[0].trim())) {
return new OpenMetrics100Serializer(filter);
}
}
return new Prometheus004Serializer(filter);
}
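// Example (illustrative): Accept: "application/openmetrics-text; version=1.0.0,text/plain"
// selects the OpenMetrics100Serializer; a missing or unrecognized Accept header falls back
// to the Prometheus004Serializer (text format 0.0.4).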
private final Predicate<String> metricNameFilter;
Serializer(Predicate<String> metricNameFilter) {
this.metricNameFilter = metricNameFilter;
}
abstract String contentType();
abstract String headerName(String name, MetricData rawMetric, PrometheusType type);
abstract void writeHelp(Writer writer, String description) throws IOException;
abstract void writeTimestamp(Writer writer, long timestampNanos) throws IOException;
abstract void writeExemplar(
Writer writer,
Collection<? extends ExemplarData> exemplars,
double minExemplar,
double maxExemplar)
throws IOException;
abstract void writeEof(Writer writer) throws IOException;
final Set<String> write(Collection<MetricData> metrics, OutputStream output) throws IOException {
Set<String> conflictMetricNames = new HashSet<>();
Map<String, List<MetricData>> metricsByName = new LinkedHashMap<>();
Set<InstrumentationScopeInfo> scopes = new LinkedHashSet<>();
// Iterate through metrics, filtering and grouping by headerName
for (MetricData metric : metrics) {
// Not supported in specification yet.
if (metric.getType() == MetricDataType.EXPONENTIAL_HISTOGRAM) {
continue;
}
// PrometheusHttpServer#getAggregationTemporality specifies cumulative temporality for
// all instruments, but non-SDK MetricProducers may not conform. We drop delta
// temporality metrics to avoid the complexity of stateful transformation to cumulative.
if (isDeltaTemporality(metric)) {
continue;
}
PrometheusType prometheusType = PrometheusType.forMetric(metric);
String metricName = PrometheusMetricNameMapper.INSTANCE.apply(metric, prometheusType);
// Skip metrics which do not pass metricNameFilter
if (!metricNameFilter.test(metricName)) {
continue;
}
List<MetricData> metricsWithHeaderName =
metricsByName.computeIfAbsent(metricName, unused -> new ArrayList<>());
// Skip metrics with the same name but different type
if (metricsWithHeaderName.size() > 0
&& prometheusType != PrometheusType.forMetric(metricsWithHeaderName.get(0))) {
conflictMetricNames.add(metricName);
continue;
}
metricsWithHeaderName.add(metric);
scopes.add(metric.getInstrumentationScopeInfo());
}
Optional<Resource> optResource = metrics.stream().findFirst().map(MetricData::getResource);
try (Writer writer =
new BufferedWriter(new OutputStreamWriter(output, StandardCharsets.UTF_8))) {
if (optResource.isPresent()) {
writeResource(optResource.get(), writer);
}
for (InstrumentationScopeInfo scope : scopes) {
writeScopeInfo(scope, writer);
}
for (Map.Entry<String, List<MetricData>> entry : metricsByName.entrySet()) {
write(entry.getValue(), entry.getKey(), writer);
}
writeEof(writer);
}
return conflictMetricNames;
}
private void write(List<MetricData> metrics, String metricName, Writer writer)
throws IOException {
// Write header based on first metric
MetricData first = metrics.get(0);
PrometheusType type = PrometheusType.forMetric(first);
String headerName = headerName(metricName, first, type);
String description = metrics.get(0).getDescription();
writer.write("# TYPE ");
writer.write(headerName);
writer.write(' ');
writer.write(type.getTypeString());
writer.write('\n');
writer.write("# HELP ");
writer.write(headerName);
writer.write(' ');
writeHelp(writer, description);
writer.write('\n');
// Then write the metrics.
for (MetricData metric : metrics) {
write(metric, metricName, writer);
}
}
private void write(MetricData metric, String metricName, Writer writer) throws IOException {
for (PointData point : getPoints(metric)) {
switch (metric.getType()) {
case DOUBLE_SUM:
case DOUBLE_GAUGE:
writePoint(
writer,
metric.getInstrumentationScopeInfo(),
metricName,
((DoublePointData) point).getValue(),
point.getAttributes(),
point.getEpochNanos());
break;
case LONG_SUM:
case LONG_GAUGE:
writePoint(
writer,
metric.getInstrumentationScopeInfo(),
metricName,
(double) ((LongPointData) point).getValue(),
point.getAttributes(),
point.getEpochNanos());
break;
case HISTOGRAM:
writeHistogram(
writer, metric.getInstrumentationScopeInfo(), metricName, (HistogramPointData) point);
break;
case SUMMARY:
writeSummary(
writer, metric.getInstrumentationScopeInfo(), metricName, (SummaryPointData) point);
break;
case EXPONENTIAL_HISTOGRAM:
throw new IllegalArgumentException("Can't happen");
}
}
}
private static boolean isDeltaTemporality(MetricData metricData) {
switch (metricData.getType()) {
case LONG_GAUGE:
case DOUBLE_GAUGE:
case SUMMARY:
return false;
case LONG_SUM:
return metricData.getLongSumData().getAggregationTemporality()
== AggregationTemporality.DELTA;
case DOUBLE_SUM:
return metricData.getDoubleSumData().getAggregationTemporality()
== AggregationTemporality.DELTA;
case HISTOGRAM:
return metricData.getHistogramData().getAggregationTemporality()
== AggregationTemporality.DELTA;
default:
}
throw new IllegalArgumentException("Can't happen");
}
private static void writeResource(Resource resource, Writer writer) throws IOException {
if (resource.getAttributes().isEmpty()) {
return;
}
writer.write("# TYPE target info\n");
writer.write("# HELP target Target metadata\n");
writer.write("target_info{");
writeAttributePairs(writer, /* initialComma= */ false, resource.getAttributes());
writer.write("} 1\n");
}
private static void writeScopeInfo(
InstrumentationScopeInfo instrumentationScopeInfo, Writer writer) throws IOException {
if (instrumentationScopeInfo.getAttributes().isEmpty()) {
return;
}
writer.write("# TYPE otel_scope_info info\n");
writer.write("# HELP otel_scope_info Scope metadata\n");
writer.write("otel_scope_info{");
writeScopeNameAndVersion(writer, instrumentationScopeInfo);
writeAttributePairs(writer, /* initialComma= */ true, instrumentationScopeInfo.getAttributes());
writer.write("} 1\n");
}
private void writeHistogram(
Writer writer,
InstrumentationScopeInfo instrumentationScopeInfo,
String name,
HistogramPointData point)
throws IOException {
writePoint(
writer,
instrumentationScopeInfo,
name + "_count",
(double) point.getCount(),
point.getAttributes(),
point.getEpochNanos());
writePoint(
writer,
instrumentationScopeInfo,
name + "_sum",
point.getSum(),
point.getAttributes(),
point.getEpochNanos());
long cumulativeCount = 0;
List<Long> counts = point.getCounts();
for (int i = 0; i < counts.size(); i++) {
// This is the upper boundary (inclusive), i.e. all values in this bucket are
// <= this value (LE - less than or equal).
double boundary = getBucketUpperBound(point, i);
cumulativeCount += counts.get(i);
writePoint(
writer,
instrumentationScopeInfo,
name + "_bucket",
(double) cumulativeCount,
point.getAttributes(),
point.getEpochNanos(),
"le",
boundary,
point.getExemplars(),
getBucketLowerBound(point, i),
boundary);
}
}
/**
* Returns the exclusive lower bound of a bucket (all values in the bucket are greater than it).
*
* @param bucketIndex The bucket index, should match {@link HistogramPointData#getCounts()} index.
*/
static double getBucketLowerBound(HistogramPointData point, int bucketIndex) {
return bucketIndex > 0 ? point.getBoundaries().get(bucketIndex - 1) : Double.NEGATIVE_INFINITY;
}
/**
* Returns the inclusive upper bound of a bucket (all values in the bucket are less than or equal to it).
*
* @param bucketIndex The bucket index, should match {@link HistogramPointData#getCounts()} index.
*/
static double getBucketUpperBound(HistogramPointData point, int bucketIndex) {
List<Double> boundaries = point.getBoundaries();
return (bucketIndex < boundaries.size())
? boundaries.get(bucketIndex)
: Double.POSITIVE_INFINITY;
}
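// Worked example for the two bound helpers above, assuming explicit boundaries
// [5.0, 10.0]: bucket 0 covers (-Inf, 5.0], bucket 1 covers (5.0, 10.0], and
// bucket 2 covers (10.0, +Inf), so writeHistogram emits le="5.0", le="10.0",
// and le="+Inf" with cumulative counts.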
private void writeSummary(
Writer writer,
InstrumentationScopeInfo instrumentationScopeInfo,
String name,
SummaryPointData point)
throws IOException {
writePoint(
writer,
instrumentationScopeInfo,
name + "_count",
(double) point.getCount(),
point.getAttributes(),
point.getEpochNanos());
writePoint(
writer,
instrumentationScopeInfo,
name + "_sum",
point.getSum(),
point.getAttributes(),
point.getEpochNanos());
List<ValueAtQuantile> valueAtQuantiles = point.getValues();
for (ValueAtQuantile valueAtQuantile : valueAtQuantiles) {
writePoint(
writer,
instrumentationScopeInfo,
name,
valueAtQuantile.getValue(),
point.getAttributes(),
point.getEpochNanos(),
"quantile",
valueAtQuantile.getQuantile(),
Collections.emptyList(),
0,
0);
}
}
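// Illustration: a summary point with count 5, sum 7, and quantiles 0.9/0.99
// expands into one _count series, one _sum series, and one series per
// quantile, e.g. summary_seconds{...,quantile="0.9"} 0.1 (labels elided).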
private void writePoint(
Writer writer,
InstrumentationScopeInfo instrumentationScopeInfo,
String name,
double value,
Attributes attributes,
long epochNanos)
throws IOException {
writer.write(name);
writeAttributes(writer, instrumentationScopeInfo, attributes);
writer.write(' ');
writeDouble(writer, value);
writer.write(' ');
writeTimestamp(writer, epochNanos);
writer.write('\n');
}
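// Produces a single exposition line of the form (values are illustrative):
//   http_name_total{otel_scope_name="http",kp="vp"} 3.5 1633950672000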
private void writePoint(
Writer writer,
InstrumentationScopeInfo instrumentationScopeInfo,
String name,
double value,
Attributes attributes,
long epochNanos,
String additionalAttrKey,
double additionalAttrValue,
Collection<? extends ExemplarData> exemplars,
double minExemplar,
double maxExemplar)
throws IOException {
writer.write(name);
writeAttributes(
writer, instrumentationScopeInfo, attributes, additionalAttrKey, additionalAttrValue);
writer.write(' ');
writeDouble(writer, value);
writer.write(' ');
writeTimestamp(writer, epochNanos);
writeExemplar(writer, exemplars, minExemplar, maxExemplar);
writer.write('\n');
}
private static void writeAttributes(
Writer writer, InstrumentationScopeInfo instrumentationScopeInfo, Attributes attributes)
throws IOException {
writer.write('{');
writeScopeNameAndVersion(writer, instrumentationScopeInfo);
if (!attributes.isEmpty()) {
writeAttributePairs(writer, /* initialComma= */ true, attributes);
}
writer.write('}');
}
private static void writeAttributes(
Writer writer,
InstrumentationScopeInfo instrumentationScopeInfo,
Attributes attributes,
String additionalAttrKey,
double additionalAttrValue)
throws IOException {
writer.write('{');
writeScopeNameAndVersion(writer, instrumentationScopeInfo);
writer.write(',');
if (!attributes.isEmpty()) {
writeAttributePairs(writer, /* initialComma= */ false, attributes);
writer.write(',');
}
writer.write(additionalAttrKey);
writer.write("=\"");
writeDouble(writer, additionalAttrValue);
writer.write('"');
writer.write('}');
}
private static void writeScopeNameAndVersion(
Writer writer, InstrumentationScopeInfo instrumentationScopeInfo) throws IOException {
writer.write("otel_scope_name=\"");
writer.write(instrumentationScopeInfo.getName());
writer.write("\"");
if (instrumentationScopeInfo.getVersion() != null) {
writer.write(",otel_scope_version=\"");
writer.write(instrumentationScopeInfo.getVersion());
writer.write("\"");
}
}
private static void writeAttributePairs(
Writer writer, boolean initialComma, Attributes attributes) throws IOException {
try {
// This logic handles colliding attribute keys by joining the values,
// separated by a semicolon. It relies on the attributes being sorted so that
// colliding attribute keys appear in consecutive iterations of the loop.
attributes.forEach(
new BiConsumer<AttributeKey<?>, Object>() {
boolean initialAttribute = true;
String previousKey = "";
String previousValue = "";
@Override
public void accept(AttributeKey<?> key, Object value) {
try {
String sanitizedKey = NameSanitizer.INSTANCE.apply(key.getKey());
int compare = sanitizedKey.compareTo(previousKey);
if (compare == 0) {
// This key collides with the previous one. Append the value
// to the previous value instead of writing the key again.
writer.write(';');
} else {
if (compare < 0) {
THROTTLING_LOGGER.log(
Level.WARNING,
"Dropping out-of-order attribute "
+ sanitizedKey
+ "="
+ value
+ ", which occurred after "
+ previousKey
+ ". This can occur when an alternative Attribute implementation is used.");
}
if (!initialAttribute) {
writer.write('"');
}
if (initialComma || !initialAttribute) {
writer.write(',');
}
writer.write(sanitizedKey);
writer.write("=\"");
}
String stringValue = value.toString();
writeEscapedLabelValue(writer, stringValue);
previousKey = sanitizedKey;
previousValue = stringValue;
initialAttribute = false;
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
});
if (!attributes.isEmpty()) {
writer.write('"');
}
} catch (UncheckedIOException e) {
throw e.getCause();
}
}
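// Illustration of the collision handling above: the keys "foo.bar" and
// "foo_bar" both sanitize to "foo_bar", so their values "a" and "b" are
// written as the single label foo_bar="a;b" instead of a duplicated label.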
private static void writeDouble(Writer writer, double d) throws IOException {
if (d == Double.POSITIVE_INFINITY) {
writer.write("+Inf");
} else if (d == Double.NEGATIVE_INFINITY) {
writer.write("-Inf");
} else {
writer.write(Double.toString(d));
}
}
static void writeEscapedLabelValue(Writer writer, String s) throws IOException {
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
switch (c) {
case '\\':
writer.write("\\\\");
break;
case '\"':
writer.write("\\\"");
break;
case '\n':
writer.write("\\n");
break;
default:
writer.write(c);
}
}
}
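// Illustration: the escaping above turns the raw value
//   quote:" backslash:\ newline:<LF>
// into
//   quote:\" backslash:\\ newline:\n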
static class Prometheus004Serializer extends Serializer {
Prometheus004Serializer(Predicate<String> metricNameFilter) {
super(metricNameFilter);
}
@Override
String contentType() {
return "text/plain; version=0.0.4; charset=utf-8";
}
@Override
String headerName(String name, MetricData rawMetric, PrometheusType type) {
return name;
}
@Override
void writeHelp(Writer writer, String help) throws IOException {
for (int i = 0; i < help.length(); i++) {
char c = help.charAt(i);
switch (c) {
case '\\':
writer.write("\\\\");
break;
case '\n':
writer.write("\\n");
break;
default:
writer.write(c);
}
}
}
@Override
void writeTimestamp(Writer writer, long timestampNanos) throws IOException {
writer.write(Long.toString(TimeUnit.NANOSECONDS.toMillis(timestampNanos)));
}
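// Example: an epoch timestamp of 1633950672000000000 ns is written as the
// millisecond value "1633950672000".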
@Override
void writeExemplar(
Writer writer,
Collection<? extends ExemplarData> exemplars,
double minExemplar,
double maxExemplar) {
// Don't write exemplars
}
@Override
void writeEof(Writer writer) {
// Don't write EOF
}
}
static class OpenMetrics100Serializer extends Serializer {
OpenMetrics100Serializer(Predicate<String> metricNameFilter) {
super(metricNameFilter);
}
@Override
String contentType() {
return "application/openmetrics-text; version=1.0.0; charset=utf-8";
}
@Override
String headerName(String name, MetricData rawMetric, PrometheusType type) {
// If the name didn't originally have a _total suffix and one was added during mapping,
// omit the suffix from the header.
String sanitizedOriginalName = NameSanitizer.INSTANCE.apply(rawMetric.getName());
if (!sanitizedOriginalName.endsWith("_total") && (type == PrometheusType.COUNTER)) {
return name.substring(0, name.length() - "_total".length());
}
return name;
}
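// Illustration: a counter named "grpc.name" maps to the sample name
// "grpc_name_total", but the header omits the suffix that was added by the
// mapper (labels elided):
//   # TYPE grpc_name counter
//   grpc_name_total{...} 5.0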
@Override
void writeHelp(Writer writer, String description) throws IOException {
writeEscapedLabelValue(writer, description);
}
@Override
void writeTimestamp(Writer writer, long timestampNanos) throws IOException {
long timestampMillis = TimeUnit.NANOSECONDS.toMillis(timestampNanos);
writer.write(Long.toString(timestampMillis / 1000));
writer.write(".");
long millis = timestampMillis % 1000;
if (millis < 100) {
writer.write('0');
}
if (millis < 10) {
writer.write('0');
}
writer.write(Long.toString(millis));
}
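// Example: 1633950672123000000 ns is written as "1633950672.123". The
// zero-padding above matters for small remainders, e.g. a remainder of 7 ms
// is written as ".007" rather than ".7".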
@Override
void writeExemplar(
Writer writer,
Collection<? extends ExemplarData> exemplars,
double minExemplar,
double maxExemplar)
throws IOException {
for (ExemplarData exemplar : exemplars) {
double value = getExemplarValue(exemplar);
if (value > minExemplar && value <= maxExemplar) {
writer.write(" # {");
SpanContext spanContext = exemplar.getSpanContext();
if (spanContext.isValid()) {
// NB: Output is sorted to match the prometheus client library even though it
// shouldn't matter. OTel generally outputs in trace_id, span_id order, though, so we
// could consider breaking from the reference implementation if that makes sense.
writer.write("span_id=\"");
writer.write(spanContext.getSpanId());
writer.write("\",trace_id=\"");
writer.write(spanContext.getTraceId());
writer.write('"');
}
writer.write("} ");
writeDouble(writer, value);
writer.write(' ');
writeTimestamp(writer, exemplar.getEpochNanos());
// Only write one exemplar.
return;
}
}
}
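// Illustration (ids are illustrative): the exemplar is appended to the sample
// line as
//   ... 2.0 1633950672.000 # {span_id="0000000000000002",trace_id="00000000000000000000000000000001"} 4.0 0.001
// and only the first exemplar with minExemplar < value <= maxExemplar is
// written.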
@Override
void writeEof(Writer writer) throws IOException {
writer.write("# EOF\n");
}
}
static Collection<? extends PointData> getPoints(MetricData metricData) {
switch (metricData.getType()) {
case DOUBLE_GAUGE:
return metricData.getDoubleGaugeData().getPoints();
case DOUBLE_SUM:
return metricData.getDoubleSumData().getPoints();
case LONG_GAUGE:
return metricData.getLongGaugeData().getPoints();
case LONG_SUM:
return metricData.getLongSumData().getPoints();
case SUMMARY:
return metricData.getSummaryData().getPoints();
case HISTOGRAM:
return metricData.getHistogramData().getPoints();
case EXPONENTIAL_HISTOGRAM:
return metricData.getExponentialHistogramData().getPoints();
}
return Collections.emptyList();
}
private static double getExemplarValue(ExemplarData exemplar) {
return exemplar instanceof DoubleExemplarData
? ((DoubleExemplarData) exemplar).getValue()
: (double) ((LongExemplarData) exemplar).getValue();
}
}

View File

@ -1,63 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Stream;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
class NameSanitizerTest {
@Test
void testSanitizerCaching() {
AtomicInteger count = new AtomicInteger();
Function<String, String> delegate = labelName -> labelName + count.incrementAndGet();
NameSanitizer sanitizer = new NameSanitizer(delegate);
String labelName = "http.name";
assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1");
assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1");
assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1");
assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1");
assertThat(sanitizer.apply(labelName)).isEqualTo("http.name1");
assertThat(count).hasValue(1);
}
@ParameterizedTest
@MethodSource("provideMetricNamesForTest")
void testSanitizerCleansing(String unsanitizedName, String sanitizedName) {
Assertions.assertEquals(sanitizedName, NameSanitizer.INSTANCE.apply(unsanitizedName));
}
private static Stream<Arguments> provideMetricNamesForTest() {
return Stream.of(
// valid name - already sanitized
Arguments.of(
"active_directory_ds_replication_network_io",
"active_directory_ds_replication_network_io"),
// consecutive underscores
Arguments.of("cpu_sp__d_hertz", "cpu_sp_d_hertz"),
// leading and trailing underscores - should be fine
Arguments.of("_cpu_speed_hertz_", "_cpu_speed_hertz_"),
// unsupported characters replaced
Arguments.of("metric_unit_$1000", "metric_unit_1000"),
// multiple unsupported characters - whitespace
Arguments.of("sample_me%%$$$_count_ !!@unit include", "sample_me_count_unit_include"),
// metric names cannot start with a number
Arguments.of("1_some_metric_name", "_some_metric_name"),
// metric names can have :
Arguments.of("sample_metric_name__:_per_meter", "sample_metric_name_:_per_meter"),
// Illegal characters
Arguments.of("cpu_sp$$d_hertz", "cpu_sp_d_hertz"));
}
}

View File

@ -33,6 +33,7 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.resources.Resource;
import io.prometheus.metrics.exporter.httpserver.HTTPServer;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.ServerSocket;
@ -52,8 +53,6 @@ import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
class PrometheusHttpServerTest {
private static final AtomicReference<List<MetricData>> metricData = new AtomicReference<>();
@ -65,7 +64,7 @@ class PrometheusHttpServerTest {
static WebClient client;
@RegisterExtension
LogCapturer logs = LogCapturer.create().captureForType(PrometheusHttpServer.class);
LogCapturer logs = LogCapturer.create().captureForType(Otel2PrometheusConverter.class);
@BeforeAll
static void beforeAll() {
@ -108,35 +107,32 @@ class PrometheusHttpServerTest {
.hasMessage("host must not be empty");
}
@ParameterizedTest
@ValueSource(strings = {"/metrics", "/"})
void fetchPrometheus(String endpoint) {
AggregatedHttpResponse response = client.get(endpoint).aggregate().join();
@Test
void fetchPrometheus() {
AggregatedHttpResponse response = client.get("/metrics").aggregate().join();
assertThat(response.status()).isEqualTo(HttpStatus.OK);
assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE))
.isEqualTo("text/plain; version=0.0.4; charset=utf-8");
assertThat(response.contentUtf8())
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
"# HELP grpc_name_total long_description\n"
+ "# TYPE grpc_name_total counter\n"
+ "# HELP grpc_name_total long_description\n"
+ "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n"
+ "# TYPE http_name_total counter\n"
+ "grpc_name_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n"
+ "# HELP http_name_total double_description\n"
+ "http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0\n");
+ "# TYPE http_name_total counter\n"
+ "http_name_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n"
+ "# TYPE target_info gauge\n"
+ "target_info{kr=\"vr\"} 1\n");
}
@ParameterizedTest
@ValueSource(strings = {"/metrics", "/"})
void fetchOpenMetrics(String endpoint) {
@Test
void fetchOpenMetrics() {
AggregatedHttpResponse response =
client
.execute(
RequestHeaders.of(
HttpMethod.GET,
endpoint,
"/metrics",
HttpHeaderNames.ACCEPT,
"application/openmetrics-text"))
.aggregate()
@ -146,33 +142,35 @@ class PrometheusHttpServerTest {
.isEqualTo("application/openmetrics-text; version=1.0.0; charset=utf-8");
assertThat(response.contentUtf8())
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE grpc_name counter\n"
"# TYPE grpc_name counter\n"
+ "# HELP grpc_name long_description\n"
+ "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0.000\n"
+ "grpc_name_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n"
+ "# TYPE http_name counter\n"
+ "# HELP http_name double_description\n"
+ "http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0.000\n"
+ "http_name_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n"
+ "# TYPE target info\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# EOF\n");
}
@Test
void fetchFiltered() {
AggregatedHttpResponse response =
client.get("/?name[]=grpc_name_total&name[]=bears_total").aggregate().join();
client
.get("/?name[]=grpc_name_total&name[]=bears_total&name[]=target_info")
.aggregate()
.join();
assertThat(response.status()).isEqualTo(HttpStatus.OK);
assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE))
.isEqualTo("text/plain; version=0.0.4; charset=utf-8");
assertThat(response.contentUtf8())
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE grpc_name_total counter\n"
""
+ "# HELP grpc_name_total long_description\n"
+ "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n");
+ "# TYPE grpc_name_total counter\n"
+ "grpc_name_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n"
+ "# TYPE target_info gauge\n"
+ "target_info{kr=\"vr\"} 1\n");
}
@Test
@ -182,7 +180,7 @@ class PrometheusHttpServerTest {
.decorator(RetryingClient.newDecorator(RetryRule.failsafe()))
.addHeader(HttpHeaderNames.ACCEPT_ENCODING, "gzip")
.build();
AggregatedHttpResponse response = client.get("/").aggregate().join();
AggregatedHttpResponse response = client.get("/metrics").aggregate().join();
assertThat(response.status()).isEqualTo(HttpStatus.OK);
assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE))
.isEqualTo("text/plain; version=0.0.4; charset=utf-8");
@ -191,15 +189,14 @@ class PrometheusHttpServerTest {
String content = new String(ByteStreams.toByteArray(gis), StandardCharsets.UTF_8);
assertThat(content)
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
"# HELP grpc_name_total long_description\n"
+ "# TYPE grpc_name_total counter\n"
+ "# HELP grpc_name_total long_description\n"
+ "grpc_name_total{otel_scope_name=\"grpc\",otel_scope_version=\"version\",kp=\"vp\"} 5.0 0\n"
+ "# TYPE http_name_total counter\n"
+ "grpc_name_total{kp=\"vp\",otel_scope_name=\"grpc\",otel_scope_version=\"version\"} 5.0\n"
+ "# HELP http_name_total double_description\n"
+ "http_name_total{otel_scope_name=\"http\",otel_scope_version=\"version\",kp=\"vp\"} 3.5 0\n");
+ "# TYPE http_name_total counter\n"
+ "http_name_total{kp=\"vp\",otel_scope_name=\"http\",otel_scope_version=\"version\"} 3.5\n"
+ "# TYPE target_info gauge\n"
+ "target_info{kr=\"vr\"} 1\n");
}
@Test
@ -216,7 +213,7 @@ class PrometheusHttpServerTest {
AggregatedHttpResponse response = client.get("/-/healthy").aggregate().join();
assertThat(response.status()).isEqualTo(HttpStatus.OK);
assertThat(response.contentUtf8()).isEqualTo("Exporter is Healthy.");
assertThat(response.contentUtf8()).isEqualTo("Exporter is healthy.\n");
}
@Test
@ -260,28 +257,22 @@ class PrometheusHttpServerTest {
Collections.singletonList(
ImmutableLongPointData.create(123, 456, Attributes.empty(), 3))))));
AggregatedHttpResponse response = client.get("/").aggregate().join();
AggregatedHttpResponse response = client.get("/metrics").aggregate().join();
assertThat(response.status()).isEqualTo(HttpStatus.OK);
assertThat(response.headers().get(HttpHeaderNames.CONTENT_TYPE))
.isEqualTo("text/plain; version=0.0.4; charset=utf-8");
assertThat(response.contentUtf8())
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE foo_unit_total counter\n"
+ "# HELP foo_unit_total description1\n"
+ "foo_unit_total{otel_scope_name=\"scope1\"} 1.0 0\n"
+ "foo_unit_total{otel_scope_name=\"scope2\"} 2.0 0\n");
"# TYPE foo_unit_total counter\n"
+ "foo_unit_total{otel_scope_name=\"scope1\"} 1.0\n"
+ "foo_unit_total{otel_scope_name=\"scope2\"} 2.0\n"
+ "# TYPE target_info gauge\n"
+ "target_info{kr=\"vr\"} 1\n");
// Validate conflict warning message
assertThat(logs.getEvents()).hasSize(1);
logs.assertContains(
"Metric conflict(s) detected. Multiple metrics with same name but different type: [foo_unit_total]");
// Make another request and confirm warning is only logged once
client.get("/").aggregate().join();
assertThat(logs.getEvents()).hasSize(1);
"Conflicting metric name foo_unit: Found one metric with type counter and one of type gauge. Dropping the one with type gauge.");
}
@Test
@ -293,8 +284,9 @@ class PrometheusHttpServerTest {
@Test
void defaultExecutor() {
assertThat(prometheusServer)
.extracting("executor", as(InstanceOfAssertFactories.type(ThreadPoolExecutor.class)))
.satisfies(executor -> assertThat(executor.getCorePoolSize()).isEqualTo(5));
.extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class)))
.extracting("executorService", as(InstanceOfAssertFactories.type(ThreadPoolExecutor.class)))
.satisfies(executor -> assertThat(executor.getCorePoolSize()).isEqualTo(1));
}
@Test
@ -311,8 +303,10 @@ class PrometheusHttpServerTest {
.setExecutor(scheduledExecutor)
.build()) {
assertThat(server)
.extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class)))
.extracting(
"executor", as(InstanceOfAssertFactories.type(ScheduledThreadPoolExecutor.class)))
"executorService",
as(InstanceOfAssertFactories.type(ScheduledThreadPoolExecutor.class)))
.satisfies(executor -> assertThat(executor).isSameAs(scheduledExecutor));
}
}

View File

@ -1,163 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_HISTOGRAM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE;
import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_LONG_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.SUMMARY;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.stream.Stream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
class PrometheusMetricNameMapperTest {
@Test
void prometheusMetricNameMapperCaching() {
AtomicInteger count = new AtomicInteger();
BiFunction<MetricData, PrometheusType, String> delegate =
(metricData, prometheusType) ->
String.join(
"_",
metricData.getName(),
prometheusType.name(),
Integer.toString(count.incrementAndGet()));
PrometheusMetricNameMapper mapper = new PrometheusMetricNameMapper(delegate);
assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE))
.isEqualTo("monotonic.cumulative.long.sum_GAUGE_1");
assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE))
.isEqualTo("monotonic.cumulative.long.sum_GAUGE_1");
assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE))
.isEqualTo("monotonic.cumulative.long.sum_GAUGE_1");
assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE))
.isEqualTo("monotonic.cumulative.long.sum_GAUGE_1");
assertThat(mapper.apply(MONOTONIC_CUMULATIVE_LONG_SUM, PrometheusType.GAUGE))
.isEqualTo("monotonic.cumulative.long.sum_GAUGE_1");
assertThat(count).hasValue(1);
}
@ParameterizedTest
@MethodSource("provideRawMetricDataForTest")
void metricNameSerializationTest(MetricData metricData, String expectedSerializedName) {
assertEquals(
expectedSerializedName,
PrometheusMetricNameMapper.INSTANCE.apply(
metricData, PrometheusType.forMetric(metricData)));
}
private static Stream<Arguments> provideRawMetricDataForTest() {
return Stream.of(
// special case for gauge
Arguments.of(createSampleMetricData("sample", "1", PrometheusType.GAUGE), "sample_ratio"),
// special case for gauge with drop - metric unit should match "1" to be converted to
// "ratio"
Arguments.of(
createSampleMetricData("sample", "1{dropped}", PrometheusType.GAUGE), "sample"),
// Gauge without "1" as unit
Arguments.of(createSampleMetricData("sample", "unit", PrometheusType.GAUGE), "sample_unit"),
// special case with counter
Arguments.of(
createSampleMetricData("sample", "unit", PrometheusType.COUNTER), "sample_unit_total"),
// special case unit "1", but no gauge - "1" is dropped
Arguments.of(createSampleMetricData("sample", "1", PrometheusType.COUNTER), "sample_total"),
// units expressed as numbers other than 1 are retained
Arguments.of(
createSampleMetricData("sample", "2", PrometheusType.COUNTER), "sample_2_total"),
// metric name with unsupported characters
Arguments.of(
createSampleMetricData("s%%ple", "%/m", PrometheusType.SUMMARY),
"s_ple_percent_per_minute"),
// metric name with dropped portions
Arguments.of(
createSampleMetricData("s%%ple", "%/m", PrometheusType.SUMMARY),
"s_ple_percent_per_minute"),
// metric unit as a number other than 1 is not treated specially
Arguments.of(
createSampleMetricData("metric_name", "2", PrometheusType.SUMMARY), "metric_name_2"),
// metric unit is not appended if the name already contains the unit
Arguments.of(
createSampleMetricData("metric_name_total", "total", PrometheusType.COUNTER),
"metric_name_total"),
// metric unit is not appended if the name already contains the unit - special case for
// total with non-counter type
Arguments.of(
createSampleMetricData("metric_name_total", "total", PrometheusType.SUMMARY),
"metric_name_total"),
// metric unit not appended if present in metric name - special case for ratio
Arguments.of(
createSampleMetricData("metric_name_ratio", "1", PrometheusType.GAUGE),
"metric_name_ratio"),
// metric unit not appended if present in metric name - special case for ratio - type not
// gauge
Arguments.of(
createSampleMetricData("metric_name_ratio", "1", PrometheusType.SUMMARY),
"metric_name_ratio"),
// metric unit is not appended if the name already contains the unit - unit can be anywhere
Arguments.of(
createSampleMetricData("metric_hertz", "hertz", PrometheusType.GAUGE), "metric_hertz"),
// metric unit is not appended if the name already contains the unit - applies to every unit
Arguments.of(
createSampleMetricData("metric_hertz_total", "hertz_total", PrometheusType.COUNTER),
"metric_hertz_total"),
// metric unit is not appended if the name already contains the unit - order matters
Arguments.of(
createSampleMetricData("metric_total_hertz", "hertz_total", PrometheusType.COUNTER),
"metric_total_hertz_hertz_total_total"),
// metric name cannot start with a number
Arguments.of(
createSampleMetricData("2_metric_name", "By", PrometheusType.SUMMARY),
"_metric_name_bytes"));
}
static MetricData createSampleMetricData(
String metricName, String metricUnit, PrometheusType prometheusType) {
switch (prometheusType) {
case SUMMARY:
return ImmutableMetricData.createDoubleSummary(
SUMMARY.getResource(),
SUMMARY.getInstrumentationScopeInfo(),
metricName,
SUMMARY.getDescription(),
metricUnit,
SUMMARY.getSummaryData());
case COUNTER:
return ImmutableMetricData.createLongSum(
MONOTONIC_CUMULATIVE_LONG_SUM.getResource(),
MONOTONIC_CUMULATIVE_LONG_SUM.getInstrumentationScopeInfo(),
metricName,
MONOTONIC_CUMULATIVE_LONG_SUM.getDescription(),
metricUnit,
MONOTONIC_CUMULATIVE_LONG_SUM.getLongSumData());
case GAUGE:
return ImmutableMetricData.createDoubleGauge(
DOUBLE_GAUGE.getResource(),
DOUBLE_GAUGE.getInstrumentationScopeInfo(),
metricName,
DOUBLE_GAUGE.getDescription(),
metricUnit,
DOUBLE_GAUGE.getDoubleGaugeData());
case HISTOGRAM:
return ImmutableMetricData.createDoubleHistogram(
DELTA_HISTOGRAM.getResource(),
DELTA_HISTOGRAM.getInstrumentationScopeInfo(),
metricName,
DELTA_HISTOGRAM.getDescription(),
metricUnit,
DELTA_HISTOGRAM.getHistogramData());
}
throw new IllegalArgumentException();
}
}

View File

@ -6,7 +6,9 @@
package io.opentelemetry.exporter.prometheus;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import io.prometheus.metrics.model.snapshots.Unit;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
@ -16,8 +18,13 @@ class PrometheusUnitsHelperTest {
@ParameterizedTest
@MethodSource("providePrometheusOTelUnitEquivalentPairs")
public void testPrometheusUnitEquivalency(String otlpUnit, String prometheusUnit) {
assertEquals(prometheusUnit, PrometheusUnitsHelper.getEquivalentPrometheusUnit(otlpUnit));
public void testPrometheusUnitEquivalency(String otlpUnit, String expectedPrometheusUnit) {
Unit actualPrometheusUnit = PrometheusUnitsHelper.convertUnit(otlpUnit);
if (expectedPrometheusUnit == null) {
assertNull(actualPrometheusUnit);
} else {
assertEquals(expectedPrometheusUnit, actualPrometheusUnit.toString());
}
}
private static Stream<Arguments> providePrometheusOTelUnitEquivalentPairs() {
@ -63,46 +70,34 @@ class PrometheusUnitsHelperTest {
// Unit not found - Case sensitive
Arguments.of("S", "S"),
// Special case - 1
Arguments.of("1", ""),
Arguments.of("1", null),
// Special Case - Drop metric units in {}
Arguments.of("{packets}", ""),
Arguments.of("{packets}", null),
// Special Case - Dropped metric units only in {}
Arguments.of("{packets}V", "volts"),
// Special Case - Dropped metric units with 'per' unit handling applicable
Arguments.of("{scanned}/{returned}", ""),
Arguments.of("{scanned}/{returned}", null),
// Special Case - Dropped metric units with 'per' unit handling applicable
Arguments.of("{objects}/s", "per_second"),
// Units expressing rate - 'per' units, both units expanded
Arguments.of("m/s", "meters_per_second"),
// Units expressing rate - per minute
Arguments.of("m/m", "meters_per_minute"),
Arguments.of("m/min", "meters_per_minute"),
// Units expressing rate - per day
Arguments.of("A/d", "amperes_per_day"),
// Units expressing rate - per week
Arguments.of("W/w", "watts_per_week"),
Arguments.of("W/wk", "watts_per_week"),
// Units expressing rate - per month
Arguments.of("J/mo", "joules_per_month"),
// Units expressing rate - per year
Arguments.of("TBy/y", "terabytes_per_year"),
Arguments.of("TBy/a", "terabytes_per_year"),
// Units expressing rate - 'per' units, both units unknown
Arguments.of("v/v", "v_per_v"),
// Units expressing rate - 'per' units, first unit unknown
Arguments.of("km/h", "km_per_hour"),
// Units expressing rate - 'per' units, 'per' unit unknown
Arguments.of("g/g", "grams_per_g"),
Arguments.of("g/x", "grams_per_x"),
// Misc - unit containing known abbreviations improperly formatted
Arguments.of("watts_W", "watts_W"),
// Unsupported symbols
Arguments.of("°F", "F"),
// Unsupported symbols - multiple
Arguments.of("unit+=.:,!* & #unused", "unit_unused"),
// Unsupported symbols - 'per' units
Arguments.of("__test $/°C", "test_per_C"),
// Unsupported symbols - whitespace
Arguments.of("\t", ""),
// Null unit
Arguments.of(null, null),
// Misc - unit cleanup - no case match special char
Arguments.of("$1000", "1000"));
Arguments.of("watts_W", "watts_W"));
}
}

View File

@ -1,335 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import static io.opentelemetry.exporter.prometheus.TestConstants.CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE;
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_DOUBLE_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_HISTOGRAM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DELTA_LONG_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_COLLIDING_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.DOUBLE_GAUGE_NO_ATTRIBUTES;
import static io.opentelemetry.exporter.prometheus.TestConstants.LONG_GAUGE;
import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_DOUBLE_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL;
import static io.opentelemetry.exporter.prometheus.TestConstants.MONOTONIC_CUMULATIVE_LONG_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.NON_MONOTONIC_CUMULATIVE_LONG_SUM;
import static io.opentelemetry.exporter.prometheus.TestConstants.SUMMARY;
import static org.assertj.core.api.Assertions.assertThat;
import io.github.netmikey.logunit.api.LogCapturer;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.resources.Resource;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.BiConsumer;
import org.jetbrains.annotations.Nullable;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
class SerializerTest {
@RegisterExtension
private final LogCapturer logCapturer =
LogCapturer.create().captureForLogger(Serializer.class.getName());
@Test
void prometheus004() {
// Same output as the prometheus client library except for these changes, which are
// compatible with Prometheus:
// - TYPE / HELP line order is reversed
// - attributes do not end in a trailing comma
assertThat(
serialize004(
MONOTONIC_CUMULATIVE_DOUBLE_SUM,
MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL,
NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM,
DELTA_DOUBLE_SUM, // Deltas are dropped
MONOTONIC_CUMULATIVE_LONG_SUM,
NON_MONOTONIC_CUMULATIVE_LONG_SUM,
DELTA_LONG_SUM, // Deltas are dropped
DOUBLE_GAUGE,
LONG_GAUGE,
SUMMARY,
DELTA_HISTOGRAM, // Deltas are dropped
CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES,
CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE,
DOUBLE_GAUGE_NO_ATTRIBUTES,
DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES,
DOUBLE_GAUGE_COLLIDING_ATTRIBUTES))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE otel_scope_info info\n"
+ "# HELP otel_scope_info Scope metadata\n"
+ "otel_scope_info{otel_scope_name=\"full\",otel_scope_version=\"version\",ks=\"vs\"} 1\n"
+ "# TYPE monotonic_cumulative_double_sum_seconds_total counter\n"
+ "# HELP monotonic_cumulative_double_sum_seconds_total description\n"
+ "monotonic_cumulative_double_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672000\n"
+ "# TYPE monotonic_cumulative_double_sum_suffix_seconds_total counter\n"
+ "# HELP monotonic_cumulative_double_sum_suffix_seconds_total description\n"
+ "monotonic_cumulative_double_sum_suffix_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672000\n"
+ "# TYPE non_monotonic_cumulative_double_sum_seconds gauge\n"
+ "# HELP non_monotonic_cumulative_double_sum_seconds description\n"
+ "non_monotonic_cumulative_double_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcds\"} 5.0 1633950672000\n"
+ "# TYPE monotonic_cumulative_long_sum_seconds_total counter\n"
+ "# HELP monotonic_cumulative_long_sum_seconds_total unused\n"
+ "monotonic_cumulative_long_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcls\"} 5.0 1633950672000\n"
+ "# TYPE non_monotonic_cumulative_long_sum_seconds gauge\n"
+ "# HELP non_monotonic_cumulative_long_sum_seconds unused\n"
+ "non_monotonic_cumulative_long_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcls\"} 5.0 1633950672000\n"
+ "# TYPE double_gauge_seconds gauge\n"
+ "# HELP double_gauge_seconds unused\n"
+ "double_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"dg\"} 5.0 1633950672000\n"
+ "# TYPE long_gauge_seconds gauge\n"
+ "# HELP long_gauge_seconds unused\n"
+ "long_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"lg\"} 5.0 1633950672000\n"
+ "# TYPE summary_seconds summary\n"
+ "# HELP summary_seconds unused\n"
+ "summary_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 5.0 1633950672000\n"
+ "summary_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 7.0 1633950672000\n"
+ "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.9\"} 0.1 1633950672000\n"
+ "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.99\"} 0.3 1633950672000\n"
+ "# TYPE cumulative_histogram_no_attributes_seconds histogram\n"
+ "# HELP cumulative_histogram_no_attributes_seconds unused\n"
+ "cumulative_histogram_no_attributes_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\"} 2.0 1633950672000\n"
+ "cumulative_histogram_no_attributes_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\"} 1.0 1633950672000\n"
+ "cumulative_histogram_no_attributes_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",le=\"+Inf\"} 2.0 1633950672000\n"
+ "# TYPE cumulative_histogram_single_attribute_seconds histogram\n"
+ "# HELP cumulative_histogram_single_attribute_seconds unused\n"
+ "cumulative_histogram_single_attribute_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 2.0 1633950672000\n"
+ "cumulative_histogram_single_attribute_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 1.0 1633950672000\n"
+ "cumulative_histogram_single_attribute_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\",le=\"+Inf\"} 2.0 1633950672000\n"
+ "# TYPE double_gauge_no_attributes_seconds gauge\n"
+ "# HELP double_gauge_no_attributes_seconds unused\n"
+ "double_gauge_no_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\"} 7.0 1633950672000\n"
+ "# TYPE double_gauge_multiple_attributes_seconds gauge\n"
+ "# HELP double_gauge_multiple_attributes_seconds unused\n"
+ "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672000\n"
+ "# TYPE double_gauge_colliding_attributes_seconds gauge\n"
+ "# HELP double_gauge_colliding_attributes_seconds unused\n"
+ "double_gauge_colliding_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",foo_bar=\"a;b\",type=\"dgma\"} 8.0 1633950672000\n");
assertThat(logCapturer.size()).isZero();
}
@Test
void openMetrics() {
assertThat(
serializeOpenMetrics(
MONOTONIC_CUMULATIVE_DOUBLE_SUM,
MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL,
NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM,
DELTA_DOUBLE_SUM, // Deltas are dropped
MONOTONIC_CUMULATIVE_LONG_SUM,
NON_MONOTONIC_CUMULATIVE_LONG_SUM,
DELTA_LONG_SUM, // Deltas are dropped
DOUBLE_GAUGE,
LONG_GAUGE,
SUMMARY,
DELTA_HISTOGRAM, // Deltas are dropped
CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES,
CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE,
DOUBLE_GAUGE_NO_ATTRIBUTES,
DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES,
DOUBLE_GAUGE_COLLIDING_ATTRIBUTES))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE otel_scope_info info\n"
+ "# HELP otel_scope_info Scope metadata\n"
+ "otel_scope_info{otel_scope_name=\"full\",otel_scope_version=\"version\",ks=\"vs\"} 1\n"
+ "# TYPE monotonic_cumulative_double_sum_seconds counter\n"
+ "# HELP monotonic_cumulative_double_sum_seconds description\n"
+ "monotonic_cumulative_double_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672.000\n"
+ "# TYPE monotonic_cumulative_double_sum_suffix_seconds_total counter\n"
+ "# HELP monotonic_cumulative_double_sum_suffix_seconds_total description\n"
+ "monotonic_cumulative_double_sum_suffix_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcds\"} 5.0 1633950672.000\n"
+ "# TYPE non_monotonic_cumulative_double_sum_seconds gauge\n"
+ "# HELP non_monotonic_cumulative_double_sum_seconds description\n"
+ "non_monotonic_cumulative_double_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcds\"} 5.0 1633950672.000\n"
+ "# TYPE monotonic_cumulative_long_sum_seconds counter\n"
+ "# HELP monotonic_cumulative_long_sum_seconds unused\n"
+ "monotonic_cumulative_long_sum_seconds_total{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"mcls\"} 5.0 1633950672.000\n"
+ "# TYPE non_monotonic_cumulative_long_sum_seconds gauge\n"
+ "# HELP non_monotonic_cumulative_long_sum_seconds unused\n"
+ "non_monotonic_cumulative_long_sum_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"nmcls\"} 5.0 1633950672.000\n"
+ "# TYPE double_gauge_seconds gauge\n"
+ "# HELP double_gauge_seconds unused\n"
+ "double_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"dg\"} 5.0 1633950672.000\n"
+ "# TYPE long_gauge_seconds gauge\n"
+ "# HELP long_gauge_seconds unused\n"
+ "long_gauge_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"lg\"} 5.0 1633950672.000\n"
+ "# TYPE summary_seconds summary\n"
+ "# HELP summary_seconds unused\n"
+ "summary_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 5.0 1633950672.000\n"
+ "summary_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\"} 7.0 1633950672.000\n"
+ "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.9\"} 0.1 1633950672.000\n"
+ "summary_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"s\",quantile=\"0.99\"} 0.3 1633950672.000\n"
+ "# TYPE cumulative_histogram_no_attributes_seconds histogram\n"
+ "# HELP cumulative_histogram_no_attributes_seconds unused\n"
+ "cumulative_histogram_no_attributes_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\"} 2.0 1633950672.000\n"
+ "cumulative_histogram_no_attributes_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\"} 1.0 1633950672.000\n"
+ "cumulative_histogram_no_attributes_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",le=\"+Inf\"} 2.0 1633950672.000 # {span_id=\"0000000000000002\",trace_id=\"00000000000000000000000000000001\"} 4.0 0.001\n"
+ "# TYPE cumulative_histogram_single_attribute_seconds histogram\n"
+ "# HELP cumulative_histogram_single_attribute_seconds unused\n"
+ "cumulative_histogram_single_attribute_seconds_count{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 2.0 1633950672.000\n"
+ "cumulative_histogram_single_attribute_seconds_sum{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\"} 1.0 1633950672.000\n"
+ "cumulative_histogram_single_attribute_seconds_bucket{otel_scope_name=\"full\",otel_scope_version=\"version\",type=\"hs\",le=\"+Inf\"} 2.0 1633950672.000 # {span_id=\"0000000000000002\",trace_id=\"00000000000000000000000000000001\"} 4.0 0.001\n"
+ "# TYPE double_gauge_no_attributes_seconds gauge\n"
+ "# HELP double_gauge_no_attributes_seconds unused\n"
+ "double_gauge_no_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\"} 7.0 1633950672.000\n"
+ "# TYPE double_gauge_multiple_attributes_seconds gauge\n"
+ "# HELP double_gauge_multiple_attributes_seconds unused\n"
+ "double_gauge_multiple_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",animal=\"bear\",type=\"dgma\"} 8.0 1633950672.000\n"
+ "# TYPE double_gauge_colliding_attributes_seconds gauge\n"
+ "# HELP double_gauge_colliding_attributes_seconds unused\n"
+ "double_gauge_colliding_attributes_seconds{otel_scope_name=\"full\",otel_scope_version=\"version\",foo_bar=\"a;b\",type=\"dgma\"} 8.0 1633950672.000\n"
+ "# EOF\n");
assertThat(logCapturer.size()).isZero();
}
@Test
@SuppressLogger(Serializer.class)
void outOfOrderedAttributes() {
// Alternative attributes implementation which sorts entries in the order they were added
// rather than lexicographically. All attributes are retained, a warning is logged, and
// b_key and b.key are not merged.
LinkedHashMap<AttributeKey<?>, Object> attributesMap = new LinkedHashMap<>();
attributesMap.put(AttributeKey.stringKey("b_key"), "val1");
attributesMap.put(AttributeKey.stringKey("a_key"), "val2");
attributesMap.put(AttributeKey.stringKey("b.key"), "val3");
Attributes attributes = new MapAttributes(attributesMap);
MetricData metricData =
ImmutableMetricData.createDoubleSum(
Resource.builder().put("kr", "vr").build(),
InstrumentationScopeInfo.builder("scope").setVersion("1.0.0").build(),
"sum",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L, 1633950672000000000L, attributes, 5))));
assertThat(serialize004(metricData))
.isEqualTo(
"# TYPE target info\n"
+ "# HELP target Target metadata\n"
+ "target_info{kr=\"vr\"} 1\n"
+ "# TYPE sum_seconds_total counter\n"
+ "# HELP sum_seconds_total description\n"
+ "sum_seconds_total{otel_scope_name=\"scope\",otel_scope_version=\"1.0.0\",b_key=\"val1\",a_key=\"val2\",b_key=\"val3\"} 5.0 1633950672000\n");
logCapturer.assertContains(
"Dropping out-of-order attribute a_key=val2, which occurred after b_key. This can occur when an alternative Attribute implementation is used.");
}
@Test
void emptyResource() {
MetricData metricData =
ImmutableMetricData.createDoubleSum(
Resource.empty(),
InstrumentationScopeInfo.builder("scope").setVersion("1.0.0").build(),
"monotonic.cumulative.double.sum",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L, 1633950672000000000L, Attributes.empty(), 5))));
assertThat(serialize004(metricData))
.isEqualTo(
"# TYPE monotonic_cumulative_double_sum_seconds_total counter\n"
+ "# HELP monotonic_cumulative_double_sum_seconds_total description\n"
+ "monotonic_cumulative_double_sum_seconds_total{otel_scope_name=\"scope\",otel_scope_version=\"1.0.0\"} 5.0 1633950672000\n");
}
private static String serialize004(MetricData... metrics) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
new Serializer.Prometheus004Serializer(unused -> true).write(Arrays.asList(metrics), bos);
return bos.toString("UTF-8");
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
private static String serializeOpenMetrics(MetricData... metrics) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
new Serializer.OpenMetrics100Serializer(unused -> true).write(Arrays.asList(metrics), bos);
return bos.toString("UTF-8");
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@SuppressWarnings("unchecked")
private static class MapAttributes implements Attributes {
private final LinkedHashMap<AttributeKey<?>, Object> map;
@SuppressWarnings("NonApiType")
private MapAttributes(LinkedHashMap<AttributeKey<?>, Object> map) {
this.map = map;
}
@Nullable
@Override
public <T> T get(AttributeKey<T> key) {
return (T) map.get(key);
}
@Override
public void forEach(BiConsumer<? super AttributeKey<?>, ? super Object> consumer) {
map.forEach(consumer);
}
@Override
public int size() {
return map.size();
}
@Override
public boolean isEmpty() {
return map.isEmpty();
}
@Override
public Map<AttributeKey<?>, Object> asMap() {
return map;
}
@Override
public AttributesBuilder toBuilder() {
throw new UnsupportedOperationException("not supported");
}
}
}

View File

@ -1,378 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.prometheus;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.TraceFlags;
import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
/** A helper class encapsulating immutable static data that can be shared across all the tests. */
class TestConstants {
private TestConstants() {
// Private constructor to prevent instantiation
}
private static final AttributeKey<String> TYPE = stringKey("type");
static final MetricData MONOTONIC_CUMULATIVE_DOUBLE_SUM =
ImmutableMetricData.createDoubleSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"monotonic.cumulative.double.sum",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "mcds"),
5))));
static final MetricData MONOTONIC_CUMULATIVE_DOUBLE_SUM_WITH_SUFFIX_TOTAL =
ImmutableMetricData.createDoubleSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"monotonic.cumulative.double.sum.suffix.total",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "mcds"),
5))));
static final MetricData NON_MONOTONIC_CUMULATIVE_DOUBLE_SUM =
ImmutableMetricData.createDoubleSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"non.monotonic.cumulative.double.sum",
"description",
"s",
ImmutableSumData.create(
/* isMonotonic= */ false,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "nmcds"),
5))));
static final MetricData DELTA_DOUBLE_SUM =
ImmutableMetricData.createDoubleSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"delta.double.sum",
"unused",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.DELTA,
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "mdds"),
5))));
static final MetricData MONOTONIC_CUMULATIVE_LONG_SUM =
ImmutableMetricData.createLongSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"monotonic.cumulative.long.sum",
"unused",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableLongPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "mcls"),
5))));
static final MetricData NON_MONOTONIC_CUMULATIVE_LONG_SUM =
ImmutableMetricData.createLongSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"non.monotonic.cumulative.long_sum",
"unused",
"s",
ImmutableSumData.create(
/* isMonotonic= */ false,
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableLongPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "nmcls"),
5))));
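
  // Delta-temporality long sum; like DELTA_DOUBLE_SUM, expected to be dropped on export.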
static final MetricData DELTA_LONG_SUM =
ImmutableMetricData.createLongSum(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"delta.long.sum",
"unused",
"s",
ImmutableSumData.create(
/* isMonotonic= */ true,
AggregationTemporality.DELTA,
Collections.singletonList(
ImmutableLongPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "mdls"),
5))));
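
  // OTLP gauges translate directly to Prometheus gauges.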
static final MetricData DOUBLE_GAUGE =
ImmutableMetricData.createDoubleGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"double.gauge",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L, 1633950672000000000L, Attributes.of(TYPE, "dg"), 5))));
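
  // Long-typed gauge; Prometheus stores all sample values as 64-bit floats.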
static final MetricData LONG_GAUGE =
ImmutableMetricData.createLongGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"long.gauge",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableLongPointData.create(
1633947011000000000L, 1633950672000000000L, Attributes.of(TYPE, "lg"), 5))));
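
  // Summary with count, sum, and pre-computed quantiles (0.9 and 0.99).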
static final MetricData SUMMARY =
ImmutableMetricData.createDoubleSummary(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"summary",
"unused",
"s",
ImmutableSummaryData.create(
Collections.singletonList(
ImmutableSummaryPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "s"),
5,
7,
Arrays.asList(
ImmutableValueAtQuantile.create(0.9, 0.1),
ImmutableValueAtQuantile.create(0.99, 0.3))))));
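
  // Delta histograms cannot be exposed to Prometheus either; this metric is expected to be
  // dropped, like the delta sums above.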
static final MetricData DELTA_HISTOGRAM =
ImmutableMetricData.createDoubleHistogram(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"delta.histogram",
"unused",
"s",
ImmutableHistogramData.create(
AggregationTemporality.DELTA,
Collections.singletonList(
ImmutableHistogramPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.empty(),
1.0,
/* hasMin= */ false,
0,
/* hasMax= */ false,
0,
Collections.emptyList(),
Collections.singletonList(2L),
Collections.emptyList()))));
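
  // Cumulative explicit-bucket histogram: no bucket boundaries means a single implicit
  // (-Inf, +Inf] bucket. The exemplar links the recorded value to a span context.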
static final MetricData CUMULATIVE_HISTOGRAM_NO_ATTRIBUTES =
ImmutableMetricData.createDoubleHistogram(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"cumulative.histogram.no.attributes",
"unused",
"s",
ImmutableHistogramData.create(
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableHistogramPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.empty(),
1.0,
/* hasMin= */ false,
0,
/* hasMax= */ false,
0,
Collections.emptyList(),
Collections.singletonList(2L),
Collections.singletonList(
ImmutableDoubleExemplarData.create(
Attributes.empty(),
TimeUnit.MILLISECONDS.toNanos(1L),
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
/* value= */ 4))))));
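
  // Same histogram shape as above, but with a point attribute that should surface as a label.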
static final MetricData CUMULATIVE_HISTOGRAM_SINGLE_ATTRIBUTE =
ImmutableMetricData.createDoubleHistogram(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"cumulative.histogram.single.attribute",
"unused",
"s",
ImmutableHistogramData.create(
AggregationTemporality.CUMULATIVE,
Collections.singletonList(
ImmutableHistogramPointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "hs"),
1.0,
/* hasMin= */ false,
0,
/* hasMax= */ false,
0,
Collections.emptyList(),
Collections.singletonList(2L),
Collections.singletonList(
ImmutableDoubleExemplarData.create(
Attributes.empty(),
TimeUnit.MILLISECONDS.toNanos(1L),
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
/* value= */ 4))))));
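
  // Gauge whose single point has no attributes, i.e. no metric-specific labels on export.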
static final MetricData DOUBLE_GAUGE_NO_ATTRIBUTES =
ImmutableMetricData.createDoubleGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"double.gauge.no.attributes",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L, 1633950672000000000L, Attributes.empty(), 7))));
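
  // Gauge with multiple point attributes, exercising multi-label output.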
static final MetricData DOUBLE_GAUGE_MULTIPLE_ATTRIBUTES =
ImmutableMetricData.createDoubleGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"double.gauge.multiple.attributes",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(TYPE, "dgma", stringKey("animal"), "bear"),
8))));
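
  // "foo.bar" and "foo_bar" both sanitize to the label name "foo_bar"; the exporter is
  // expected to merge the colliding values (e.g. joined with ';') instead of emitting a
  // duplicate label.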
static final MetricData DOUBLE_GAUGE_COLLIDING_ATTRIBUTES =
ImmutableMetricData.createDoubleGauge(
Resource.create(Attributes.of(stringKey("kr"), "vr")),
InstrumentationScopeInfo.builder("full")
.setVersion("version")
.setAttributes(Attributes.of(stringKey("ks"), "vs"))
.build(),
"double.gauge.colliding.attributes",
"unused",
"s",
ImmutableGaugeData.create(
Collections.singletonList(
ImmutableDoublePointData.create(
1633947011000000000L,
1633950672000000000L,
Attributes.of(
TYPE, "dgma", stringKey("foo.bar"), "a", stringKey("foo_bar"), "b"),
8))));
}

View File

@ -15,6 +15,7 @@ import io.opentelemetry.exporter.prometheus.PrometheusHttpServer;
import io.opentelemetry.sdk.autoconfigure.spi.ConfigProperties;
import io.opentelemetry.sdk.autoconfigure.spi.internal.DefaultConfigProperties;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.prometheus.metrics.exporter.httpserver.HTTPServer;
import java.io.IOException;
import java.net.ServerSocket;
import java.util.HashMap;
@ -49,6 +50,7 @@ class PrometheusMetricReaderProviderTest {
try (MetricReader metricReader = provider.createMetricReader(configProperties)) {
assertThat(metricReader)
.isInstanceOf(PrometheusHttpServer.class)
.extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class)))
.extracting("server", as(InstanceOfAssertFactories.type(HttpServer.class)))
.satisfies(
server -> {
@ -78,6 +80,7 @@ class PrometheusMetricReaderProviderTest {
try (MetricReader metricReader =
provider.createMetricReader(DefaultConfigProperties.createFromMap(config))) {
assertThat(metricReader)
.extracting("httpServer", as(InstanceOfAssertFactories.type(HTTPServer.class)))
.extracting("server", as(InstanceOfAssertFactories.type(HttpServer.class)))
.satisfies(
server -> {