Wire Exemplars into the metrics.data package (#3353)

* Add Exemplars into `metrics.data` package

- Add Exemplars to match OTLP spec
- Add assertj helpers for exemplar extraction on points.

* Wire exemplar export to OTLP exporter

* Wire exemplar export to Prometheus exporter

* Add javadoc for AbstractSampledPointDataAssert

* Fixes from review.

* Fixes from review.

* Fixes from review.

* Fixes from spotless.

* Fixes from review.

* Add clarification to javadoc from review.

* One last javadoc cleanup.

* Fixes to javadoc build.

* Update method name from review.

* Fixes from review.
Josh Suereth 2021-07-10 00:20:31 -04:00 committed by GitHub
parent 57bd952756
commit ae4bb364cd
14 changed files with 603 additions and 31 deletions

View File

@ -9,7 +9,13 @@ import static io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATI
import static io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA;
import static io.opentelemetry.proto.metrics.v1.AggregationTemporality.AGGREGATION_TEMPORALITY_UNSPECIFIED;
import com.google.protobuf.ByteString;
import com.google.protobuf.UnsafeByteOperations;
import io.opentelemetry.api.internal.OtelEncodingUtils;
import io.opentelemetry.api.trace.SpanId;
import io.opentelemetry.api.trace.TraceId;
import io.opentelemetry.proto.metrics.v1.AggregationTemporality;
import io.opentelemetry.proto.metrics.v1.Exemplar;
import io.opentelemetry.proto.metrics.v1.Gauge;
import io.opentelemetry.proto.metrics.v1.Histogram;
import io.opentelemetry.proto.metrics.v1.HistogramDataPoint;
@ -21,6 +27,8 @@ import io.opentelemetry.proto.metrics.v1.Sum;
import io.opentelemetry.proto.metrics.v1.Summary;
import io.opentelemetry.proto.metrics.v1.SummaryDataPoint;
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.data.DoubleExemplar;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
@ -28,6 +36,7 @@ import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongGaugeData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.LongSumData;
@ -39,10 +48,15 @@ import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/** Converter from SDK {@link MetricData} to OTLP {@link ResourceMetrics}. */
public final class MetricAdapter {
private static final ThrottlingLogger logger =
new ThrottlingLogger(Logger.getLogger(MetricAdapter.class.getName()));
/** Converts the provided {@link MetricData} to {@link ResourceMetrics}. */
public static List<ResourceMetrics> toProtoResourceMetrics(Collection<MetricData> metricData) {
Map<Resource, Map<InstrumentationLibraryInfo, List<Metric>>> resourceAndLibraryMap =
@ -199,6 +213,7 @@ public final class MetricAdapter {
.getAttributes()
.forEach(
(key, value) -> builder.addAttributes(CommonAdapter.toProtoAttribute(key, value)));
longPoint.getExemplars().forEach(e -> builder.addExemplars(toExemplar(e)));
result.add(builder.build());
}
return result;
@ -216,6 +231,7 @@ public final class MetricAdapter {
.getAttributes()
.forEach(
(key, value) -> builder.addAttributes(CommonAdapter.toProtoAttribute(key, value)));
doublePoint.getExemplars().forEach(e -> builder.addExemplars(toExemplar(e)));
result.add(builder.build());
}
return result;
@ -269,10 +285,48 @@ public final class MetricAdapter {
.getAttributes()
.forEach(
(key, value) -> builder.addAttributes(CommonAdapter.toProtoAttribute(key, value)));
doubleHistogramPoint.getExemplars().forEach(e -> builder.addExemplars(toExemplar(e)));
result.add(builder.build());
}
return result;
}
static Exemplar toExemplar(io.opentelemetry.sdk.metrics.data.Exemplar exemplar) {
// TODO - Use a thread local cache for spanid/traceid -> byte conversion.
Exemplar.Builder builder = Exemplar.newBuilder();
builder.setTimeUnixNano(exemplar.getEpochNanos());
if (exemplar.getSpanId() != null) {
builder.setSpanId(convertSpanId(exemplar.getSpanId()));
}
if (exemplar.getTraceId() != null) {
builder.setTraceId(convertTraceId(exemplar.getTraceId()));
}
exemplar
.getFilteredAttributes()
.forEach(
(key, value) ->
builder.addFilteredAttributes(CommonAdapter.toProtoAttribute(key, value)));
if (exemplar instanceof LongExemplar) {
builder.setAsInt(((LongExemplar) exemplar).getValue());
} else if (exemplar instanceof DoubleExemplar) {
builder.setAsDouble(((DoubleExemplar) exemplar).getValue());
} else {
if (logger.isLoggable(Level.SEVERE)) {
logger.log(Level.SEVERE, "Unable to convert unknown exemplar type: " + exemplar);
}
}
return builder.build();
}
private static ByteString convertTraceId(String id) {
return UnsafeByteOperations.unsafeWrap(
OtelEncodingUtils.bytesFromBase16(id, TraceId.getLength()));
}
private static ByteString convertSpanId(String id) {
return UnsafeByteOperations.unsafeWrap(
OtelEncodingUtils.bytesFromBase16(id, SpanId.getLength()));
}
private MetricAdapter() {}
}
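
For reference, the convertSpanId/convertTraceId helpers above decode the SDK's lowercase base16 span and trace IDs into raw bytes before wrapping them with UnsafeByteOperations. A minimal, self-contained sketch of that decoding (the HexIdDecoding class below is a hypothetical stand-in for OtelEncodingUtils.bytesFromBase16, shown only to illustrate the byte layout the exporter tests expect):

import java.util.Arrays;

// Hypothetical stand-in for OtelEncodingUtils.bytesFromBase16: decodes a
// lowercase hex id of known length into its raw byte representation.
final class HexIdDecoding {
  static byte[] bytesFromHex(String hex, int length) {
    byte[] result = new byte[length / 2];
    for (int i = 0; i < length; i += 2) {
      int high = Character.digit(hex.charAt(i), 16);
      int low = Character.digit(hex.charAt(i + 1), 16);
      result[i / 2] = (byte) ((high << 4) | low);
    }
    return result;
  }

  public static void main(String[] args) {
    // The span id "0000000000000002" (16 hex chars) decodes to 8 bytes ending in 2,
    // matching the ByteString asserted in the exporter tests below.
    System.out.println(Arrays.toString(bytesFromHex("0000000000000002", 16)));
    // prints: [0, 0, 0, 0, 0, 0, 0, 2]
  }
}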

View File

@ -12,10 +12,12 @@ import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.proto.common.v1.AnyValue;
import io.opentelemetry.proto.common.v1.InstrumentationLibrary;
import io.opentelemetry.proto.common.v1.KeyValue;
import io.opentelemetry.proto.metrics.v1.Exemplar;
import io.opentelemetry.proto.metrics.v1.Gauge;
import io.opentelemetry.proto.metrics.v1.Histogram;
import io.opentelemetry.proto.metrics.v1.HistogramDataPoint;
@ -28,6 +30,7 @@ import io.opentelemetry.proto.metrics.v1.Summary;
import io.opentelemetry.proto.metrics.v1.SummaryDataPoint;
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplar;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
@ -35,12 +38,14 @@ import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongGaugeData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.LongSumData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.ValueAtPercentile;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
@ -57,7 +62,19 @@ class MetricAdapterTest {
assertThat(MetricAdapter.toIntDataPoints(Collections.emptyList())).isEmpty();
assertThat(
MetricAdapter.toIntDataPoints(
singletonList(LongPointData.create(123, 456, KV_ATTR, 5))))
singletonList(
LongPointData.create(
123,
456,
KV_ATTR,
5,
Arrays.asList(
LongExemplar.create(
Attributes.of(stringKey("test"), "value"),
2,
/*spanId=*/ "0000000000000002",
/*traceId=*/ "00000000000000000000000000000001",
1))))))
.containsExactly(
NumberDataPoint.newBuilder()
.setStartTimeUnixNano(123)
@ -66,6 +83,20 @@ class MetricAdapterTest {
singletonList(
KeyValue.newBuilder().setKey("k").setValue(stringValue("v")).build()))
.setAsInt(5)
.addExemplars(
Exemplar.newBuilder()
.setTimeUnixNano(2)
.addFilteredAttributes(
KeyValue.newBuilder()
.setKey("test")
.setValue(stringValue("value"))
.build())
.setSpanId(ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 2}))
.setTraceId(
ByteString.copyFrom(
new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}))
.setAsInt(1)
.build())
.build());
assertThat(
MetricAdapter.toIntDataPoints(
@ -206,7 +237,14 @@ class MetricAdapterTest {
Attributes.empty(),
15.3,
ImmutableList.of(),
ImmutableList.of(7L)))))
ImmutableList.of(7L),
ImmutableList.of(
DoubleExemplar.create(
Attributes.of(stringKey("test"), "value"),
2,
/*spanId=*/ "0000000000000002",
/*traceId=*/ "00000000000000000000000000000001",
1.5))))))
.containsExactly(
HistogramDataPoint.newBuilder()
.setStartTimeUnixNano(123)
@ -226,6 +264,20 @@ class MetricAdapterTest {
.setCount(7)
.setSum(15.3)
.addBucketCounts(7)
.addExemplars(
Exemplar.newBuilder()
.setTimeUnixNano(2)
.addFilteredAttributes(
KeyValue.newBuilder()
.setKey("test")
.setValue(stringValue("value"))
.build())
.setSpanId(ByteString.copyFrom(new byte[] {0, 0, 0, 0, 0, 0, 0, 2}))
.setTraceId(
ByteString.copyFrom(
new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}))
.setAsDouble(1.5)
.build())
.build());
}

View File

@ -13,6 +13,7 @@ import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData;
import io.opentelemetry.sdk.metrics.data.Exemplar;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.LongSumData;
import io.opentelemetry.sdk.metrics.data.MetricData;
@ -26,7 +27,9 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import javax.annotation.Nullable;
/**
* Util methods to convert OpenTelemetry Metrics data models to Prometheus data models.
@ -117,12 +120,24 @@ final class MetricAdapter {
case DOUBLE_SUM:
case DOUBLE_GAUGE:
DoublePointData doublePoint = (DoublePointData) pointData;
samples.add(new Sample(name, labelNames, labelValues, doublePoint.getValue()));
samples.add(
createSample(
name,
labelNames,
labelValues,
doublePoint.getValue(),
lastExemplarOrNull(doublePoint.getExemplars())));
break;
case LONG_SUM:
case LONG_GAUGE:
LongPointData longPoint = (LongPointData) pointData;
samples.add(new Sample(name, labelNames, labelValues, longPoint.getValue()));
samples.add(
createSample(
name,
labelNames,
labelValues,
longPoint.getValue(),
lastExemplarOrNull(longPoint.getExemplars())));
break;
case SUMMARY:
addSummarySamples(
@ -183,21 +198,50 @@ final class MetricAdapter {
labelNamesWithLe.add(LABEL_NAME_LE);
long cumulativeCount = 0;
List<Double> boundaries = doubleHistogramPointData.getBoundaries();
List<Long> counts = doubleHistogramPointData.getCounts();
for (int i = 0; i < counts.size(); i++) {
List<String> labelValuesWithLe = new ArrayList<>(labelValues.size() + 1);
// This is the upper boundary (inclusive), i.e. all values in the bucket are <= this value
// (LE - Less-than-or-Equal).
double boundary = doubleHistogramPointData.getBucketUpperBound(i);
labelValuesWithLe.addAll(labelValues);
labelValuesWithLe.add(
doubleToGoString(i < boundaries.size() ? boundaries.get(i) : Double.POSITIVE_INFINITY));
labelValuesWithLe.add(doubleToGoString(boundary));
cumulativeCount += counts.get(i);
samples.add(
new Sample(
name + SAMPLE_SUFFIX_BUCKET, labelNamesWithLe, labelValuesWithLe, cumulativeCount));
createSample(
name + SAMPLE_SUFFIX_BUCKET,
labelNamesWithLe,
labelValuesWithLe,
cumulativeCount,
filterExemplars(
doubleHistogramPointData.getExemplars(),
doubleHistogramPointData.getBucketLowerBound(i),
boundary)));
}
}
@Nullable
private static Exemplar lastExemplarOrNull(Collection<Exemplar> exemplars) {
Exemplar result = null;
for (Exemplar e : exemplars) {
result = e;
}
return result;
}
@Nullable
private static Exemplar filterExemplars(Collection<Exemplar> exemplars, double min, double max) {
Exemplar result = null;
for (Exemplar e : exemplars) {
double value = e.getValueAsDouble();
if (value <= max && value > min) {
result = e;
}
}
return result;
}
private static int estimateNumSamples(int numPoints, MetricDataType type) {
if (type == MetricDataType.SUMMARY) {
// count + sum + estimated 2 percentiles (default MinMaxSumCount aggregator).
@ -224,5 +268,31 @@ final class MetricAdapter {
return Collections.emptyList();
}
private static Sample createSample(
String name,
List<String> labelNames,
List<String> labelValues,
double value,
@Nullable Exemplar exemplar) {
if (exemplar != null) {
return new Sample(name, labelNames, labelValues, value, toPrometheusExemplar(exemplar));
}
return new Sample(name, labelNames, labelValues, value);
}
private static io.prometheus.client.exemplars.Exemplar toPrometheusExemplar(Exemplar exemplar) {
if (exemplar.getSpanId() != null && exemplar.getTraceId() != null) {
return new io.prometheus.client.exemplars.Exemplar(
exemplar.getValueAsDouble(),
// Convert to ms for Prometheus, truncating nanosecond precision.
TimeUnit.NANOSECONDS.toMillis(exemplar.getEpochNanos()),
"trace_id",
exemplar.getTraceId(),
"span_id",
exemplar.getSpanId());
}
return new io.prometheus.client.exemplars.Exemplar(exemplar.getValueAsDouble());
}
private MetricAdapter() {}
}
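
To make the exemplar selection above concrete: a plain sum or gauge sample keeps the last exemplar seen, while each histogram bucket keeps the last exemplar whose value falls in the bucket's half-open range (lowerBound, upperBound]. A small standalone sketch of that bucket rule (the names and values here are illustrative, not the adapter's API):

import java.util.Arrays;
import java.util.List;

// Illustrative version of the bucket rule: keep the last exemplar whose value
// is > lowerBound and <= upperBound, or null if none match.
final class BucketExemplarSelection {
  static Double lastExemplarInRange(List<Double> exemplarValues, double lowerBound, double upperBound) {
    Double result = null;
    for (double value : exemplarValues) {
      if (value > lowerBound && value <= upperBound) {
        result = value;
      }
    }
    return result;
  }

  public static void main(String[] args) {
    List<Double> exemplarValues = Arrays.asList(0.5, 0.7, 3.0);
    // With a single boundary at 1.0 the buckets are (-Inf, 1.0] and (1.0, +Inf).
    System.out.println(lastExemplarInRange(exemplarValues, Double.NEGATIVE_INFINITY, 1.0)); // 0.7
    System.out.println(lastExemplarInRange(exemplarValues, 1.0, Double.POSITIVE_INFINITY)); // 3.0
  }
}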

View File

@ -19,6 +19,7 @@ import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryData;
import io.opentelemetry.sdk.metrics.data.DoubleSummaryPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongGaugeData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.LongSumData;
@ -29,7 +30,10 @@ import io.opentelemetry.sdk.resources.Resource;
import io.prometheus.client.Collector;
import io.prometheus.client.Collector.MetricFamilySamples;
import io.prometheus.client.Collector.MetricFamilySamples.Sample;
import io.prometheus.client.exemplars.Exemplar;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import org.assertj.core.presentation.StandardRepresentation;
import org.junit.jupiter.api.Test;
/** Unit tests for {@link MetricAdapter}. */
@ -171,7 +175,14 @@ class MetricAdapterTest {
KP_VP_ATTR,
1.0,
Collections.emptyList(),
Collections.singletonList(2L)))));
Collections.singletonList(2L),
Collections.singletonList(
LongExemplar.create(
Attributes.empty(),
TimeUnit.MILLISECONDS.toNanos(1L),
/* spanId= */ "span_id",
/* traceId= */ "trace_id",
/* value= */ 4))))));
@Test
void toProtoMetricDescriptorType() {
@ -347,28 +358,48 @@ class MetricAdapterTest {
MetricAdapter.toSamples("full_name", MetricDataType.HISTOGRAM, Collections.emptyList()))
.isEmpty();
assertThat(
MetricAdapter.toSamples(
"full_name",
MetricDataType.HISTOGRAM,
ImmutableList.of(
DoubleHistogramPointData.create(
321,
654,
KP_VP_ATTR,
18.3,
ImmutableList.of(1.0),
ImmutableList.of(4L, 9L)))))
java.util.List<Sample> result =
MetricAdapter.toSamples(
"full_name",
MetricDataType.HISTOGRAM,
ImmutableList.of(
DoubleHistogramPointData.create(
321,
654,
KP_VP_ATTR,
18.3,
ImmutableList.of(1.0),
ImmutableList.of(4L, 9L),
ImmutableList.of(
LongExemplar.create(
Attributes.empty(),
/*recordTime=*/ 0,
"other_span_id",
"other_trace_id",
/*value=*/ 0),
LongExemplar.create(
Attributes.empty(),
/*recordTime=*/ TimeUnit.MILLISECONDS.toNanos(2),
"my_span_id",
"my_trace_id",
/*value=*/ 2)))));
assertThat(result)
.withRepresentation(new ExemplarFriendlyRepresentation())
.containsExactly(
new Sample("full_name_count", ImmutableList.of("kp"), ImmutableList.of("vp"), 13),
new Sample("full_name_sum", ImmutableList.of("kp"), ImmutableList.of("vp"), 18.3),
new Sample(
"full_name_bucket", ImmutableList.of("kp", "le"), ImmutableList.of("vp", "1.0"), 4),
"full_name_bucket",
ImmutableList.of("kp", "le"),
ImmutableList.of("vp", "1.0"),
4,
new Exemplar(0d, 0L, "trace_id", "other_trace_id", "span_id", "other_span_id")),
new Sample(
"full_name_bucket",
ImmutableList.of("kp", "le"),
ImmutableList.of("vp", "+Inf"),
13));
13,
new Exemplar(2d, 2L, "trace_id", "my_trace_id", "span_id", "my_span_id")));
}
@Test
@ -384,4 +415,44 @@ class MetricAdapterTest {
new Sample(
"instrument_name", ImmutableList.of("kp"), ImmutableList.of("vp"), 5))));
}
/**
* Makes assertion failure messages readable, since the Prometheus client does not include exemplars in toString.
*/
private static class ExemplarFriendlyRepresentation extends StandardRepresentation {
@Override
public String fallbackToStringOf(Object object) {
if (object instanceof Exemplar) {
return exemplarToString((Exemplar) object);
}
if (object instanceof Sample) {
Sample sample = (Sample) object;
if (sample.exemplar != null) {
StringBuilder sb = new StringBuilder(sample.toString());
sb.append(" Exemplar=").append(exemplarToString(sample.exemplar));
return sb.toString();
}
}
if (object != null) {
return super.fallbackToStringOf(object);
}
return "null";
}
/** Converts an exemplar into a human-readable string. */
private static String exemplarToString(Exemplar exemplar) {
StringBuilder sb = new StringBuilder("Exemplar{ value=");
sb.append(exemplar.getValue());
sb.append(", ts=");
sb.append(exemplar.getTimestampMs());
sb.append(", labels=");
for (int idx = 0; idx < exemplar.getNumberOfLabels(); ++idx) {
sb.append(exemplar.getLabelName(idx));
sb.append("=");
sb.append(exemplar.getLabelValue(idx));
sb.append(" ");
}
sb.append("}");
return sb.toString();
}
}
}

View File

@ -6,10 +6,12 @@
package io.opentelemetry.sdk.testing.assertj.metrics;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.Exemplar;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.testing.assertj.AttributesAssert;
import io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link PointData}. */
@ -47,4 +49,24 @@ public class AbstractPointDataAssert<
isNotNull();
return OpenTelemetryAssertions.assertThat(actual.getAttributes());
}
/** Returns a convenience API to assert against the {@code exemplars} field. */
public AbstractIterableAssert<?, ? extends Iterable<? extends Exemplar>, Exemplar, ?>
exemplars() {
isNotNull();
return Assertions.assertThat(actual.getExemplars());
}
/**
* Ensures the {@code exemplars} field exactly matches the expected values.
*
* @param exemplars The expected exemplars; they may be given in any order.
*/
public PointAssertT hasExemplars(Exemplar... exemplars) {
isNotNull();
Assertions.assertThat(actual.getExemplars())
.as("exemplars")
.containsExactlyInAnyOrder(exemplars);
return myself;
}
}
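
As exercised in the assertion test changes below, the new helpers slot into the existing fluent chain. A brief usage sketch, assuming a static assertThat entry point from the SDK metrics testing assertions (MetricAssertions is an assumption here; only the chained calls come from this change):

import static io.opentelemetry.sdk.testing.assertj.metrics.MetricAssertions.assertThat; // assumed entry point

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import java.util.Collections;

class ExemplarAssertionSketch {
  void pointCarriesExpectedExemplar() {
    LongExemplar exemplar = LongExemplar.create(Attributes.empty(), 0, "span", "trace", 1);
    LongPointData point =
        LongPointData.create(1, 2, Attributes.empty(), 3, Collections.singletonList(exemplar));

    // Exact, order-insensitive match on the exemplars field.
    assertThat(point).hasValue(3).hasExemplars(exemplar);
    // Or drop into AssertJ's iterable assertions for finer-grained checks.
    assertThat(point).exemplars().hasSize(1);
  }
}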

View File

@ -12,10 +12,12 @@ import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplar;
import io.opentelemetry.sdk.metrics.data.DoubleGaugeData;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.DoubleSumData;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongGaugeData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.LongSumData;
@ -90,8 +92,15 @@ public class MetricAssertionsTest {
// Points
Collections.emptyList()));
private static final DoubleExemplar DOUBLE_EXEMPLAR =
DoubleExemplar.create(Attributes.empty(), 0, "span", "trace", 1.0);
private static final DoublePointData DOUBLE_POINT_DATA =
DoublePointData.create(1, 2, Attributes.empty(), 3.0);
DoublePointData.create(1, 2, Attributes.empty(), 3.0, Collections.emptyList());
private static final DoublePointData DOUBLE_POINT_DATA_WITH_EXEMPLAR =
DoublePointData.create(
1, 2, Attributes.empty(), 3.0, Collections.singletonList(DOUBLE_EXEMPLAR));
private static final MetricData LONG_GAUGE_METRIC =
MetricData.createLongGauge(
@ -130,8 +139,14 @@ public class MetricAssertionsTest {
// Points
Collections.emptyList()));
private static final LongExemplar LONG_EXEMPLAR =
LongExemplar.create(Attributes.empty(), 0, "span", "trace", 1);
private static final LongPointData LONG_POINT_DATA =
LongPointData.create(1, 2, Attributes.empty(), 3);
LongPointData.create(1, 2, Attributes.empty(), 3, Collections.emptyList());
private static final LongPointData LONG_POINT_DATA_WITH_EXEMPLAR =
LongPointData.create(1, 2, Attributes.empty(), 3, Collections.singletonList(LONG_EXEMPLAR));
@Test
void metric_passing() {
@ -223,7 +238,11 @@ public class MetricAssertionsTest {
.hasStartEpochNanos(1)
.hasEpochNanos(2)
.hasValue(3)
.hasAttributes(Attributes.empty());
.hasAttributes(Attributes.empty())
.exemplars()
.isEmpty();
assertThat(DOUBLE_POINT_DATA_WITH_EXEMPLAR).hasExemplars(DOUBLE_EXEMPLAR);
}
@Test
@ -240,6 +259,12 @@ public class MetricAssertionsTest {
assertThat(DOUBLE_POINT_DATA)
.hasAttributes(Attributes.builder().put("x", "y").build()))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(DOUBLE_POINT_DATA)
.hasExemplars(
DoubleExemplar.create(Attributes.empty(), 0, "span", "trace", 1.0)))
.isInstanceOf(AssertionError.class);
}
@Test
@ -248,7 +273,11 @@ public class MetricAssertionsTest {
.hasStartEpochNanos(1)
.hasEpochNanos(2)
.hasValue(3)
.hasAttributes(Attributes.empty());
.hasAttributes(Attributes.empty())
.exemplars()
.isEmpty();
assertThat(LONG_POINT_DATA_WITH_EXEMPLAR).hasExemplars(LONG_EXEMPLAR);
}
@Test
@ -265,6 +294,11 @@ public class MetricAssertionsTest {
assertThat(LONG_POINT_DATA)
.hasAttributes(Attributes.builder().put("x", "y").build()))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(LONG_POINT_DATA)
.hasExemplars(LongExemplar.create(Attributes.empty(), 0, "span", "trace", 1)))
.isInstanceOf(AssertionError.class);
}
@Test

View File

@ -0,0 +1,46 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import javax.annotation.concurrent.Immutable;
/** An {@link Exemplar} with {@code double} measurements. */
@Immutable
@AutoValue
public abstract class DoubleExemplar implements Exemplar {
/**
* Construct a new exemplar.
*
* @param filteredAttributes The set of {@link Attributes} not already associated with the {@link
* PointData}.
* @param recordTimeNanos The time when the sample was recorded, in nanoseconds.
* @param spanId (optional) The associated SpanId.
* @param traceId (optional) The associated TraceId.
* @param value The value recorded.
*/
public static DoubleExemplar create(
Attributes filteredAttributes,
long recordTimeNanos,
String spanId,
String traceId,
double value) {
return new AutoValue_DoubleExemplar(
filteredAttributes, recordTimeNanos, spanId, traceId, value);
}
DoubleExemplar() {}
/** Numerical value of the measurement that was recorded. */
public abstract double getValue();
@Override
public final double getValueAsDouble() {
return getValue();
}
}
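
A quick construction sketch for the new exemplar type (the attribute, IDs, and values are illustrative):

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.DoubleExemplar;

final class DoubleExemplarSketch {
  public static void main(String[] args) {
    DoubleExemplar exemplar =
        DoubleExemplar.create(
            // Attributes dropped by the aggregator but kept on the exemplar.
            Attributes.builder().put("customer", "test").build(),
            /* recordTimeNanos= */ 1_000_000L,
            /* spanId= */ "0000000000000002",
            /* traceId= */ "00000000000000000000000000000001",
            /* value= */ 1.5);
    System.out.println(exemplar.getValue());         // 1.5
    System.out.println(exemplar.getValueAsDouble()); // 1.5 (no coercion needed for doubles)
  }
}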

View File

@ -34,6 +34,25 @@ public abstract class DoubleHistogramPointData implements PointData {
double sum,
List<Double> boundaries,
List<Long> counts) {
return create(
startEpochNanos, epochNanos, attributes, sum, boundaries, counts, Collections.emptyList());
}
/**
* Creates a DoubleHistogramPointData. For a Histogram with N defined boundaries, there should be
* N+1 counts.
*
* @return a DoubleHistogramPointData.
* @throws IllegalArgumentException if the given boundaries/counts were invalid
*/
public static DoubleHistogramPointData create(
long startEpochNanos,
long epochNanos,
Attributes attributes,
double sum,
List<Double> boundaries,
List<Long> counts,
List<Exemplar> exemplars) {
if (counts.size() != boundaries.size() + 1) {
throw new IllegalArgumentException(
"invalid counts: size should be "
@ -57,6 +76,7 @@ public abstract class DoubleHistogramPointData implements PointData {
startEpochNanos,
epochNanos,
attributes,
exemplars,
sum,
totalCount,
Collections.unmodifiableList(new ArrayList<>(boundaries)),
@ -95,6 +115,25 @@ public abstract class DoubleHistogramPointData implements PointData {
*/
public abstract List<Long> getCounts();
/**
* Returns the lower bound of a bucket (exclusive: all values in the bucket are greater than it).
*
* @param bucketIndex The bucket index, should match {@link #getCounts()} index.
*/
public double getBucketLowerBound(int bucketIndex) {
return bucketIndex > 0 ? getBoundaries().get(bucketIndex - 1) : Double.NEGATIVE_INFINITY;
}
/**
* Returns the upper bound of a bucket (inclusive: all values in the bucket are less than or equal to it).
*
* @param bucketIndex The bucket index, should match {@link #getCounts()} index.
*/
public double getBucketUpperBound(int bucketIndex) {
return (bucketIndex < getBoundaries().size())
? getBoundaries().get(bucketIndex)
: Double.POSITIVE_INFINITY;
}
private static boolean isStrictlyIncreasing(List<Double> xs) {
for (int i = 0; i < xs.size() - 1; i++) {
if (xs.get(i).compareTo(xs.get(i + 1)) >= 0) {
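
A short sketch of how the new bucket-bound helpers line up with boundaries and counts: two boundaries produce three buckets covering (-Inf, 1.0], (1.0, 5.0], and (5.0, +Inf) (the numbers below are illustrative only):

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.DoubleHistogramPointData;
import java.util.Arrays;

final class HistogramBucketBounds {
  public static void main(String[] args) {
    // Two boundaries => three counts, one per bucket.
    DoubleHistogramPointData point =
        DoubleHistogramPointData.create(
            0, 100, Attributes.empty(), 18.3,
            Arrays.asList(1.0, 5.0),
            Arrays.asList(4L, 9L, 2L));
    System.out.println(point.getBucketLowerBound(0)); // -Infinity
    System.out.println(point.getBucketUpperBound(0)); // 1.0
    System.out.println(point.getBucketLowerBound(1)); // 1.0
    System.out.println(point.getBucketUpperBound(2)); // Infinity
  }
}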

View File

@ -7,6 +7,8 @@ package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/**
@ -16,9 +18,40 @@ import javax.annotation.concurrent.Immutable;
@Immutable
@AutoValue
public abstract class DoublePointData implements PointData {
/**
* Creates a {@link DoublePointData}.
*
* @param startEpochNanos The starting time for the period where this point was sampled. Note:
* While start time is optional in OTLP, all SDKs should produce it for all their metrics, so
* it is required here.
* @param epochNanos The ending time for the period when this value was sampled.
* @param attributes The set of attributes associated with this point.
* @param value The value that was sampled.
*/
public static DoublePointData create(
long startEpochNanos, long epochNanos, Attributes attributes, double value) {
return new AutoValue_DoublePointData(startEpochNanos, epochNanos, attributes, value);
return create(startEpochNanos, epochNanos, attributes, value, Collections.emptyList());
}
/**
* Creates a {@link DoublePointData}.
*
* @param startEpochNanos The starting time for the period where this point was sampled. Note:
* While start time is optional in OTLP, all SDKs should produce it for all their metrics, so
* it is required here.
* @param epochNanos The ending time for the period when this value was sampled.
* @param attributes The set of attributes associated with this point.
* @param value The value that was sampled.
* @param exemplars A collection of interesting sampled values from this time period.
*/
public static DoublePointData create(
long startEpochNanos,
long epochNanos,
Attributes attributes,
double value,
List<Exemplar> exemplars) {
return new AutoValue_DoublePointData(startEpochNanos, epochNanos, attributes, exemplars, value);
}
DoublePointData() {}

View File

@ -7,6 +7,7 @@ package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
@ -17,6 +18,17 @@ import javax.annotation.concurrent.Immutable;
@Immutable
@AutoValue
public abstract class DoubleSummaryPointData implements PointData {
/**
* Creates a {@link DoubleSummaryPointData}.
*
* @param startEpochNanos (optional) The starting time for the period where this point was
* sampled.
* @param epochNanos The ending time for the period when this value was sampled.
* @param attributes The set of attributes associated with this point.
* @param count The number of measurements being summarized.
* @param sum The sum of the measurements being summarized.
* @param percentileValues Calculations of percentile values from measurements.
*/
public static DoubleSummaryPointData create(
long startEpochNanos,
long epochNanos,
@ -25,7 +37,13 @@ public abstract class DoubleSummaryPointData implements PointData {
double sum,
List<ValueAtPercentile> percentileValues) {
return new AutoValue_DoubleSummaryPointData(
startEpochNanos, epochNanos, attributes, count, sum, percentileValues);
startEpochNanos,
epochNanos,
attributes,
Collections.emptyList(),
count,
sum,
percentileValues);
}
DoubleSummaryPointData() {}

View File

@ -0,0 +1,53 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.data;
import io.opentelemetry.api.common.Attributes;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
/**
* A sample input measurement.
*
* <p>Exemplars also hold information about the environment when the measurement was recorded, for
* example the span and trace ID of the active span when the exemplar was recorded.
*/
@Immutable
public interface Exemplar {
/**
* Returns the set of key/value pairs that were filtered out by the aggregator but recorded
* alongside the original measurement. Only key/value pairs dropped by the aggregator should be
* included here.
*/
Attributes getFilteredAttributes();
/** Returns the timestamp in nanoseconds at which the measurement was collected. */
long getEpochNanos();
/**
* (Optional) Span ID of the exemplar trace.
*
* <p>Span ID may be {@code null} if the measurement is not recorded inside a trace or the trace
* was not sampled.
*/
@Nullable
String getSpanId();
/**
* (Optional) Trace ID of the exemplar trace.
*
* <p>Trace ID may be {@code null} if the measurement is not recorded inside a trace or if the
* trace is not sampled.
*/
@Nullable
String getTraceId();
/**
* Coerces this exemplar to a double value.
*
* <p>Note: This could cause a loss of precision for {@code long} measurements.
*/
double getValueAsDouble();
}

View File

@ -0,0 +1,45 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import javax.annotation.concurrent.Immutable;
/** An {@link Exemplar} with {@code long} measurements. */
@Immutable
@AutoValue
public abstract class LongExemplar implements Exemplar {
/**
* Construct a new exemplar.
*
* @param filteredAttributes The set of {@link Attributes} not already associated with the {@link
* PointData}.
* @param recordTimeNanos The time when the sample was recorded, in nanoseconds.
* @param spanId (optional) The associated SpanId.
* @param traceId (optional) The associated TraceId.
* @param value The value recorded.
*/
public static LongExemplar create(
Attributes filteredAttributes,
long recordTimeNanos,
String spanId,
String traceId,
long value) {
return new AutoValue_LongExemplar(filteredAttributes, recordTimeNanos, spanId, traceId, value);
}
LongExemplar() {}
/** Numerical value of the measurement that was recorded. */
public abstract long getValue();
@Override
public final double getValueAsDouble() {
return (double) getValue();
}
}

View File

@ -7,6 +7,8 @@ package io.opentelemetry.sdk.metrics.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/**
@ -28,8 +30,38 @@ public abstract class LongPointData implements PointData {
*/
public abstract long getValue();
/**
* Creates a {@link LongPointData}.
*
* @param startEpochNanos The starting time for the period where this point was sampled. Note:
* While start time is optional in OTLP, all SDKs should produce it for all their metrics, so
* it is required here.
* @param epochNanos The ending time for the period when this value was sampled.
* @param attributes The set of attributes associated with this point.
* @param value The value that was sampled.
*/
public static LongPointData create(
long startEpochNanos, long epochNanos, Attributes attributes, long value) {
return new AutoValue_LongPointData(startEpochNanos, epochNanos, attributes, value);
return create(startEpochNanos, epochNanos, attributes, value, Collections.emptyList());
}
/**
* Creates a {@link LongPointData}.
*
* @param startEpochNanos The starting time for the period where this point was sampled. Note:
* While start time is optional in OTLP, all SDKs should produce it for all their metrics, so
* it is required here.
* @param epochNanos The ending time for the period when this value was sampled.
* @param attributes The set of attributes associated with this point.
* @param value The value that was sampled.
* @param exemplars A collection of interesting sampled values from this time period.
*/
public static LongPointData create(
long startEpochNanos,
long epochNanos,
Attributes attributes,
long value,
List<Exemplar> exemplars) {
return new AutoValue_LongPointData(startEpochNanos, epochNanos, attributes, exemplars, value);
}
}
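
Putting the new pieces together, a point can now be constructed with exemplars attached (the values below are illustrative):

import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.LongExemplar;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import java.util.Collections;

final class ExemplarPointSketch {
  public static void main(String[] args) {
    LongExemplar exemplar =
        LongExemplar.create(
            Attributes.empty(),
            /* recordTimeNanos= */ 2,
            /* spanId= */ "0000000000000002",
            /* traceId= */ "00000000000000000000000000000001",
            /* value= */ 1);
    LongPointData point =
        LongPointData.create(123, 456, Attributes.empty(), 5, Collections.singletonList(exemplar));
    System.out.println(point.getExemplars().size()); // 1
  }
}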

View File

@ -6,6 +6,7 @@
package io.opentelemetry.sdk.metrics.data;
import io.opentelemetry.api.common.Attributes;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/**
@ -38,4 +39,6 @@ public interface PointData {
* @return the attributes associated with this {@code Point}.
*/
Attributes getAttributes();
/** Returns the list of exemplars collected from measurements that were used to form the data point. */
List<Exemplar> getExemplars();
}