Merge :sdk:metrics-testing into :sdk:testing (#5144)

Authored by jack-berg on 2023-01-24 10:42:04 -06:00, committed by GitHub
parent ae7b0a917b
commit 59043802e6
28 changed files with 609 additions and 772 deletions

View File

@ -1,4 +1,34 @@
Comparing source compatibility of against
+++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramAssert (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW METHOD: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramAssert hasPointsSatisfying(java.util.function.Consumer[])
+++ NEW ANNOTATION: java.lang.SafeVarargs
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramAssert hasPointsSatisfying(java.lang.Iterable)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramAssert isCumulative()
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramAssert isDelta()
+++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramBucketsAssert (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramBucketsAssert hasCounts(java.util.List)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramBucketsAssert hasOffset(int)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramBucketsAssert hasTotalCount(long)
+++ NEW CLASS: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert (not serializable)
+++ CLASS FILE FORMAT VERSION: 52.0 <- n.a.
+++ NEW SUPERCLASS: io.opentelemetry.sdk.testing.assertj.AbstractPointAssert
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasCount(long)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasExemplars(io.opentelemetry.sdk.metrics.data.DoubleExemplarData[])
+++ NEW METHOD: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasExemplarsSatisfying(java.util.function.Consumer[])
+++ NEW ANNOTATION: java.lang.SafeVarargs
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasExemplarsSatisfying(java.lang.Iterable)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasMax(double)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasMin(double)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasNegativeBucketsSatisfying(java.util.function.Consumer)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasPositiveBucketsSatisfying(java.util.function.Consumer)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasScale(int)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasSum(double)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert hasZeroCount(long)
*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.testing.assertj.MetricAssert (not serializable)
=== CLASS FILE FORMAT VERSION: 52.0 <- 52.0
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.MetricAssert hasExponentialHistogramSatisfying(java.util.function.Consumer)
*** MODIFIED CLASS: PUBLIC FINAL io.opentelemetry.sdk.testing.assertj.TracesAssert (not serializable)
=== CLASS FILE FORMAT VERSION: 52.0 <- 52.0
+++ NEW METHOD: PUBLIC(+) STATIC(+) io.opentelemetry.sdk.testing.assertj.TracesAssert assertThat(java.util.List)
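
For orientation, a minimal usage sketch of the merged assertion API (not part of this diff; ExponentialHistogramAssertionSketch, the metricData parameter, and the asserted values are illustrative, mirroring the test updates further down). Assertions are reached through the existing OpenTelemetryAssertions entry point and chain MetricAssert -> ExponentialHistogramAssert -> ExponentialHistogramPointAssert -> ExponentialHistogramBucketsAssert:

import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;

import io.opentelemetry.sdk.metrics.data.MetricData;
import java.util.Arrays;

class ExponentialHistogramAssertionSketch {
  static void verify(MetricData metricData) {
    // Fluent chain over the new assertion types listed above.
    assertThat(metricData)
        .hasName("testHistogram")
        .hasExponentialHistogramSatisfying(
            expHistogram ->
                expHistogram
                    .isCumulative()
                    .hasPointsSatisfying(
                        point ->
                            point
                                .hasCount(2)
                                .hasSum(25)
                                .hasPositiveBucketsSatisfying(
                                    buckets ->
                                        buckets
                                            .hasOffset(114)
                                            .hasCounts(Arrays.asList(1L, 0L, 0L, 0L, 1L)))));
  }
}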

View File

@ -41,7 +41,6 @@ import io.opentelemetry.proto.metrics.v1.Summary;
import io.opentelemetry.proto.metrics.v1.SummaryDataPoint;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
@ -49,6 +48,7 @@ import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
@ -422,8 +422,8 @@ class MetricsRequestMarshalerTest {
1,
null,
null,
new TestExponentialHistogramBuckets(0, 0, Collections.emptyList()),
new TestExponentialHistogramBuckets(0, 0, Collections.emptyList()),
ImmutableExponentialHistogramBuckets.create(0, 0, Collections.emptyList()),
ImmutableExponentialHistogramBuckets.create(0, 0, Collections.emptyList()),
123,
456,
Attributes.empty(),
@ -434,8 +434,9 @@ class MetricsRequestMarshalerTest {
1,
3.3,
80.1,
new TestExponentialHistogramBuckets(0, 1, ImmutableList.of(1L, 0L, 2L)),
new TestExponentialHistogramBuckets(0, 0, Collections.emptyList()),
ImmutableExponentialHistogramBuckets.create(
0, 1, ImmutableList.of(1L, 0L, 2L)),
ImmutableExponentialHistogramBuckets.create(0, 0, Collections.emptyList()),
123,
456,
Attributes.of(stringKey("key"), "value"),
@ -837,9 +838,9 @@ class MetricsRequestMarshalerTest {
257,
20.1,
44.3,
new TestExponentialHistogramBuckets(
ImmutableExponentialHistogramBuckets.create(
20, -1, ImmutableList.of(0L, 128L, 1L << 32)),
new TestExponentialHistogramBuckets(
ImmutableExponentialHistogramBuckets.create(
20, 1, ImmutableList.of(0L, 128L, 1L << 32)),
123,
456,
@ -1149,41 +1150,4 @@ class MetricsRequestMarshalerTest {
}
return new String(bos.toByteArray(), StandardCharsets.UTF_8);
}
/**
* Helper class for creating Exponential Histogram bucket data directly without needing to record.
* Essentially, mocking out the bucket operations and downscaling.
*/
private static class TestExponentialHistogramBuckets implements ExponentialHistogramBuckets {
private final int scale;
private final int offset;
private final List<Long> bucketCounts;
TestExponentialHistogramBuckets(int scale, int offset, List<Long> bucketCounts) {
this.scale = scale;
this.offset = offset;
this.bucketCounts = bucketCounts;
}
@Override
public int getScale() {
return scale;
}
@Override
public int getOffset() {
return offset;
}
@Override
public List<Long> getBucketCounts() {
return bucketCounts;
}
@Override
public long getTotalCount() {
return getBucketCounts().stream().reduce(0L, Long::sum);
}
}
}

View File

@ -1,6 +0,0 @@
# OpenTelemetry Metrics SDK Testing
[![Javadocs][javadoc-image]][javadoc-url]
[javadoc-image]: https://www.javadoc.io/badge/io.opentelemetry/opentelemetry-sdk-metrics-testing.svg
[javadoc-url]: https://www.javadoc.io/doc/io.opentelemetry/opentelemetry-sdk-metrics-testing

View File

@ -1,21 +0,0 @@
plugins {
id("otel.java-conventions")
}
description = "OpenTelemetry Metrics SDK Testing utilities"
otelJava.moduleName.set("io.opentelemetry.sdk.metrics-testing")
dependencies {
api(project(":api:all"))
api(project(":sdk:all"))
api(project(":sdk:metrics"))
api(project(":sdk:testing"))
compileOnly("org.assertj:assertj-core")
compileOnly("junit:junit")
compileOnly("org.junit.jupiter:junit-jupiter-api")
annotationProcessor("com.google.auto.value:auto-value")
testImplementation("junit:junit")
}

View File

@ -1 +0,0 @@
otel.release=alpha

View File

@ -1,74 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import java.util.Arrays;
import java.util.stream.Collectors;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link PointData}. */
public class AbstractPointDataAssert<
PointAssertT extends AbstractPointDataAssert<PointAssertT, PointT>,
PointT extends PointData>
extends AbstractAssert<PointAssertT, PointT> {
protected AbstractPointDataAssert(PointT actual, Class<PointAssertT> assertClass) {
super(actual, assertClass);
}
/** Ensures the {@code start_epoch_nanos} field matches the expected value. */
public PointAssertT hasStartEpochNanos(long expected) {
isNotNull();
Assertions.assertThat(actual.getStartEpochNanos()).as("startEpochNanos").isEqualTo(expected);
return myself;
}
/** Ensures the {@code epoch_nanos} field matches the expected value. */
public PointAssertT hasEpochNanos(long expected) {
isNotNull();
Assertions.assertThat(actual.getEpochNanos()).as("epochNanos").isEqualTo(expected);
return myself;
}
/** Ensures the {@code attributes} field matches the expected value. */
public PointAssertT hasAttributes(Attributes expected) {
isNotNull();
Assertions.assertThat(actual.getAttributes()).as("attributes").isEqualTo(expected);
return myself;
}
/** Returns convenience API to assert against the {@code attributes} field. */
public AttributesAssert attributes() {
isNotNull();
return OpenTelemetryAssertions.assertThat(actual.getAttributes());
}
/** Returns convenience API to assert against the {@code exemplars} field. */
public AbstractIterableAssert<?, ? extends Iterable<? extends ExemplarData>, ExemplarData, ?>
exemplars() {
isNotNull();
return Assertions.assertThat(actual.getExemplars());
}
/**
* Ensures the {@code exemplars} field matches the expected value.
*
* @param exemplars The list of exemplars that will be checked, can be in any order.
*/
public PointAssertT hasExemplars(ExemplarData... exemplars) {
isNotNull();
// TODO(anuraaga): This code will be removed so use hacky approach to check ExemplarData.
Assertions.assertThat(
actual.getExemplars().stream().map(Object.class::cast).collect(Collectors.toList()))
.as("exemplars")
.containsExactlyInAnyOrderElementsOf(Arrays.asList(exemplars));
return myself;
}
}

View File

@ -1,67 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link ExemplarData}. */
public class ExemplarDataAssert extends AbstractAssert<ExemplarDataAssert, ExemplarData> {
protected ExemplarDataAssert(ExemplarData actual) {
super(actual, ExemplarDataAssert.class);
}
/** Ensures the {@code epochNanos} field matches the expected value. */
public ExemplarDataAssert hasEpochNanos(long expected) {
isNotNull();
Assertions.assertThat(actual.getEpochNanos()).as("epochNanos").isEqualTo(expected);
return this;
}
/** Ensures the {@code spanId} field matches the expected value. */
public ExemplarDataAssert hasSpanId(String expected) {
isNotNull();
Assertions.assertThat(actual.getSpanContext().getSpanId()).as("spanId").isEqualTo(expected);
return this;
}
/** Ensures the {@code traceId} field matches the expected value. */
public ExemplarDataAssert hasTraceId(String expected) {
isNotNull();
Assertions.assertThat(actual.getSpanContext().getTraceId()).as("traceId").isEqualTo(expected);
return this;
}
/** Ensures the {@code value} field matches the expected value. */
public ExemplarDataAssert hasValue(double expected) {
isNotNull();
double value =
actual instanceof DoubleExemplarData
? ((DoubleExemplarData) actual).getValue()
: ((LongExemplarData) actual).getValue();
Assertions.assertThat(value).as("value").isEqualTo(expected);
return this;
}
/** Ensures the {@code filteredAttributes} field matches the expected value. */
public ExemplarDataAssert hasFilteredAttributes(Attributes expected) {
isNotNull();
Assertions.assertThat(actual.getFilteredAttributes())
.as("filtered_attributes")
.isEqualTo(expected);
return this;
}
/** Returns convenience API to assert against the {@code filteredAttributes} field. */
public AttributesAssert filteredAttributes() {
isNotNull();
return OpenTelemetryAssertions.assertThat(actual.getFilteredAttributes());
}
}

View File

@ -1,60 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link ExponentialHistogramPointData}. */
public class ExponentialHistogramPointDataAssert
extends AbstractPointDataAssert<
ExponentialHistogramPointDataAssert, ExponentialHistogramPointData> {
protected ExponentialHistogramPointDataAssert(ExponentialHistogramPointData actual) {
super(actual, ExponentialHistogramPointDataAssert.class);
}
/** Ensures the {@code sum} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasSum(double expected) {
isNotNull();
Assertions.assertThat(actual.getSum()).as("sum").isEqualTo(expected);
return this;
}
/** Ensures the {@code min} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasMin(double expected) {
isNotNull();
Assertions.assertThat(actual.getMin()).as("min").isEqualTo(expected);
return this;
}
/** Ensures the {@code max} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasMax(double expected) {
isNotNull();
Assertions.assertThat(actual.getMax()).as("max").isEqualTo(expected);
return this;
}
/** Ensures the {@code count} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasCount(long expected) {
isNotNull();
Assertions.assertThat(actual.getCount()).as("count").isEqualTo(expected);
return this;
}
/** Ensures the {@code scale} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasScale(int expected) {
isNotNull();
Assertions.assertThat(actual.getScale()).as("scale").isEqualTo(expected);
return this;
}
/** Ensures the {@code zeroCount} field matches the expected value. */
public ExponentialHistogramPointDataAssert hasZeroCount(long expected) {
isNotNull();
Assertions.assertThat(actual.getZeroCount()).as("zeroCount").isEqualTo(expected);
return this;
}
}

View File

@ -1,37 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import org.assertj.core.api.Assertions;
/** Test assertions for data heading to exporters within the Metrics SDK. */
public final class MetricAssertions extends Assertions {
/** Returns an assertion for {@link MetricData}. */
public static MetricDataAssert assertThat(MetricData metric) {
return new MetricDataAssert(metric);
}
/** Returns an assertion for {@link ExponentialHistogramPointData}. */
public static ExponentialHistogramPointDataAssert assertThat(
ExponentialHistogramPointData point) {
return new ExponentialHistogramPointDataAssert(point);
}
/** Returns an assertion for {@link ExponentialHistogramBuckets}. */
public static ExponentialHistogramBucketsAssert assertThat(ExponentialHistogramBuckets buckets) {
return new ExponentialHistogramBucketsAssert(buckets);
}
public static ExemplarDataAssert assertThat(ExemplarData exemplar) {
return new ExemplarDataAssert(exemplar);
}
private MetricAssertions() {}
}

View File

@ -1,111 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.resources.Resource;
import org.assertj.core.api.AbstractAssert;
/** Test assertions for {@link MetricData}. */
public class MetricDataAssert extends AbstractAssert<MetricDataAssert, MetricData> {
protected MetricDataAssert(MetricData actual) {
super(actual, MetricDataAssert.class);
}
/** Ensures the {@link Resource} associated with a metric matches the expected value. */
public MetricDataAssert hasResource(Resource resource) {
isNotNull();
if (!actual.getResource().equals(resource)) {
failWithActualExpectedAndMessage(
actual,
"resource: " + resource,
"Expected MetricData to have resource <%s> but found <%s>",
resource,
actual.getResource());
}
return this;
}
/**
* Ensures the {@link InstrumentationScopeInfo} associated with a metric matches the expected
* value.
*/
public MetricDataAssert hasInstrumentationScope(
InstrumentationScopeInfo instrumentationScopeInfo) {
isNotNull();
if (!actual.getInstrumentationScopeInfo().equals(instrumentationScopeInfo)) {
failWithActualExpectedAndMessage(
actual,
"instrumentation scope: " + instrumentationScopeInfo,
"Expected MetricData to have resource <%s> but found <%s>",
instrumentationScopeInfo,
actual.getInstrumentationScopeInfo());
}
return this;
}
/** Ensures the {@code name} field matches the expected value. */
public MetricDataAssert hasName(String name) {
isNotNull();
if (!actual.getName().equals(name)) {
failWithActualExpectedAndMessage(
actual,
"name: " + name,
"Expected MetricData to have name <%s> but found <%s>",
name,
actual.getName());
}
return this;
}
/** Ensures the {@code description} field matches the expected value. */
public MetricDataAssert hasDescription(String description) {
isNotNull();
if (!actual.getDescription().equals(description)) {
failWithActualExpectedAndMessage(
actual,
"description: " + description,
"Expected MetricData to have description <%s> but found <%s>",
description,
actual.getDescription());
}
return this;
}
/** Ensures the {@code unit} field matches the expected value. */
public MetricDataAssert hasUnit(String unit) {
isNotNull();
if (!actual.getUnit().equals(unit)) {
failWithActualExpectedAndMessage(
actual,
"unit: " + unit,
"Expected MetricData to have unit <%s> but found <%s>",
unit,
actual.getUnit());
}
return this;
}
/**
* Ensures this {@link MetricData} is an {@code ExponentialHistogram}.
*
* @return convenience API to assert against the {@code ExponentialHistogram}.
*/
public ExponentialHistogramAssert hasExponentialHistogram() {
isNotNull();
if (actual.getType() != MetricDataType.EXPONENTIAL_HISTOGRAM) {
failWithActualExpectedAndMessage(
actual,
"type: EXPONENTIAL_HISTOGRAM",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.EXPONENTIAL_HISTOGRAM,
actual.getType());
}
return new ExponentialHistogramAssert(actual.getExponentialHistogramData());
}
}

View File

@ -1,9 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
@ParametersAreNonnullByDefault
package io.opentelemetry.sdk.testing.assertj;
import javax.annotation.ParametersAreNonnullByDefault;

View File

@ -1,116 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Collections;
import org.junit.jupiter.api.Test;
public class MetricAssertionsTest {
private static final Resource RESOURCE =
Resource.create(Attributes.of(stringKey("resource_key"), "resource_value"));
private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO =
InstrumentationScopeInfo.create("instrumentation_library");
private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC =
ImmutableMetricData.createExponentialHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "exponential_histogram",
/* description= */ "description",
/* unit= */ "unit",
ImmutableExponentialHistogramData.create(
AggregationTemporality.CUMULATIVE,
// Points
Collections.emptyList()));
private static final MetricData EXPONENTIAL_HISTOGRAM_DELTA_METRIC =
ImmutableMetricData.createExponentialHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "exponential_histogram_delta",
/* description= */ "description",
/* unit= */ "unit",
ImmutableExponentialHistogramData.create(
AggregationTemporality.DELTA,
// Points
Collections.emptyList()));
private static final MetricData DOUBLE_GAUGE_METRIC =
ImmutableMetricData.createDoubleGauge(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "gauge",
/* description= */ "description",
/* unit= */ "unit",
ImmutableGaugeData.create(
// Points
Collections.emptyList()));
@Test
void metric_passing() {
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("exponential_histogram")
.hasDescription("description")
.hasUnit("unit");
}
@Test
void metric_fails() {
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasResource(
Resource.create(Attributes.of(stringKey("monkey_key"), "resource_value"))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("instrumentation_library_for_monkeys")))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasName("Monkeys"))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasDescription("Monkeys"))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasUnit("Monkeys"))
.isInstanceOf(AssertionError.class);
}
@Test
void exponential_histogram_passing() {
assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasExponentialHistogram().isCumulative();
assertThat(EXPONENTIAL_HISTOGRAM_DELTA_METRIC).hasExponentialHistogram().isDelta();
}
@Test
void exponential_histogram_fails() {
assertThatThrownBy(() -> assertThat(DOUBLE_GAUGE_METRIC).hasExponentialHistogram())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasExponentialHistogram().isDelta())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_DELTA_METRIC)
.hasExponentialHistogram()
.isCumulative())
.isInstanceOf(AssertionError.class);
}
}

View File

@ -22,7 +22,6 @@ dependencies {
testAnnotationProcessor("com.google.auto.value:auto-value")
testImplementation(project(":sdk:metrics-testing"))
testImplementation(project(":sdk:testing"))
testImplementation("com.google.guava:guava")

View File

@ -0,0 +1,35 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.data;
import com.google.auto.value.AutoValue;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/**
* Auto value implementation of {@link ExponentialHistogramBuckets}.
*
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time.
*/
@AutoValue
@Immutable
public abstract class ImmutableExponentialHistogramBuckets implements ExponentialHistogramBuckets {
ImmutableExponentialHistogramBuckets() {}
/**
* Create an ExponentialHistogramBuckets.
*
* @return an ExponentialHistogramBuckets.
*/
@SuppressWarnings("TooManyParameters")
public static ExponentialHistogramBuckets create(int scale, int offset, List<Long> bucketCounts) {
return new AutoValue_ImmutableExponentialHistogramBuckets(
scale, offset, bucketCounts, bucketCounts.stream().mapToLong(l -> l).sum());
}
}
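
A brief usage sketch (BucketsSketch and the values are illustrative, taken from the marshaler test above): the factory derives the total count from the supplied bucket counts, which is what lets the hand-rolled TestExponentialHistogramBuckets helper be deleted.

import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets;
import java.util.Arrays;

class BucketsSketch {
  static ExponentialHistogramBuckets sample() {
    // scale 0, offset 1, counts [1, 0, 2]; getTotalCount() sums the counts and returns 3.
    return ImmutableExponentialHistogramBuckets.create(0, 1, Arrays.asList(1L, 0L, 2L));
  }
}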

View File

@ -33,7 +33,7 @@ public abstract class ImmutableExponentialHistogramPointData
* @return a DoubleExponentialHistogramPointData.
*/
@SuppressWarnings("TooManyParameters")
public static ImmutableExponentialHistogramPointData create(
public static ExponentialHistogramPointData create(
int scale,
double sum,
long zeroCount,

View File

@ -20,7 +20,6 @@ import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleHistogram;
import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
@ -226,52 +225,61 @@ class SdkDoubleHistogramTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric ->
MetricAssertions.assertThat(metric)
assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDescription("description")
.hasUnit("ms")
.hasExponentialHistogram()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point -> {
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(25)
.hasMin(12)
.hasMax(13)
.hasScale(5)
.hasZeroCount(0);
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(114)
.hasCounts(Arrays.asList(1L, 0L, 0L, 0L, 1L));
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
},
point -> {
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("key", "value").build())
.hasCount(1)
.hasSum(12)
.hasMin(12)
.hasMax(12)
.hasScale(20)
.hasZeroCount(0);
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(3759105)
.hasCounts(Collections.singletonList(1L));
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
}));
.hasExponentialHistogramSatisfying(
expHistogram ->
expHistogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(25)
.hasMin(12)
.hasMax(13)
.hasScale(5)
.hasZeroCount(0)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasOffset(114)
.hasCounts(
Arrays.asList(1L, 0L, 0L, 0L, 1L)))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasOffset(0)
.hasCounts(Collections.emptyList())),
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(
Attributes.builder().put("key", "value").build())
.hasCount(1)
.hasSum(12)
.hasMin(12)
.hasMax(12)
.hasScale(20)
.hasZeroCount(0)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasOffset(3759105)
.hasCounts(Collections.singletonList(1L)))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasOffset(0)
.hasCounts(Collections.emptyList())))));
}
@Test

View File

@ -20,7 +20,6 @@ import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongHistogram;
import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
@ -227,52 +226,61 @@ class SdkLongHistogramTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric ->
MetricAssertions.assertThat(metric)
assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDescription("description")
.hasUnit("ms")
.hasExponentialHistogram()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point -> {
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(25)
.hasMin(12)
.hasMax(13)
.hasScale(5)
.hasZeroCount(0);
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(114)
.hasCounts(Arrays.asList(1L, 0L, 0L, 0L, 1L));
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
},
point -> {
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("key", "value").build())
.hasCount(1)
.hasSum(12)
.hasMin(12)
.hasMax(12)
.hasScale(20)
.hasZeroCount(0);
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(3759105)
.hasCounts(Collections.singletonList(1L));
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
}));
.hasExponentialHistogramSatisfying(
expHistogram ->
expHistogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(25)
.hasMin(12)
.hasMax(13)
.hasScale(5)
.hasZeroCount(0)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasOffset(114)
.hasCounts(
Arrays.asList(1L, 0L, 0L, 0L, 1L)))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasOffset(0)
.hasCounts(Collections.emptyList())),
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(
Attributes.builder().put("key", "value").build())
.hasCount(1)
.hasSum(12)
.hasMin(12)
.hasMax(12)
.hasScale(20)
.hasZeroCount(0)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasOffset(3759105)
.hasCounts(Collections.singletonList(1L)))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasOffset(0)
.hasCounts(Collections.emptyList())))));
}
@Test

View File

@ -23,7 +23,6 @@ import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -459,28 +458,32 @@ class DoubleExponentialHistogramAggregatorTest {
// Assertions run twice to verify immutability; recordings shouldn't modify the metric data
for (int i = 0; i < 2; i++) {
MetricAssertions.assertThat(metricDataCumulative)
.hasExponentialHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point -> {
MetricAssertions.assertThat(point)
.hasSum(123.456)
.hasScale(20)
.hasZeroCount(2)
.hasCount(3)
.hasMin(0)
.hasMax(123.456)
.hasExemplars(exemplar);
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasCounts(Collections.singletonList(1L))
.hasOffset(valueToIndex(20, 123.456))
.hasTotalCount(1);
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasTotalCount(0)
.hasCounts(Collections.emptyList());
});
assertThat(metricDataCumulative)
.hasExponentialHistogramSatisfying(
expHistogram ->
expHistogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasSum(123.456)
.hasScale(20)
.hasZeroCount(2)
.hasCount(3)
.hasMin(0)
.hasMax(123.456)
.hasExemplars(exemplar)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasCounts(Collections.singletonList(1L))
.hasOffset(valueToIndex(20, 123.456))
.hasTotalCount(1))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasTotalCount(0)
.hasCounts(Collections.emptyList()))));
aggregatorHandle.recordDouble(1);
aggregatorHandle.recordDouble(-1);
aggregatorHandle.recordDouble(0);
@ -537,12 +540,12 @@ class DoubleExponentialHistogramAggregatorTest {
assertThat(acc.getScale()).isEqualTo(3);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(3);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(3);
MetricAssertions.assertThat(acc.getPositiveBuckets())
.hasTotalCount(numberOfUpdates * 3)
.hasOffset(-27);
MetricAssertions.assertThat(acc.getNegativeBuckets())
.hasTotalCount(numberOfUpdates * 2)
.hasOffset(-27);
ExponentialHistogramBuckets positiveBuckets = acc.getPositiveBuckets();
assertThat(positiveBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 3);
assertThat(positiveBuckets.getOffset()).isEqualTo(-27);
ExponentialHistogramBuckets negativeBuckets = acc.getNegativeBuckets();
assertThat(negativeBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 2);
assertThat(negativeBuckets.getOffset()).isEqualTo(-27);
// Verify positive buckets have correct counts
List<Long> posCounts = acc.getPositiveBuckets().getBucketCounts();

View File

@ -8,7 +8,6 @@ package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
@ -27,7 +26,8 @@ class DoubleExponentialHistogramBucketsTest {
b.record(1);
b.record(1);
b.record(1);
MetricAssertions.assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L));
assertThat(b.getTotalCount()).isEqualTo(3);
assertThat(b.getBucketCounts()).isEqualTo(Collections.singletonList(3L));
}
@Test
@ -44,10 +44,9 @@ class DoubleExponentialHistogramBucketsTest {
b.record(2);
b.record(4);
assertThat(b.getScale()).isEqualTo(0);
MetricAssertions.assertThat(b)
.hasTotalCount(3)
.hasCounts(Arrays.asList(1L, 1L, 1L))
.hasOffset(-1);
assertThat(b.getTotalCount()).isEqualTo(3);
assertThat(b.getBucketCounts()).isEqualTo(Arrays.asList(1L, 1L, 1L));
assertThat(b.getOffset()).isEqualTo(-1);
}
@Test

View File

@ -16,7 +16,6 @@ import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Random;
@ -43,11 +42,11 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1.1));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getFilteredAttributes()).isEmpty();
assertThat(exemplar.getValue()).isEqualTo(1.1);
});
// Measurement count is reset, we should sample a new measurement (and only one)
clock.advance(Duration.ofSeconds(1));
@ -55,11 +54,11 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getFilteredAttributes()).isEmpty();
assertThat(exemplar.getValue()).isEqualTo(2);
});
}
@Test
@ -74,11 +73,11 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
reservoir.offerDoubleMeasurement(1.1, all, Context.root());
assertThat(reservoir.collectAndReset(partial))
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1.1)
.hasFilteredAttributes(remaining));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1.1);
assertThat(exemplar.getFilteredAttributes()).isEqualTo(remaining);
});
}
@Test
@ -97,13 +96,13 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
reservoir.offerDoubleMeasurement(1, all, context);
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(all)
.hasTraceId(TRACE_ID)
.hasSpanId(SPAN_ID));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1);
assertThat(exemplar.getFilteredAttributes()).isEqualTo(all);
assertThat(exemplar.getSpanContext().getTraceId()).isEqualTo(TRACE_ID);
assertThat(exemplar.getSpanContext().getSpanId()).isEqualTo(SPAN_ID);
});
}
@Test
@ -131,9 +130,13 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
reservoir.offerDoubleMeasurement(3, Attributes.of(key, 3L), Context.root());
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactlyInAnyOrder(
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(2);
},
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(3);
});
}
}

View File

@ -11,7 +11,6 @@ import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Arrays;
@ -36,22 +35,22 @@ class HistogramExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1.1));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1.1);
assertThat(exemplar.getFilteredAttributes()).isEmpty();
});
// Measurement count is reset, we should sample a new measurement (and only one)
clock.advance(Duration.ofSeconds(1));
reservoir.offerDoubleMeasurement(2, Attributes.empty(), Context.root());
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(2);
assertThat(exemplar.getFilteredAttributes()).isEmpty();
});
// only latest measurement is kept per-bucket
clock.advance(Duration.ofSeconds(1));
reservoir.offerDoubleMeasurement(3, Attributes.empty(), Context.root());
@ -59,11 +58,11 @@ class HistogramExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(4));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(4);
assertThat(exemplar.getFilteredAttributes()).isEmpty();
});
}
@Test
@ -79,21 +78,21 @@ class HistogramExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(4)
.satisfiesExactlyInAnyOrder(
e ->
MetricAssertions.assertThat(e)
.hasValue(-1.1)
.hasFilteredAttributes(Attributes.of(bucketKey, 0L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(1)
.hasFilteredAttributes(Attributes.of(bucketKey, 1L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(11)
.hasFilteredAttributes(Attributes.of(bucketKey, 2L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(21)
.hasFilteredAttributes(Attributes.of(bucketKey, 3L)));
e -> {
assertThat(e.getValue()).isEqualTo(-1.1);
assertThat(e.getFilteredAttributes()).isEqualTo(Attributes.of(bucketKey, 0L));
},
e -> {
assertThat(e.getValue()).isEqualTo(1);
assertThat(e.getFilteredAttributes()).isEqualTo(Attributes.of(bucketKey, 1L));
},
e -> {
assertThat(e.getValue()).isEqualTo(11);
assertThat(e.getFilteredAttributes()).isEqualTo(Attributes.of(bucketKey, 2L));
},
e -> {
assertThat(e.getValue()).isEqualTo(21);
assertThat(e.getFilteredAttributes()).isEqualTo(Attributes.of(bucketKey, 3L));
});
}
}

View File

@ -16,7 +16,6 @@ import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Random;
@ -43,11 +42,11 @@ class LongRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1);
assertThat(exemplar.getFilteredAttributes()).isEmpty();
});
// Measurement count is reset, we should sample a new measurement (and only one)
clock.advance(Duration.ofSeconds(1));
@ -55,11 +54,11 @@ class LongRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(1)
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(2);
assertThat(exemplar.getFilteredAttributes()).isEmpty();
});
}
@Test
@ -74,11 +73,11 @@ class LongRandomFixedSizeExemplarReservoirTest {
reservoir.offerLongMeasurement(1, all, Context.root());
assertThat(reservoir.collectAndReset(partial))
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(remaining));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1);
assertThat(exemplar.getFilteredAttributes()).isEqualTo(remaining);
});
}
@Test
@ -97,13 +96,13 @@ class LongRandomFixedSizeExemplarReservoirTest {
reservoir.offerLongMeasurement(1, all, context);
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactly(
exemplar ->
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(all)
.hasTraceId(TRACE_ID)
.hasSpanId(SPAN_ID));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(1);
assertThat(exemplar.getFilteredAttributes()).isEqualTo(all);
assertThat(exemplar.getSpanContext().getTraceId()).isEqualTo(TRACE_ID);
assertThat(exemplar.getSpanContext().getSpanId()).isEqualTo(SPAN_ID);
});
}
@Test
@ -131,9 +130,13 @@ class LongRandomFixedSizeExemplarReservoirTest {
reservoir.offerLongMeasurement(3, Attributes.of(key, 3L), Context.root());
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactlyInAnyOrder(
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(2);
},
exemplar -> {
assertThat(exemplar.getEpochNanos()).isEqualTo(clock.now());
assertThat(exemplar.getValue()).isEqualTo(3);
});
}
}

View File

@ -5,18 +5,23 @@
package io.opentelemetry.sdk.testing.assertj;
import static org.assertj.core.api.Assertions.assertThat;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import java.util.Arrays;
import java.util.function.Consumer;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link ExponentialHistogramData}. */
public class ExponentialHistogramAssert
/**
* Test assertions for {@link ExponentialHistogramData}.
*
* @since 1.23.0
*/
public final class ExponentialHistogramAssert
extends AbstractAssert<ExponentialHistogramAssert, ExponentialHistogramData> {
protected ExponentialHistogramAssert(ExponentialHistogramData actual) {
ExponentialHistogramAssert(ExponentialHistogramData actual) {
super(actual, ExponentialHistogramAssert.class);
}
@ -47,14 +52,26 @@ public class ExponentialHistogramAssert
return this;
}
/** Returns convenience API to assert against the {@code points} field. */
public AbstractIterableAssert<
?,
? extends Iterable<? extends ExponentialHistogramPointData>,
ExponentialHistogramPointData,
?>
points() {
isNotNull();
return Assertions.assertThat(actual.getPoints());
/**
* Asserts the exponential histogram has points matching all of the given assertions and no more,
* in any order.
*/
@SafeVarargs
@SuppressWarnings("varargs")
public final ExponentialHistogramAssert hasPointsSatisfying(
Consumer<ExponentialHistogramPointAssert>... assertions) {
return hasPointsSatisfying(Arrays.asList(assertions));
}
/**
* Asserts the exponential histogram has points matching all of the given assertions and no more,
* in any order.
*/
public ExponentialHistogramAssert hasPointsSatisfying(
Iterable<? extends Consumer<ExponentialHistogramPointAssert>> assertions) {
assertThat(actual.getPoints())
.satisfiesExactlyInAnyOrder(
AssertUtil.toConsumers(assertions, ExponentialHistogramPointAssert::new));
return this;
}
}
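
A short sketch of the Iterable overload (PointsSatisfyingSketch, metricData, and the point values are illustrative): point assertions can be built up as a list and passed in place of the varargs form, and the points may match the consumers in any order.

import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;

import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.testing.assertj.ExponentialHistogramPointAssert;
import java.util.Arrays;
import java.util.List;
import java.util.function.Consumer;

class PointsSatisfyingSketch {
  static void verify(MetricData metricData) {
    // Collect the per-point assertions up front, then pass them through the Iterable overload.
    List<Consumer<ExponentialHistogramPointAssert>> pointAssertions =
        Arrays.asList(
            point -> point.hasCount(2).hasSum(25),
            point -> point.hasCount(1).hasSum(12));
    assertThat(metricData)
        .hasExponentialHistogramSatisfying(
            expHistogram -> expHistogram.isCumulative().hasPointsSatisfying(pointAssertions));
  }
}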

View File

@ -10,11 +10,15 @@ import java.util.List;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link ExponentialHistogramBuckets}. */
public class ExponentialHistogramBucketsAssert
/**
* Test assertions for {@link ExponentialHistogramBuckets}.
*
* @since 1.23.0
*/
public final class ExponentialHistogramBucketsAssert
extends AbstractAssert<ExponentialHistogramBucketsAssert, ExponentialHistogramBuckets> {
protected ExponentialHistogramBucketsAssert(ExponentialHistogramBuckets actual) {
ExponentialHistogramBucketsAssert(ExponentialHistogramBuckets actual) {
super(actual, ExponentialHistogramBucketsAssert.class);
}

View File

@ -0,0 +1,110 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import static org.assertj.core.api.Assertions.assertThat;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import java.util.Arrays;
import java.util.function.Consumer;
import org.assertj.core.api.Assertions;
/**
* Test assertions for {@link ExponentialHistogramPointData}.
*
* @since 1.23.0
*/
public final class ExponentialHistogramPointAssert
extends AbstractPointAssert<ExponentialHistogramPointAssert, ExponentialHistogramPointData> {
ExponentialHistogramPointAssert(ExponentialHistogramPointData actual) {
super(actual, ExponentialHistogramPointAssert.class);
}
/** Ensures the {@code sum} field matches the expected value. */
public ExponentialHistogramPointAssert hasSum(double expected) {
isNotNull();
Assertions.assertThat(actual.getSum()).as("sum").isEqualTo(expected);
return this;
}
/** Ensures the {@code min} field matches the expected value. */
public ExponentialHistogramPointAssert hasMin(double expected) {
isNotNull();
Assertions.assertThat(actual.getMin()).as("min").isEqualTo(expected);
return this;
}
/** Ensures the {@code max} field matches the expected value. */
public ExponentialHistogramPointAssert hasMax(double expected) {
isNotNull();
Assertions.assertThat(actual.getMax()).as("max").isEqualTo(expected);
return this;
}
/** Ensures the {@code count} field matches the expected value. */
public ExponentialHistogramPointAssert hasCount(long expected) {
isNotNull();
Assertions.assertThat(actual.getCount()).as("count").isEqualTo(expected);
return this;
}
/** Ensures the {@code scale} field matches the expected value. */
public ExponentialHistogramPointAssert hasScale(int expected) {
isNotNull();
Assertions.assertThat(actual.getScale()).as("scale").isEqualTo(expected);
return this;
}
/** Ensures the {@code zeroCount} field matches the expected value. */
public ExponentialHistogramPointAssert hasZeroCount(long expected) {
isNotNull();
Assertions.assertThat(actual.getZeroCount()).as("zeroCount").isEqualTo(expected);
return this;
}
/** Ensures the {@code positiveBuckets} field satisfies the provided assertion. */
public ExponentialHistogramPointAssert hasPositiveBucketsSatisfying(
Consumer<ExponentialHistogramBucketsAssert> assertion) {
isNotNull();
assertion.accept(new ExponentialHistogramBucketsAssert(actual.getPositiveBuckets()));
return this;
}
/** Ensures the {@code negativeBuckets} field satisfies the provided assertion. */
public ExponentialHistogramPointAssert hasNegativeBucketsSatisfying(
Consumer<ExponentialHistogramBucketsAssert> assertion) {
isNotNull();
assertion.accept(new ExponentialHistogramBucketsAssert(actual.getNegativeBuckets()));
return this;
}
/** Asserts the point has the specified exemplars, in any order. */
public ExponentialHistogramPointAssert hasExemplars(DoubleExemplarData... exemplars) {
isNotNull();
Assertions.assertThat(actual.getExemplars())
.as("exemplars")
.containsExactlyInAnyOrder(exemplars);
return myself;
}
/** Asserts the point has exemplars matching all of the assertions, in any order. */
@SafeVarargs
@SuppressWarnings("varargs")
public final ExponentialHistogramPointAssert hasExemplarsSatisfying(
Consumer<DoubleExemplarAssert>... assertions) {
return hasExemplarsSatisfying(Arrays.asList(assertions));
}
/** Asserts the point has exemplars matching all of the assertions, in any order. */
public ExponentialHistogramPointAssert hasExemplarsSatisfying(
Iterable<? extends Consumer<DoubleExemplarAssert>> assertions) {
isNotNull();
assertThat(actual.getExemplars())
.satisfiesExactlyInAnyOrder(AssertUtil.toConsumers(assertions, DoubleExemplarAssert::new));
return myself;
}
}

View File

@ -172,6 +172,27 @@ public final class MetricAssert extends AbstractAssert<MetricAssert, MetricData>
return this;
}
/**
* Asserts this {@link MetricData} is an exponential histogram that satisfies the provided
* assertion.
*
* @since 1.23.0
*/
public MetricAssert hasExponentialHistogramSatisfying(
Consumer<ExponentialHistogramAssert> assertion) {
isNotNull();
if (actual.getType() != MetricDataType.EXPONENTIAL_HISTOGRAM) {
failWithActualExpectedAndMessage(
actual,
"type: EXPONENTIAL_HISTOGRAM",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.EXPONENTIAL_HISTOGRAM,
actual.getType());
}
assertion.accept(new ExponentialHistogramAssert(actual.getExponentialHistogramData()));
return this;
}
/** Asserts this {@link MetricData} is a summary that satisfies the provided assertion. */
public MetricAssert hasSummarySatisfying(Consumer<SummaryAssert> assertion) {
isNotNull();

View File

@ -21,6 +21,7 @@ import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
@ -28,6 +29,9 @@ import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
@ -231,6 +235,44 @@ class MetricAssertionsTest {
// Points
Arrays.asList(HISTOGRAM_POINT_DATA)));
private static final ExponentialHistogramPointData EXPONENTIAL_HISTOGRAM_POINT_DATA =
ImmutableExponentialHistogramPointData.create(
1,
10.0,
1,
2.0,
4.0,
ImmutableExponentialHistogramBuckets.create(1, 10, Arrays.asList(1L, 2L)),
ImmutableExponentialHistogramBuckets.create(1, 0, Collections.emptyList()),
1,
2,
Attributes.empty(),
Arrays.asList(DOUBLE_EXEMPLAR1, DOUBLE_EXEMPLAR2));
private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC =
ImmutableMetricData.createExponentialHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "exponential_histogram",
/* description= */ "description",
/* unit= */ "unit",
ImmutableExponentialHistogramData.create(
AggregationTemporality.CUMULATIVE,
// Points
Collections.singletonList(EXPONENTIAL_HISTOGRAM_POINT_DATA)));
private static final MetricData EXPONENTIAL_HISTOGRAM_DELTA_METRIC =
ImmutableMetricData.createExponentialHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "exponential_histogram_delta",
/* description= */ "description",
/* unit= */ "unit",
ImmutableExponentialHistogramData.create(
AggregationTemporality.DELTA,
// Points
Collections.singletonList(EXPONENTIAL_HISTOGRAM_POINT_DATA)));
private static final SummaryPointData SUMMARY_POINT_DATA =
ImmutableSummaryPointData.create(
1,
@ -892,6 +934,101 @@ class MetricAssertionsTest {
.isInstanceOf(AssertionError.class);
}
@Test
void exponentialHistogram() {
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram ->
histogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasScale(1)
.hasSum(10.0)
.hasZeroCount(1)
.hasCount(4)
.hasMin(2.0)
.hasMax(4.0)
.hasPositiveBucketsSatisfying(
buckets ->
buckets
.hasOffset(10)
.hasCounts(Arrays.asList(1L, 2L))
.hasTotalCount(3))
.hasNegativeBucketsSatisfying(
buckets ->
buckets
.hasOffset(0)
.hasCounts(Collections.emptyList())
.hasTotalCount(0))
.hasStartEpochNanos(1)
.hasEpochNanos(2)
.hasAttributes(Attributes.empty())
.hasExemplars(DOUBLE_EXEMPLAR1, DOUBLE_EXEMPLAR2)
.hasExemplarsSatisfying(exemplar -> {}, exemplar -> {})));
assertThat(EXPONENTIAL_HISTOGRAM_DELTA_METRIC)
.hasExponentialHistogramSatisfying(ExponentialHistogramAssert::isDelta);
}
@Test
void exponentialHistogram_failure() {
assertThatThrownBy(
() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasDoubleGaugeSatisfying(gauge -> {}))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(ExponentialHistogramAssert::isDelta))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_DELTA_METRIC)
.hasExponentialHistogramSatisfying(ExponentialHistogramAssert::isCumulative))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> {}, point -> {})))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasSum(14.0))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasMax(8.0))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasMin(5.0))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasCount(4))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
.hasExponentialHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point.hasPositiveBucketsSatisfying(
buckets ->
buckets.hasCounts(Collections.singletonList(1L))))))
.isInstanceOf(AssertionError.class);
}
@Test
void summary() {
assertThat(SUMMARY_METRIC)

View File

@ -56,7 +56,6 @@ include(":sdk:common")
include(":sdk:logs")
include(":sdk:logs-testing")
include(":sdk:metrics")
include(":sdk:metrics-testing")
include(":sdk:testing")
include(":sdk:trace")
include(":sdk:trace-shaded-deps")