Convert tests from :sdk:metrics-testing to :sdk:testing (#4444)

This commit is contained in:
jack-berg 2022-05-09 09:34:09 -05:00 committed by GitHub
parent 89c6323fff
commit e067223409
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
67 changed files with 2594 additions and 3420 deletions

View File

@ -58,6 +58,10 @@ Comparing source compatibility of against
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasBucketBoundaries(double[])
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasBucketCounts(long[])
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasCount(long)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplars(io.opentelemetry.sdk.metrics.data.DoubleExemplarData[])
+++ NEW METHOD: PUBLIC(+) FINAL(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplarsSatisfying(java.util.function.Consumer[])
+++ NEW ANNOTATION: java.lang.SafeVarargs
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasExemplarsSatisfying(java.lang.Iterable)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasMax(double)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasMin(double)
+++ NEW METHOD: PUBLIC(+) io.opentelemetry.sdk.testing.assertj.HistogramPointAssert hasSum(double)

View File

@ -50,7 +50,6 @@ dependencies {
testRuntimeOnly("io.grpc:grpc-netty-shaded")
jmhImplementation(project(":sdk:testing"))
jmhImplementation(project(":sdk:metrics-testing"))
jmhImplementation(project(":sdk-extensions:resources"))
jmhImplementation("com.fasterxml.jackson.core:jackson-core")
jmhImplementation("io.opentelemetry.proto:opentelemetry-proto")

View File

@ -11,7 +11,6 @@ dependencies {
api("io.micrometer:micrometer-core")
testImplementation(project(":sdk:metrics-testing"))
testImplementation(project(":sdk:testing"))
}
@ -25,7 +24,6 @@ testing {
}
}
dependencies {
implementation(project(":sdk:metrics-testing"))
implementation(project(":sdk:testing"))
implementation(project.dependencies.enforcedPlatform("io.micrometer:micrometer-bom:1.5.17"))
@ -38,7 +36,6 @@ testing {
}
}
dependencies {
implementation(project(":sdk:metrics-testing"))
implementation(project(":sdk:testing"))
implementation(project.dependencies.enforcedPlatform("io.micrometer:micrometer-bom:1.6.13"))

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Counter;
@ -41,15 +41,14 @@ class CounterTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test counter")
.hasUnit("items")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(3)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(3)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(counter);
counter.increment();
@ -60,8 +59,7 @@ class CounterTest {
metric ->
assertThat(metric)
.hasName("testCounter")
.hasDoubleSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(3)));
.hasDoubleSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(3))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.DistributionSummary;
@ -42,27 +42,25 @@ class DistributionSummaryTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test distribution summary")
.hasUnit("things")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(7)
.hasCount(3)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(7)
.hasCount(3)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testSummary.max")
.hasDescription("This is a test distribution summary")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(4)
.attributes()
.containsEntry("tag", "value")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(4)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(summary);
@ -73,15 +71,14 @@ class DistributionSummaryTest {
metric ->
assertThat(metric)
.hasName("testSummary")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(7)
.hasCount(3)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(7)
.hasCount(3)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -109,44 +106,55 @@ class DistributionSummaryTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test distribution summary")
.hasUnit("things")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(555.5)
.hasCount(4)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
points ->
points
.hasSum(555.5)
.hasCount(4)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testSummary.max")
.hasDescription("This is a test distribution summary")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(500)
.attributes()
.containsEntry("tag", "value")),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(500)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testSummary.histogram")
.hasDoubleGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasValue(1).attributes().containsEntry("le", "1"),
point ->
assertThat(point).hasValue(2).attributes().containsEntry("le", "10"),
point ->
assertThat(point).hasValue(3).attributes().containsEntry("le", "100"),
point ->
assertThat(point)
.hasValue(4)
.attributes()
.containsEntry("le", "1000")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(
attributeEntry("le", "1"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(2)
.hasAttributes(
attributeEntry("le", "10"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(3)
.hasAttributes(
attributeEntry("le", "100"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(4)
.hasAttributes(
attributeEntry("le", "1000"),
attributeEntry("tag", "value")))));
}
@Test
@ -171,36 +179,42 @@ class DistributionSummaryTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test distribution summary")
.hasUnit("things")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(150)
.hasCount(2)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(150)
.hasCount(2)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testSummary.max")
.hasDescription("This is a test distribution summary")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(100)
.attributes()
.containsEntry("tag", "value")),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(100)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testSummary.percentile")
.hasDoubleGauge()
.points()
.anySatisfy(point -> assertThat(point).attributes().containsEntry("phi", "0.5"))
.anySatisfy(
point -> assertThat(point).attributes().containsEntry("phi", "0.95"))
.anySatisfy(
point -> assertThat(point).attributes().containsEntry("phi", "0.99")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("phi", "0.5"),
attributeEntry("tag", "value")),
point ->
point.hasAttributes(
attributeEntry("phi", "0.95"),
attributeEntry("tag", "value")),
point ->
point.hasAttributes(
attributeEntry("phi", "0.99"),
attributeEntry("tag", "value")))));
}
}

View File

@ -6,14 +6,15 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.FunctionCounter;
import io.micrometer.core.instrument.Metrics;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.internal.state.MetricStorageRegistry;
import java.util.concurrent.atomic.AtomicLong;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@ -43,56 +44,59 @@ class FunctionCounterTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function counter")
.hasUnit("items")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(12)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(counter);
assertThat(testing.collectAllMetrics()).isEmpty();
}
@Test
// TODO(anuraaga): Enable after https://github.com/open-telemetry/opentelemetry-java/pull/4222
@Disabled
void functionCountersWithSameNameAndDifferentTags() {
@SuppressLogger(MetricStorageRegistry.class)
void functionCountersWithSameNameAndDifferentDescriptions() {
FunctionCounter.builder("testFunctionCounterWithTags", num, AtomicLong::get)
.description("First description wins")
.description("First description")
.tags("tag", "1")
.baseUnit("items")
.register(Metrics.globalRegistry);
FunctionCounter.builder("testFunctionCounterWithTags", anotherNum, AtomicLong::get)
.description("ignored")
.description("Second description")
.tags("tag", "2")
.baseUnit("items")
.register(Metrics.globalRegistry);
assertThat(testing.collectAllMetrics())
.satisfiesExactly(
.satisfiesExactlyInAnyOrder(
metric ->
assertThat(metric)
.hasName("testFunctionCounterWithTags")
.hasDescription("First description wins")
.hasDescription("First description")
.hasUnit("items")
.hasDoubleSum()
.isMonotonic()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(12)
.attributes()
.containsOnly(attributeEntry("tag", "1")))
.anySatisfy(
point ->
assertThat(point)
.hasValue(13)
.attributes()
.containsOnly(attributeEntry("tag", "2"))));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12)
.hasAttributes(attributeEntry("tag", "1")))),
metric ->
assertThat(metric)
.hasName("testFunctionCounterWithTags")
.hasDescription("Second description")
.hasUnit("items")
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(13)
.hasAttributes(attributeEntry("tag", "2")))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.FunctionTimer;
@ -61,15 +61,14 @@ class FunctionTimerSecondsTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("1")
.hasLongSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testFunctionTimerSeconds.sum")
@ -77,14 +76,13 @@ class FunctionTimerSecondsTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("s")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(42)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(functionTimer);
assertThat(testing.collectAllMetrics()).isEmpty();

View File

@ -6,11 +6,12 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.FunctionTimer;
import io.micrometer.core.instrument.Metrics;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.BeforeEach;
@ -55,15 +56,14 @@ class FunctionTimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("1")
.hasLongSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testFunctionTimer.sum")
@ -71,14 +71,13 @@ class FunctionTimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("ms")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42_000)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(42_000)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(functionTimer);
assertThat(testing.collectAllMetrics()).isEmpty();
@ -102,9 +101,10 @@ class FunctionTimerTest {
assertThat(metric)
.hasName("testNanoFunctionTimer.sum")
.hasUnit("ms")
.hasDoubleSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1.234).attributes()));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasValue(1.234).hasAttributes(Attributes.empty()))));
}
@Test
@ -136,19 +136,16 @@ class FunctionTimerTest {
assertThat(metric)
.hasName("testFunctionTimerWithTags.sum")
.hasUnit("ms")
.hasDoubleSum()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(12_000)
.attributes()
.containsOnly(attributeEntry("tag", "1")))
.anySatisfy(
point ->
assertThat(point)
.hasValue(42_000)
.attributes()
.containsOnly(attributeEntry("tag", "2"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(12_000)
.hasAttributes(attributeEntry("tag", "1")),
point ->
point
.hasValue(42_000)
.hasAttributes(attributeEntry("tag", "2")))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Gauge;
@ -43,14 +43,13 @@ class GaugeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test gauge")
.hasUnit("items")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleGaugeSatisfying(
doubleGauge ->
doubleGauge.hasPointsSatisfying(
point ->
point
.hasValue(42)
.hasAttributes(attributeEntry("tag", "value")))));
// when
Metrics.globalRegistry.remove(gauge);
@ -79,20 +78,13 @@ class GaugeTest {
.hasName("testGaugeWithTags")
.hasDescription("First description wins")
.hasUnit("items")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(12)
.attributes()
.containsOnly(attributeEntry("tag", "1")))
.anySatisfy(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "2"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasValue(12).hasAttributes(attributeEntry("tag", "1")),
point ->
point.hasValue(42).hasAttributes(attributeEntry("tag", "2")))));
}
@Test
@ -107,9 +99,8 @@ class GaugeTest {
metric ->
assertThat(metric)
.hasName("testWeakRefGauge")
.hasDoubleGauge()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(42)));
.hasDoubleGaugeSatisfying(
gauge -> gauge.hasPointsSatisfying(point -> point.hasValue(42))));
WeakReference<AtomicLong> numWeakRef = new WeakReference<>(num);
num = null;

View File

@ -6,14 +6,15 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.LongTaskTimer;
import io.micrometer.core.instrument.Metrics;
import io.micrometer.core.instrument.MockClock;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import java.time.Duration;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
@ -57,11 +58,11 @@ class LongTaskTimerHistogramTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("tasks")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> assertThat(point).hasValue(3).attributes().isEmpty()),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point -> point.hasValue(3).hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerHistogram.duration")
@ -69,31 +70,31 @@ class LongTaskTimerHistogramTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("ms")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> {
assertThat(point).attributes().isEmpty();
// any value >0 - duration of currently running tasks
Assertions.assertThat(point.getValue()).isPositive();
}),
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasAttributes(Attributes.empty())
.satisfies(
pointData ->
assertThat(pointData.getValue())
.isPositive()))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerHistogram.histogram")
.hasInstrumentationScope(
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point).hasValue(2).attributes().containsEntry("le", "100"))
.anySatisfy(
point ->
assertThat(point)
.hasValue(3)
.attributes()
.containsEntry("le", "1000")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("le", "100")).hasValue(2),
point ->
point
.hasAttributes(attributeEntry("le", "1000"))
.hasValue(3))));
sample1.stop();
sample2.stop();
@ -109,11 +110,11 @@ class LongTaskTimerHistogramTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("tasks")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> assertThat(point).hasValue(0).attributes().isEmpty()),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point -> point.hasValue(0).hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerHistogram.duration")
@ -121,28 +122,22 @@ class LongTaskTimerHistogramTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("ms")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> {
assertThat(point).attributes().isEmpty();
// any value >0 - duration of currently running tasks
Assertions.assertThat(point.getValue()).isZero();
}),
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point -> point.hasValue(0).hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerHistogram.histogram")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point).hasValue(0).attributes().containsEntry("le", "100"))
.anySatisfy(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsEntry("le", "1000")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasValue(0).hasAttributes(attributeEntry("le", "100")),
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("le", "1000")))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.LongTaskTimer;
@ -51,15 +51,14 @@ class LongTaskTimerSecondsTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test long task timer")
.hasUnit("tasks")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerSeconds.duration")
@ -67,17 +66,17 @@ class LongTaskTimerSecondsTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test long task timer")
.hasUnit("s")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> {
assertThat(point)
.attributes()
.containsOnly(attributeEntry("tag", "value"));
// any value >0 - duration of currently running tasks
assertThat(point.getValue()).isPositive();
}));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasAttributes(attributeEntry("tag", "value"))
.satisfies(
pointData ->
assertThat(pointData.getValue())
.isPositive()))));
// when
TimeUnit.MILLISECONDS.sleep(100);
@ -89,25 +88,23 @@ class LongTaskTimerSecondsTest {
metric ->
assertThat(metric)
.hasName("testLongTaskTimerSeconds.active")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimerSeconds.duration")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))));
// when timer is removed from the registry
Metrics.globalRegistry.remove(timer);

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.LongTaskTimer;
@ -40,31 +40,30 @@ class LongTaskTimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test long task timer")
.hasUnit("tasks")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimer.duration")
.hasDescription("This is a test long task timer")
.hasUnit("ms")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> {
assertThat(point)
.attributes()
.containsOnly(attributeEntry("tag", "value"));
// any value >0 - duration of currently running tasks
assertThat(point.getValue()).isPositive();
}));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasAttributes(attributeEntry("tag", "value"))
.satisfies(
pointData ->
assertThat(pointData.getValue())
.isPositive()))));
// when
TimeUnit.MILLISECONDS.sleep(100);
@ -76,25 +75,23 @@ class LongTaskTimerTest {
metric ->
assertThat(metric)
.hasName("testLongTaskTimer.active")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testLongTaskTimer.duration")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))));
// when timer is removed from the registry
Metrics.globalRegistry.remove(timer);

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Measurement;
@ -56,109 +56,101 @@ class MeterTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.total_time")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.count")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.active")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.duration")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.max")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.value")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testMeter.unknown")
.hasDescription("This is a test meter")
.hasUnit("things")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12345)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(12345)
.hasAttributes(attributeEntry("tag", "value")))));
// when
Metrics.globalRegistry.remove(meter);

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Counter;
@ -69,13 +69,12 @@ class NamingConventionTest {
.hasName("test.renamedCounter")
.hasInstrumentationScope(
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -88,23 +87,20 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedSummary")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("test.tag", "test.value")))),
metric ->
assertThat(metric)
.hasName("test.renamedSummary.max")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -116,13 +112,12 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedFunctionCounter")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -141,23 +136,20 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedFunctionTimer.count")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))),
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("test.tag", "test.value")))),
metric ->
assertThat(metric)
.hasName("test.renamedFunctionTimer.sum")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -169,13 +161,12 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedGauge")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -188,23 +179,20 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedLongTaskTimer.active")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))),
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("test.tag", "test.value")))),
metric ->
assertThat(metric)
.hasName("test.renamedLongTaskTimer.duration")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
@Test
@ -217,22 +205,19 @@ class NamingConventionTest {
metric ->
assertThat(metric)
.hasName("test.renamedTimer")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("test.tag", "test.value")))),
metric ->
assertThat(metric)
.hasName("test.renamedTimer.max")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.attributes()
.containsOnly(attributeEntry("test.tag", "test.value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("test.tag", "test.value")))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Counter;
@ -66,15 +66,14 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test counter")
.hasUnit("items")
.hasDoubleSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(12)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(12)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -101,15 +100,14 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test summary")
.hasUnit("items")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(54)
.hasCount(2)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(54)
.hasCount(2)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testPrometheusSummary.items.max")
@ -117,14 +115,13 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test summary")
.hasUnit("items")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(42)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -153,15 +150,14 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("1")
.hasLongSum()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testPrometheusFunctionTimer.seconds.sum")
@ -169,14 +165,13 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test function timer")
.hasUnit("s")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(42)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -198,14 +193,13 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test gauge")
.hasUnit("items")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(42)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(42)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -230,15 +224,14 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test long task timer")
.hasUnit("tasks")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testPrometheusLongTaskTimer.seconds.duration")
@ -246,17 +239,17 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test long task timer")
.hasUnit("s")
.hasDoubleSum()
.isNotMonotonic()
.points()
.satisfiesExactly(
point -> {
assertThat(point)
.attributes()
.containsOnly(attributeEntry("tag", "value"));
// any value >0 - duration of currently running tasks
assertThat(point.getValue()).isPositive();
}));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasAttributes(attributeEntry("tag", "value"))
.satisfies(
pointData ->
assertThat(pointData.getValue())
.isPositive()))));
// when
TimeUnit.MILLISECONDS.sleep(100);
@ -268,25 +261,23 @@ class PrometheusModeTest {
metric ->
assertThat(metric)
.hasName("testPrometheusLongTaskTimer.seconds.active")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testPrometheusLongTaskTimer.seconds.duration")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(0)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(0)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -313,15 +304,14 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("s")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(16.789)
.hasCount(3)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(16.789)
.hasCount(3)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testPrometheusTimer.seconds.max")
@ -329,13 +319,12 @@ class PrometheusModeTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("s")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(10.789)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(10.789)
.hasAttributes(attributeEntry("tag", "value")))));
}
}

View File

@ -6,7 +6,7 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Metrics;
@ -51,28 +51,26 @@ class TimerSecondsTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("s")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(23.345)
.hasCount(3)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(23.345)
.hasCount(3)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimerSeconds.max")
.hasDescription("This is a test timer")
.hasUnit("s")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(12.345)
.attributes()
.containsEntry("tag", "value")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(12.345)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(timer);
timer.record(12, TimeUnit.SECONDS);
@ -82,8 +80,17 @@ class TimerSecondsTest {
metric ->
assertThat(metric)
.hasName("testTimerSeconds")
.hasDoubleHistogram()
.points()
.noneSatisfy(point -> assertThat(point).hasSum(35.345).hasCount(4)));
.hasInstrumentationScope(
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("s")
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(23.345)
.hasCount(3)
.hasAttributes(attributeEntry("tag", "value")))));
}
}

View File

@ -6,11 +6,12 @@
package io.opentelemetry.micrometer1shim;
import static io.opentelemetry.micrometer1shim.OpenTelemetryMeterRegistryBuilder.INSTRUMENTATION_NAME;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.micrometer.core.instrument.Metrics;
import io.micrometer.core.instrument.Timer;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import java.time.Duration;
import java.util.concurrent.TimeUnit;
@ -42,27 +43,25 @@ class TimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("ms")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(42_000)
.hasCount(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(42_000)
.hasCount(1)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimer.max")
.hasDescription("This is a test timer")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(42_000)
.attributes()
.containsEntry("tag", "value")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(42_000)
.hasAttributes(attributeEntry("tag", "value")))));
Metrics.globalRegistry.remove(timer);
timer.record(12, TimeUnit.SECONDS);
@ -74,15 +73,14 @@ class TimerTest {
metric ->
assertThat(metric)
.hasName("testTimer")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(42_000)
.hasCount(1)
.attributes()
.containsOnly(attributeEntry("tag", "value"))));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(42_000)
.hasCount(1)
.hasAttributes(attributeEntry("tag", "value")))));
}
@Test
@ -97,17 +95,21 @@ class TimerTest {
assertThat(metric)
.hasName("testNanoTimer")
.hasUnit("ms")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point).hasSum(1.234).hasCount(1).attributes().isEmpty()),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(1.234)
.hasCount(1)
.hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("testNanoTimer.max")
.hasDoubleGauge()
.points()
.anySatisfy(point -> assertThat(point).hasValue(1.234).attributes().isEmpty()));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point -> point.hasValue(1.234).hasAttributes(Attributes.empty()))));
}
@Test
@ -139,47 +141,55 @@ class TimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("ms")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(555500)
.hasCount(4)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(555500)
.hasCount(4)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimer.max")
.hasDescription("This is a test timer")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(500000)
.attributes()
.containsEntry("tag", "value")),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(500000)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimer.histogram")
.hasDoubleGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasValue(1).attributes().containsEntry("le", "1000"),
point ->
assertThat(point).hasValue(2).attributes().containsEntry("le", "10000"),
point ->
assertThat(point)
.hasValue(3)
.attributes()
.containsEntry("le", "100000"),
point ->
assertThat(point)
.hasValue(4)
.attributes()
.containsEntry("le", "1000000")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(
attributeEntry("le", "1000"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(2)
.hasAttributes(
attributeEntry("le", "10000"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(3)
.hasAttributes(
attributeEntry("le", "100000"),
attributeEntry("tag", "value")),
point ->
point
.hasValue(4)
.hasAttributes(
attributeEntry("le", "1000000"),
attributeEntry("tag", "value")))));
}
@Test
@ -204,36 +214,42 @@ class TimerTest {
InstrumentationScopeInfo.create(INSTRUMENTATION_NAME, null, null))
.hasDescription("This is a test timer")
.hasUnit("ms")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(150)
.hasCount(2)
.attributes()
.containsOnly(attributeEntry("tag", "value"))),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(150)
.hasCount(2)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimer.max")
.hasDescription("This is a test timer")
.hasDoubleGauge()
.points()
.anySatisfy(
point ->
assertThat(point)
.hasValue(100)
.attributes()
.containsEntry("tag", "value")),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(100)
.hasAttributes(attributeEntry("tag", "value")))),
metric ->
assertThat(metric)
.hasName("testTimer.percentile")
.hasDoubleGauge()
.points()
.anySatisfy(point -> assertThat(point).attributes().containsEntry("phi", "0.5"))
.anySatisfy(
point -> assertThat(point).attributes().containsEntry("phi", "0.95"))
.anySatisfy(
point -> assertThat(point).attributes().containsEntry("phi", "0.99")));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("phi", "0.5"),
attributeEntry("tag", "value")),
point ->
point.hasAttributes(
attributeEntry("phi", "0.95"),
attributeEntry("tag", "value")),
point ->
point.hasAttributes(
attributeEntry("phi", "0.99"),
attributeEntry("tag", "value")))));
}
}

View File

@ -17,7 +17,7 @@ dependencies {
api("io.opencensus:opencensus-exporter-metrics-util")
testImplementation(project(":sdk:all"))
testImplementation(project(":sdk:metrics-testing"))
testImplementation(project(":sdk:testing"))
testImplementation("io.opencensus:opencensus-impl")
testImplementation("io.opencensus:opencensus-contrib-exemplar-util")

View File

@ -5,7 +5,8 @@
package io.opentelemetry.opencensusshim;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThat;
import com.google.common.collect.ImmutableList;
@ -22,6 +23,7 @@ import io.opencensus.tags.TagMetadata;
import io.opencensus.tags.TagValue;
import io.opencensus.tags.Tagger;
import io.opencensus.tags.Tags;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricExporter;
import java.util.Comparator;
@ -112,51 +114,55 @@ class OpenTelemetryMetricExporterTest {
.hasName("double_gauge")
.hasDescription("double gauge")
.hasUnit("ms")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point).hasValue(60).attributes().hasSize(0)),
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(60)
.hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("double_sum")
.hasDescription("double sum")
.hasUnit("ms")
.hasDoubleSum()
.points()
.satisfiesExactly(
point ->
assertThat(point).hasValue(60).attributes().hasSize(0)),
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(60)
.hasAttributes(Attributes.empty()))),
metric ->
assertThat(metric)
.hasName("long_gauge")
.hasDescription("long gauge")
.hasUnit("ms")
.hasLongGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(50)
.attributes()
.hasSize(1)
.containsEntry(
tagKey.getName(), tagValue.asString())),
.hasLongGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(50)
.hasAttributes(
attributeEntry(
tagKey.getName(),
tagValue.asString())))),
metric ->
assertThat(metric)
.hasName("long_sum")
.hasDescription("long sum")
.hasUnit("ms")
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(50)
.attributes()
.hasSize(1)
.containsEntry(
tagKey.getName(), tagValue.asString()))));
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(50)
.hasAttributes(
attributeEntry(
tagKey.getName(),
tagValue.asString()))))));
} finally {
otelExporter.stop();
}

View File

@ -5,7 +5,8 @@
package io.opentelemetry.opencensusshim.internal.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opencensus.common.Timestamp;
import io.opencensus.metrics.LabelKey;
@ -25,7 +26,6 @@ import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.TraceFlags;
import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections;
@ -65,10 +65,11 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.GAUGE_INT64,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))),
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(Value.longValue(4), Timestamp.fromMillis(2000))),
Timestamp.fromMillis(1000)));
assertThat(MetricAdapter.convert(RESOURCE, censusMetric))
@ -77,15 +78,15 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasLongGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1"))
.hasValue(4));
.hasLongGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasValue(4)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(attributeEntry("key1", "value1"))));
}
@Test
@ -97,10 +98,11 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.GAUGE_DOUBLE,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))),
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))),
Timestamp.fromMillis(1000)));
assertThat(MetricAdapter.convert(RESOURCE, censusMetric))
@ -109,15 +111,15 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1"))
.hasValue(4));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(attributeEntry("key1", "value1"))
.hasValue(4)));
}
@Test
@ -129,10 +131,11 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.CUMULATIVE_INT64,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(Point.create(Value.longValue(4), Timestamp.fromMillis(2000))),
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(Value.longValue(4), Timestamp.fromMillis(2000))),
Timestamp.fromMillis(1000)));
assertThat(MetricAdapter.convert(RESOURCE, censusMetric))
@ -141,17 +144,17 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasLongSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1"))
.hasValue(4));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(attributeEntry("key1", "value1"))
.hasValue(4)));
}
@Test
@ -163,10 +166,11 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.CUMULATIVE_DOUBLE,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))),
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(Value.doubleValue(4), Timestamp.fromMillis(2000))),
Timestamp.fromMillis(1000)));
assertThat(MetricAdapter.convert(RESOURCE, censusMetric))
@ -175,17 +179,17 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1"))
.hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(attributeEntry("key1", "value1"))
.hasValue(4)));
}
@Test
@ -203,10 +207,10 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.CUMULATIVE_DISTRIBUTION,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(
Value.distributionValue(
Distribution.create(
@ -233,30 +237,31 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasSum(5)
.hasCount(10)
.hasBucketBoundaries(2.0, 5.0)
.hasBucketCounts(2, 6, 2)
.hasExemplars(
ImmutableDoubleExemplarData.create(
Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0),
ImmutableDoubleExemplarData.create(
Attributes.empty(),
1000000,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
4.0)));
.hasHistogramSatisfying(
histogram ->
histogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasSum(5)
.hasCount(10)
.hasBucketBoundaries(2.0, 5.0)
.hasBucketCounts(2, 6, 2)
.hasExemplars(
ImmutableDoubleExemplarData.create(
Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0),
ImmutableDoubleExemplarData.create(
Attributes.empty(),
1000000,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
4.0))));
}
@Test
@ -268,10 +273,10 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.SUMMARY,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(
Value.summaryValue(
Summary.create(
@ -280,7 +285,7 @@ class MetricAdapterTest {
Summary.Snapshot.create(
10L,
5d,
Arrays.asList(
Collections.singletonList(
Summary.Snapshot.ValueAtPercentile.create(100.0, 200))))),
Timestamp.fromMillis(2000))),
Timestamp.fromMillis(1000)));
@ -291,17 +296,17 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleSummary()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(Attributes.of(AttributeKey.stringKey("key1"), "value1"))
.hasCount(10)
.hasSum(5)
.hasValues(ImmutableValueAtQuantile.create(1.0, 200)));
.hasSummarySatisfying(
summary ->
summary.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(1000000000)
.hasEpochNanos(2000000000)
.hasAttributes(attributeEntry("key1", "value1"))
.hasCount(10)
.hasSum(5)
.hasValuesSatisfying(value -> value.hasValue(200.0).hasQuantile(1.0))));
}
@Test
@ -319,10 +324,10 @@ class MetricAdapterTest {
"description",
"unit",
MetricDescriptor.Type.GAUGE_DISTRIBUTION,
Arrays.asList(LabelKey.create("key1", "desc1"))),
Collections.singletonList(LabelKey.create("key1", "desc1"))),
TimeSeries.create(
Arrays.asList(LabelValue.create("value1")),
Arrays.asList(
Collections.singletonList(LabelValue.create("value1")),
Collections.singletonList(
Point.create(
Value.distributionValue(
Distribution.create(
@ -348,29 +353,30 @@ class MetricAdapterTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleHistogram()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(2000000000)
.hasEpochNanos(2000000000)
.hasSum(5)
.hasCount(10)
.hasBucketBoundaries(2.0, 5.0)
.hasBucketCounts(2, 6, 2)
.hasExemplars(
ImmutableDoubleExemplarData.create(
Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0),
ImmutableDoubleExemplarData.create(
Attributes.empty(),
1000000,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
4.0)));
.hasHistogramSatisfying(
histogram ->
histogram
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(2000000000)
.hasEpochNanos(2000000000)
.hasSum(5)
.hasCount(10)
.hasBucketBoundaries(2.0, 5.0)
.hasBucketCounts(2, 6, 2)
.hasExemplars(
ImmutableDoubleExemplarData.create(
Attributes.empty(), 2000000, SpanContext.getInvalid(), 1.0),
ImmutableDoubleExemplarData.create(
Attributes.empty(),
1000000,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
4.0))));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.opencensusshim.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opencensus.contrib.exemplar.util.ExemplarUtils;
import io.opencensus.stats.Aggregation;
@ -76,24 +76,28 @@ class OpenCensusMetricProducerTest {
.hasName("task_latency_distribution")
.hasDescription("The distribution of the task latencies.")
.hasUnit("ms")
.hasDoubleHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(50)
.hasCount(1)
.hasBucketCounts(1, 0, 0, 0, 0, 0, 0)
.hasBucketBoundaries(
100d, 200d, 400d, 1000d, 2000d, 4000d)
.exemplars()
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
.hasFilteredAttributes(Attributes.empty())
.hasValue(50)
.hasTraceId(TRACE_ID.toLowerBase16())
.hasSpanId(SPAN_ID.toLowerBase16())))));
.hasHistogramSatisfying(
histogram ->
histogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasSum(50)
.hasCount(1)
.hasBucketCounts(1, 0, 0, 0, 0, 0, 0)
.hasBucketBoundaries(
100d, 200d, 400d, 1000d, 2000d, 4000d)
.hasExemplarsSatisfying(
exemplar ->
exemplar
.hasFilteredAttributes(
Attributes.empty())
.hasValue(50)
.hasTraceId(
TRACE_ID.toLowerBase16())
.hasSpanId(
SPAN_ID
.toLowerBase16()))))));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.opencensusshim.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opencensus.stats.Aggregation;
import io.opencensus.stats.Measure;
@ -48,7 +48,9 @@ class OpenCensusMetricsTest {
() ->
assertThat(reader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("otel.sum").hasLongSum(),
metric -> assertThat(metric).hasName("oc.sum").hasLongSum()));
metric ->
assertThat(metric).hasName("otel.sum").hasLongSumSatisfying(sum -> {}),
metric ->
assertThat(metric).hasName("oc.sum").hasLongSumSatisfying(sum -> {})));
}
}

View File

@ -9,7 +9,6 @@ dependencies {
implementation(project(":api:all"))
implementation(project(":sdk:all"))
implementation(project(":sdk:testing"))
implementation(project(":sdk:metrics-testing"))
implementation(project(":exporters:otlp:trace"))
implementation(project(":exporters:logging"))
implementation(project(":semconv"))

View File

@ -16,7 +16,6 @@ dependencies {
testImplementation(project(":sdk:testing"))
testImplementation(project(":sdk-extensions:autoconfigure"))
testImplementation(project(":sdk:metrics-testing"))
testImplementation("com.google.guava:guava")
}

View File

@ -5,7 +5,8 @@
package io.opentelemetry.sdk.viewconfig;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;
@ -61,20 +62,17 @@ class ViewConfigCustomizerTest {
assertThat(reader.collectAllMetrics())
.satisfiesExactly(
metricData -> {
assertThat(metricData)
.hasLongSum()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasValue(1)
.hasAttributes(
Attributes.builder()
.put("foo", "val")
.put("bar", "val")
.build()));
});
metricData ->
assertThat(metricData)
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point
.hasValue(1)
.hasAttributes(
attributeEntry("foo", "val"),
attributeEntry("bar", "val")))));
}
@Test

View File

@ -1,25 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link DoublePointData}. */
/**
 * Assertions for a {@link DoublePointData} point.
 *
 * <p>Inherits start/end-epoch, attribute, and exemplar assertions from {@link
 * AbstractPointDataAssert}; adds the double-valued {@code as_double} check.
 */
public class DoublePointDataAssert
    extends AbstractPointDataAssert<DoublePointDataAssert, DoublePointData> {

  protected DoublePointDataAssert(DoublePointData actual) {
    super(actual, DoublePointDataAssert.class);
  }

  /** Asserts the point's {@code as_double} value equals {@code expected}. */
  public DoublePointDataAssert hasValue(double expected) {
    isNotNull();
    double value = actual.getValue();
    Assertions.assertThat(value).as("value").isEqualTo(expected);
    return this;
  }
}

View File

@ -1,24 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.GaugeData;
import io.opentelemetry.sdk.metrics.data.PointData;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link GaugeData}. */
/** Test assertions for {@link GaugeData}. */
public class GaugeAssert<T extends PointData> extends AbstractAssert<GaugeAssert<T>, GaugeData<T>> {

  protected GaugeAssert(GaugeData<T> actual) {
    super(actual, GaugeAssert.class);
  }

  /**
   * Returns a convenience iterable assertion over the gauge's {@code points} field, allowing
   * chained checks such as {@code satisfiesExactly(...)} or {@code isEmpty()}.
   */
  public AbstractIterableAssert<?, ? extends Iterable<? extends T>, T, ?> points() {
    isNotNull();
    return Assertions.assertThat(actual.getPoints());
  }
}

View File

@ -1,57 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.HistogramData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link HistogramData}. */
/** Test assertions for {@link HistogramData}. */
public class HistogramAssert extends AbstractAssert<HistogramAssert, HistogramData> {

  protected HistogramAssert(HistogramData actual) {
    super(actual, HistogramAssert.class);
  }

  /** Ensures that {@code aggregation_temporality} field is {@code CUMULATIVE}. */
  public HistogramAssert isCumulative() {
    isNotNull();
    if (actual.getAggregationTemporality() != AggregationTemporality.CUMULATIVE) {
      // One %s in the format string, one argument (previously a stray extra
      // AggregationTemporality.CUMULATIVE argument was passed and silently ignored).
      failWithActualExpectedAndMessage(
          actual,
          "aggregationTemporality: CUMULATIVE",
          "Expected Histogram to have cumulative aggregation but found <%s>",
          actual.getAggregationTemporality());
    }
    return this;
  }

  /** Ensures that {@code aggregation_temporality} field is {@code DELTA}. */
  public HistogramAssert isDelta() {
    isNotNull();
    if (actual.getAggregationTemporality() != AggregationTemporality.DELTA) {
      // Fixed failure message: previously read "Expected Histgram to have cumulative
      // aggregation" (typo + wrong temporality) on the DELTA check.
      failWithActualExpectedAndMessage(
          actual,
          "aggregationTemporality: DELTA",
          "Expected Histogram to have delta aggregation but found <%s>",
          actual.getAggregationTemporality());
    }
    return this;
  }

  /** Returns convenience API to assert against the {@code points} field. */
  public AbstractIterableAssert<
          ?, ? extends Iterable<? extends HistogramPointData>, HistogramPointData, ?>
      points() {
    isNotNull();
    return Assertions.assertThat(actual.getPoints());
  }
}

View File

@ -1,80 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import java.util.Arrays;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link HistogramPointData}. */
/**
 * Assertions for a {@link HistogramPointData} point: sum, count, min/max, and the bucket
 * boundary/count arrays. Common point assertions come from {@link AbstractPointDataAssert}.
 */
public class HistogramPointDataAssert
    extends AbstractPointDataAssert<HistogramPointDataAssert, HistogramPointData> {

  protected HistogramPointDataAssert(HistogramPointData actual) {
    super(actual, HistogramPointDataAssert.class);
  }

  /** Asserts the point's {@code sum} equals {@code expected}. */
  public HistogramPointDataAssert hasSum(double expected) {
    isNotNull();
    Assertions.assertThat(actual.getSum()).as("sum").isEqualTo(expected);
    return this;
  }

  /** Asserts the point's {@code sum} is strictly greater than {@code boundary}. */
  public HistogramPointDataAssert hasSumGreaterThan(double boundary) {
    isNotNull();
    Assertions.assertThat(actual.getSum()).as("sum").isGreaterThan(boundary);
    return this;
  }

  /** Asserts a recorded {@code min} exists and equals {@code expected}. */
  public HistogramPointDataAssert hasMin(double expected) {
    isNotNull();
    Assertions.assertThat(actual.hasMin()).isTrue();
    Assertions.assertThat(actual.getMin()).as("min").isEqualTo(expected);
    return this;
  }

  /** Asserts a recorded {@code max} exists and equals {@code expected}. */
  public HistogramPointDataAssert hasMax(double expected) {
    isNotNull();
    Assertions.assertThat(actual.hasMax()).isTrue();
    Assertions.assertThat(actual.getMax()).as("max").isEqualTo(expected);
    return this;
  }

  /** Asserts the point's measurement {@code count} equals {@code expected}. */
  public HistogramPointDataAssert hasCount(long expected) {
    isNotNull();
    Assertions.assertThat(actual.getCount()).as("count").isEqualTo(expected);
    return this;
  }

  /**
   * Asserts the bucket {@code boundaries} match {@code boundaries}, in order.
   *
   * @param boundaries expected bucket boundaries, in the same order as the stored collection.
   */
  public HistogramPointDataAssert hasBucketBoundaries(double... boundaries) {
    isNotNull();
    // Box to Double[] so containsExactly compares element-wise against the stored List<Double>.
    Double[] boxedBoundaries = Arrays.stream(boundaries).boxed().toArray(Double[]::new);
    Assertions.assertThat(actual.getBoundaries()).as("boundaries").containsExactly(boxedBoundaries);
    return this;
  }

  /**
   * Asserts the bucket {@code counts} match {@code counts}, in order.
   *
   * @param counts expected bucket counts, in the same order as the stored collection.
   */
  public HistogramPointDataAssert hasBucketCounts(long... counts) {
    isNotNull();
    // Box to Long[] so containsExactly compares element-wise against the stored List<Long>.
    Long[] boxedCounts = Arrays.stream(counts).boxed().toArray(Long[]::new);
    Assertions.assertThat(actual.getCounts()).as("bucketCounts").containsExactly(boxedCounts);
    return this;
  }
}

View File

@ -1,25 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link LongPointData}. */
/**
 * Assertions for a {@link LongPointData} point.
 *
 * <p>Inherits start/end-epoch, attribute, and exemplar assertions from {@link
 * AbstractPointDataAssert}; adds the integer-valued {@code as_int} check.
 */
public class LongPointDataAssert
    extends AbstractPointDataAssert<LongPointDataAssert, LongPointData> {

  protected LongPointDataAssert(LongPointData actual) {
    super(actual, LongPointDataAssert.class);
  }

  /** Asserts the point's {@code as_int} value equals {@code expected}. */
  public LongPointDataAssert hasValue(long expected) {
    isNotNull();
    long value = actual.getValue();
    Assertions.assertThat(value).as("value").isEqualTo(expected);
    return this;
  }
}

View File

@ -5,17 +5,8 @@
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.GaugeData;
import io.opentelemetry.sdk.metrics.data.HistogramData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.data.SumData;
import io.opentelemetry.sdk.metrics.data.SummaryData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramPointData;
import org.assertj.core.api.Assertions;
@ -27,34 +18,6 @@ public final class MetricAssertions extends Assertions {
return new MetricDataAssert(metric);
}
/** Returns an assertion for {@link GaugeData}. */
// There is no real use case for passing in a GaugeData that is a lambda, if for some reason it is
// desired a cast will still work.
@SuppressWarnings("FunctionalInterfaceClash")
public static <T extends PointData> GaugeAssert<T> assertThat(GaugeData<T> metric) {
return new GaugeAssert<>(metric);
}
/** Returns an assertion for {@link HistogramData}. */
public static HistogramAssert assertThat(HistogramData metric) {
return new HistogramAssert(metric);
}
/** Returns an assertion for {@link SummaryData}. */
public static SummaryDataAssert assertThat(SummaryData metric) {
return new SummaryDataAssert(metric);
}
/** Returns an assertion for {@link HistogramPointData}. */
public static HistogramPointDataAssert assertThat(HistogramPointData point) {
return new HistogramPointDataAssert(point);
}
/** Returns an assertion for {@link SummaryPointData}. */
public static SummaryPointDataAssert assertThat(SummaryPointData point) {
return new SummaryPointDataAssert(point);
}
/** Returns an assertion for {@link ExponentialHistogramPointData}. */
public static ExponentialHistogramPointDataAssert assertThat(
ExponentialHistogramPointData point) {
@ -66,21 +29,6 @@ public final class MetricAssertions extends Assertions {
return new ExponentialHistogramBucketsAssert(buckets);
}
/** Returns an assertion for {@link DoublePointData}. */
public static DoublePointDataAssert assertThat(DoublePointData point) {
return new DoublePointDataAssert(point);
}
/** Returns an assertion for {@link SumData}. */
public static <T extends PointData> SumDataAssert<T> assertThat(SumData<T> point) {
return new SumDataAssert<>(point);
}
/** Returns an assertion for {@link LongPointData}. */
public static LongPointDataAssert assertThat(LongPointData point) {
return new LongPointDataAssert(point);
}
public static ExemplarDataAssert assertThat(ExemplarData exemplar) {
return new ExemplarDataAssert(exemplar);
}

View File

@ -6,8 +6,6 @@
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData;
@ -94,24 +92,6 @@ public class MetricDataAssert extends AbstractAssert<MetricDataAssert, MetricDat
return this;
}
/**
* Ensures this {@link MetricData} is a {@code DoubleHistogram}.
*
* @return convenience API to assert against the {@code DoubleHistogram}.
*/
public HistogramAssert hasDoubleHistogram() {
isNotNull();
if (actual.getType() != MetricDataType.HISTOGRAM) {
failWithActualExpectedAndMessage(
actual,
"type: HISTOGRAM",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.HISTOGRAM,
actual.getType());
}
return new HistogramAssert(actual.getHistogramData());
}
/**
* Ensures this {@link MetricData} is a {@code ExponentialHistogram}.
*
@ -129,94 +109,4 @@ public class MetricDataAssert extends AbstractAssert<MetricDataAssert, MetricDat
}
return new ExponentialHistogramAssert(ExponentialHistogramData.fromMetricData(actual));
}
/**
* Ensures this {@link MetricData} is a {@code DoubleGauge}.
*
* @return convenience API to assert against the {@code DoubleGauge}.
*/
public GaugeAssert<DoublePointData> hasDoubleGauge() {
isNotNull();
if (actual.getType() != MetricDataType.DOUBLE_GAUGE) {
failWithActualExpectedAndMessage(
actual,
"type: DOUBLE_GAUGE",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.DOUBLE_GAUGE,
actual.getType());
}
return new GaugeAssert<>(actual.getDoubleGaugeData());
}
/**
* Ensures this {@link MetricData} is a {@code DoubleSum}.
*
* @return convenience API to assert against the {@code DoubleSum}.
*/
public SumDataAssert<DoublePointData> hasDoubleSum() {
isNotNull();
if (actual.getType() != MetricDataType.DOUBLE_SUM) {
failWithActualExpectedAndMessage(
actual,
"type: DOUBLE_SUM",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.DOUBLE_SUM,
actual.getType());
}
return new SumDataAssert<>(actual.getDoubleSumData());
}
/**
* Ensures this {@link MetricData} is a {@code LongGauge}.
*
* @return convenience API to assert against the {@code LongGauge}.
*/
public GaugeAssert<LongPointData> hasLongGauge() {
isNotNull();
if (actual.getType() != MetricDataType.LONG_GAUGE) {
failWithActualExpectedAndMessage(
actual,
"type: LONG_GAUGE",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.LONG_GAUGE,
actual.getType());
}
return new GaugeAssert<>(actual.getLongGaugeData());
}
/**
* Ensures this {@link MetricData} is a {@code LongSum}.
*
* @return convenience API to assert against the {@code LongSum}.
*/
public SumDataAssert<LongPointData> hasLongSum() {
isNotNull();
if (actual.getType() != MetricDataType.LONG_SUM) {
failWithActualExpectedAndMessage(
actual,
"type: LONG_SUM",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.LONG_SUM,
actual.getType());
}
return new SumDataAssert<>(actual.getLongSumData());
}
/**
* Ensures this {@link MetricData} is a {@code DoubleSummaryData}.
*
* @return convenience API to assert against the {@code DoubleSummaryData}.
*/
public SummaryDataAssert hasDoubleSummary() {
isNotNull();
if (actual.getType() != MetricDataType.SUMMARY) {
failWithActualExpectedAndMessage(
actual,
"type: SUMMARY",
"Expected MetricData to have type <%s> but found <%s>",
MetricDataType.SUMMARY,
actual.getType());
}
return new SummaryDataAssert(actual.getSummaryData());
}
}

View File

@ -1,76 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.data.SumData;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link SumData}. */
/** Test assertions for {@link SumData}. */
public class SumDataAssert<T extends PointData>
    extends AbstractAssert<SumDataAssert<T>, SumData<T>> {

  protected SumDataAssert(SumData<T> actual) {
    super(actual, SumDataAssert.class);
  }

  /** Ensures that {@code is_monotonic} field is true. */
  public SumDataAssert<T> isMonotonic() {
    isNotNull();
    if (!actual.isMonotonic()) {
      // Message now carries a %s for the actual value; previously two args were passed
      // against a format string with no specifier.
      failWithActualExpectedAndMessage(
          actual,
          "monotonic: true",
          "Expected Sum to be monotonic but found <%s>",
          actual.isMonotonic());
    }
    return myself;
  }

  /** Ensures that {@code is_monotonic} field is false. */
  public SumDataAssert<T> isNotMonotonic() {
    isNotNull();
    if (actual.isMonotonic()) {
      // Fixed expected-value label: previously read "monotonic: fail".
      failWithActualExpectedAndMessage(
          actual,
          "monotonic: false",
          "Expected Sum to be non-monotonic, found: %s",
          actual.isMonotonic());
    }
    return myself;
  }

  /** Ensures that {@code aggregation_temporality} field is {@code CUMULATIVE}. */
  public SumDataAssert<T> isCumulative() {
    isNotNull();
    if (actual.getAggregationTemporality() != AggregationTemporality.CUMULATIVE) {
      failWithActualExpectedAndMessage(
          actual,
          "aggregationTemporality: CUMULATIVE",
          "Expected Sum to have cumulative aggregation but found <%s>",
          actual.getAggregationTemporality());
    }
    return myself;
  }

  /** Ensures that {@code aggregation_temporality} field is {@code DELTA}. */
  public SumDataAssert<T> isDelta() {
    isNotNull();
    if (actual.getAggregationTemporality() != AggregationTemporality.DELTA) {
      failWithActualExpectedAndMessage(
          actual,
          "aggregationTemporality: DELTA",
          "Expected Sum to have delta aggregation but found <%s>",
          actual.getAggregationTemporality());
    }
    return myself;
  }

  /** Returns convenience API to assert against the {@code points} field. */
  public AbstractIterableAssert<?, ? extends Iterable<? extends T>, T, ?> points() {
    isNotNull();
    return Assertions.assertThat(actual.getPoints());
  }
}

View File

@ -1,28 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.SummaryData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import org.assertj.core.api.AbstractAssert;
import org.assertj.core.api.AbstractIterableAssert;
import org.assertj.core.api.Assertions;
/** Assert on a {@link SummaryData} metric. */
/** Assert on a {@link SummaryData} metric. */
public class SummaryDataAssert extends AbstractAssert<SummaryDataAssert, SummaryData> {

  protected SummaryDataAssert(SummaryData actual) {
    super(actual, SummaryDataAssert.class);
  }

  /**
   * Returns a convenience iterable assertion over the summary's {@code points} field, allowing
   * chained checks such as {@code satisfiesExactly(...)} or {@code isEmpty()}.
   */
  public AbstractIterableAssert<
          ?, ? extends Iterable<? extends SummaryPointData>, SummaryPointData, ?>
      points() {
    isNotNull();
    return Assertions.assertThat(actual.getPoints());
  }
}

View File

@ -1,39 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.testing.assertj;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.data.ValueAtQuantile;
import org.assertj.core.api.Assertions;
/** Asserts for (deprecated) Summary points. */
/**
 * Assertions for a (deprecated) {@link SummaryPointData} point: measurement count, sum, and
 * the recorded values-at-quantile.
 */
public class SummaryPointDataAssert
    extends AbstractPointDataAssert<SummaryPointDataAssert, SummaryPointData> {

  protected SummaryPointDataAssert(SummaryPointData actual) {
    super(actual, SummaryPointDataAssert.class);
  }

  /** Asserts the summary's total {@code sum} across all observed measurements. */
  public SummaryPointDataAssert hasSum(double expected) {
    isNotNull();
    double sum = actual.getSum();
    Assertions.assertThat(sum).as("sum").isEqualTo(expected);
    return this;
  }

  /** Asserts the summary's measurement {@code count}. */
  public SummaryPointDataAssert hasCount(long expected) {
    isNotNull();
    long count = actual.getCount();
    Assertions.assertThat(count).as("count").isEqualTo(expected);
    return this;
  }

  /** Asserts the summary holds exactly the given percentile values, in any order. */
  public SummaryPointDataAssert hasValues(ValueAtQuantile... values) {
    isNotNull();
    Assertions.assertThat(actual.getValues()).containsExactlyInAnyOrder(values);
    return this;
  }
}

View File

@ -10,34 +10,13 @@ import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.TraceFlags;
import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.SummaryPointData;
import io.opentelemetry.sdk.metrics.data.ValueAtQuantile;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSummaryPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableValueAtQuantile;
import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.ExponentialHistogramData;
import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
@ -46,29 +25,6 @@ public class MetricAssertionsTest {
Resource.create(Attributes.of(stringKey("resource_key"), "resource_value"));
private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO =
InstrumentationScopeInfo.create("instrumentation_library");
private static final MetricData HISTOGRAM_METRIC =
ImmutableMetricData.createDoubleHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "histogram",
/* description= */ "description",
/* unit= */ "unit",
ImmutableHistogramData.create(
AggregationTemporality.CUMULATIVE,
// Points
Collections.emptyList()));
private static final MetricData HISTOGRAM_DELTA_METRIC =
ImmutableMetricData.createDoubleHistogram(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "histogram_delta",
/* description= */ "description",
/* unit= */ "unit",
ImmutableHistogramData.create(
AggregationTemporality.DELTA,
// Points
Collections.emptyList()));
private static final MetricData EXPONENTIAL_HISTOGRAM_METRIC =
ImmutableMetricData.createExponentialHistogram(
@ -94,17 +50,6 @@ public class MetricAssertionsTest {
// Points
Collections.emptyList()));
private static final MetricData DOUBLE_SUMMARY_METRIC =
ImmutableMetricData.createDoubleSummary(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "summary",
/* description= */ "description",
/* unit= */ "unit",
ImmutableSummaryData.create(
// Points
Collections.emptyList()));
private static final MetricData DOUBLE_GAUGE_METRIC =
ImmutableMetricData.createDoubleGauge(
RESOURCE,
@ -116,128 +61,12 @@ public class MetricAssertionsTest {
// Points
Collections.emptyList()));
private static final MetricData DOUBLE_SUM_METRIC =
ImmutableMetricData.createDoubleSum(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "sum",
/* description= */ "description",
/* unit= */ "unit",
ImmutableSumData.create(
true,
AggregationTemporality.CUMULATIVE,
// Points
Collections.emptyList()));
private static final MetricData DOUBLE_DELTA_SUM_METRIC =
ImmutableMetricData.createDoubleSum(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "sum_delta",
/* description= */ "description",
/* unit= */ "unit",
ImmutableSumData.create(
false,
AggregationTemporality.DELTA,
// Points
Collections.emptyList()));
private static final DoubleExemplarData DOUBLE_EXEMPLAR =
ImmutableDoubleExemplarData.create(
Attributes.empty(),
0,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
1.0);
private static final DoublePointData DOUBLE_POINT_DATA =
ImmutableDoublePointData.create(1, 2, Attributes.empty(), 3.0, Collections.emptyList());
private static final DoublePointData DOUBLE_POINT_DATA_WITH_EXEMPLAR =
ImmutableDoublePointData.create(
1, 2, Attributes.empty(), 3.0, Collections.singletonList(DOUBLE_EXEMPLAR));
private static final MetricData LONG_GAUGE_METRIC =
ImmutableMetricData.createLongGauge(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "gauge",
/* description= */ "description",
/* unit= */ "unit",
ImmutableGaugeData.create(
// Points
Collections.emptyList()));
private static final MetricData LONG_SUM_METRIC =
ImmutableMetricData.createLongSum(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "sum",
/* description= */ "description",
/* unit= */ "unit",
ImmutableSumData.create(
true,
AggregationTemporality.CUMULATIVE,
// Points
Collections.emptyList()));
private static final MetricData LONG_DELTA_SUM_METRIC =
ImmutableMetricData.createLongSum(
RESOURCE,
INSTRUMENTATION_SCOPE_INFO,
/* name= */ "sum_delta",
/* description= */ "description",
/* unit= */ "unit",
ImmutableSumData.create(
false,
AggregationTemporality.DELTA,
// Points
Collections.emptyList()));
private static final LongExemplarData LONG_EXEMPLAR =
ImmutableLongExemplarData.create(
Attributes.empty(),
0,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
1);
private static final LongPointData LONG_POINT_DATA =
ImmutableLongPointData.create(1, 2, Attributes.empty(), 3, Collections.emptyList());
private static final LongPointData LONG_POINT_DATA_WITH_EXEMPLAR =
ImmutableLongPointData.create(
1, 2, Attributes.empty(), 3, Collections.singletonList(LONG_EXEMPLAR));
private static final ValueAtQuantile PERCENTILE_VALUE = ImmutableValueAtQuantile.create(0, 1);
private static final SummaryPointData DOUBLE_SUMMARY_POINT_DATA =
ImmutableSummaryPointData.create(
1, 2, Attributes.empty(), 1, 2, Collections.singletonList(PERCENTILE_VALUE));
private static final HistogramPointData DOUBLE_HISTOGRAM_POINT_DATA =
ImmutableHistogramPointData.create(
1,
2,
Attributes.empty(),
15,
4.0,
7.0,
Collections.singletonList(10.0),
Arrays.asList(1L, 2L));
@Test
void metric_passing() {
assertThat(HISTOGRAM_METRIC)
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("histogram")
.hasName("exponential_histogram")
.hasDescription("description")
.hasUnit("unit");
}
@ -246,37 +75,21 @@ public class MetricAssertionsTest {
void metric_fails() {
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasResource(
Resource.create(Attributes.of(stringKey("monkey_key"), "resource_value"))))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(HISTOGRAM_METRIC)
assertThat(EXPONENTIAL_HISTOGRAM_METRIC)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("instrumentation_library_for_monkeys")))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasName("Monkeys"))
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasName("Monkeys"))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasDescription("Monkeys"))
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasDescription("Monkeys"))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasUnit("Monkeys"))
.isInstanceOf(AssertionError.class);
}
@Test
void histogram_passing() {
assertThat(HISTOGRAM_METRIC).hasDoubleHistogram().isCumulative();
assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleHistogram().isDelta();
}
@Test
void histogram_fails() {
assertThatThrownBy(() -> assertThat(DOUBLE_GAUGE_METRIC).hasDoubleHistogram())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(HISTOGRAM_METRIC).hasDoubleHistogram().isDelta())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleHistogram().isCumulative())
assertThatThrownBy(() -> assertThat(EXPONENTIAL_HISTOGRAM_METRIC).hasUnit("Monkeys"))
.isInstanceOf(AssertionError.class);
}
@ -300,230 +113,4 @@ public class MetricAssertionsTest {
.isCumulative())
.isInstanceOf(AssertionError.class);
}
@Test
void summary_passing() {
assertThat(DOUBLE_SUMMARY_METRIC).hasDoubleSummary();
}
@Test
void summary_failing() {
assertThatThrownBy(() -> assertThat(DOUBLE_GAUGE_METRIC).hasDoubleSummary())
.isInstanceOf(AssertionError.class);
}
@Test
void doubleGauge_passing() {
assertThat(DOUBLE_GAUGE_METRIC).hasDoubleGauge();
}
@Test
void doubleGauge_fails() {
assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleGauge())
.isInstanceOf(AssertionError.class);
}
@Test
void doubleSum_passing() {
assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isCumulative().isMonotonic();
assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isDelta().isNotMonotonic();
}
@Test
void doubleSum_fails() {
assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasDoubleSum())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isDelta())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_SUM_METRIC).hasDoubleSum().isNotMonotonic())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isCumulative())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_DELTA_SUM_METRIC).hasDoubleSum().isMonotonic())
.isInstanceOf(AssertionError.class);
}
@Test
void doublePoint_passing() {
assertThat(DOUBLE_POINT_DATA)
.hasStartEpochNanos(1)
.hasEpochNanos(2)
.hasValue(3)
.hasAttributes(Attributes.empty())
.exemplars()
.isEmpty();
assertThat(DOUBLE_POINT_DATA_WITH_EXEMPLAR).hasExemplars(DOUBLE_EXEMPLAR);
}
@Test
void doublePoint_failing() {
assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasStartEpochNanos(2))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasEpochNanos(3))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_POINT_DATA).hasValue(4))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(DOUBLE_POINT_DATA)
.hasAttributes(Attributes.builder().put("x", "y").build()))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(DOUBLE_POINT_DATA)
.hasExemplars(
ImmutableDoubleExemplarData.create(
Attributes.empty(),
0,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
1.0)))
.isInstanceOf(AssertionError.class);
}
@Test
void longPoint_passing() {
assertThat(LONG_POINT_DATA)
.hasStartEpochNanos(1)
.hasEpochNanos(2)
.hasValue(3)
.hasAttributes(Attributes.empty())
.exemplars()
.isEmpty();
assertThat(LONG_POINT_DATA_WITH_EXEMPLAR).hasExemplars(LONG_EXEMPLAR);
}
@Test
void longPoint_failing() {
assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasStartEpochNanos(2))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasEpochNanos(3))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_POINT_DATA).hasValue(4))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(LONG_POINT_DATA)
.hasAttributes(Attributes.builder().put("x", "y").build()))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(LONG_POINT_DATA)
.hasExemplars(
ImmutableLongExemplarData.create(
Attributes.empty(),
0,
SpanContext.create(
"00000000000000000000000000000001",
"0000000000000002",
TraceFlags.getDefault(),
TraceState.getDefault()),
1)))
.isInstanceOf(AssertionError.class);
}
@Test
void longSum_passing() {
assertThat(LONG_SUM_METRIC).hasLongSum().isCumulative().isMonotonic();
assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isDelta().isNotMonotonic();
}
@Test
void longSum_fails() {
assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasLongSum())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_SUM_METRIC).hasLongSum().isDelta())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_SUM_METRIC).hasLongSum().isNotMonotonic())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isCumulative())
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(LONG_DELTA_SUM_METRIC).hasLongSum().isMonotonic())
.isInstanceOf(AssertionError.class);
}
@Test
void longGauge_passing() {
assertThat(LONG_GAUGE_METRIC).hasLongGauge();
}
@Test
void longGauge_fails() {
assertThatThrownBy(() -> assertThat(HISTOGRAM_DELTA_METRIC).hasLongGauge())
.isInstanceOf(AssertionError.class);
}
@Test
void doubleSummaryPointData_passing() {
assertThat(DOUBLE_SUMMARY_POINT_DATA)
.hasCount(1)
.hasSum(2)
.hasEpochNanos(2)
.hasStartEpochNanos(1)
.hasAttributes(Attributes.empty())
.hasValues(PERCENTILE_VALUE);
}
@Test
void doubleSummaryPointData_failing() {
assertThatThrownBy(() -> assertThat(DOUBLE_SUMMARY_POINT_DATA).hasCount(2))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_SUMMARY_POINT_DATA).hasSum(1))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(
() ->
assertThat(DOUBLE_SUMMARY_POINT_DATA)
.hasValues(ImmutableValueAtQuantile.create(1, 1)))
.isInstanceOf(AssertionError.class);
}
@Test
void doubleHistogramPointData_passing() {
assertThat(DOUBLE_HISTOGRAM_POINT_DATA)
.hasCount(3)
.hasSum(15)
.hasMin(4.0)
.hasMax(7.0)
.hasSumGreaterThan(10)
.hasEpochNanos(2)
.hasStartEpochNanos(1)
.hasAttributes(Attributes.empty())
.hasBucketBoundaries(10)
.hasBucketCounts(1, 2);
}
@Test
void doubleHistogramPointData_failing() {
assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasCount(2))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasSum(1))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasSumGreaterThan(20))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasBucketBoundaries(1, 2, 3))
.isInstanceOf(AssertionError.class);
assertThatThrownBy(() -> assertThat(DOUBLE_HISTOGRAM_POINT_DATA).hasBucketCounts(1, 2, 3))
.isInstanceOf(AssertionError.class);
}
}

View File

@ -27,7 +27,7 @@ dependencies {
testImplementation("com.google.guava:guava")
jmh(project(":sdk:trace"))
jmh(project(":sdk:metrics-testing"))
jmh(project(":sdk:testing"))
}
testing {

View File

@ -5,13 +5,15 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.LongCounter;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.api.metrics.ObservableLongMeasurement;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.SumData;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
import java.time.Duration;
import java.util.concurrent.atomic.AtomicLong;
@ -62,29 +64,28 @@ class CardinalityTest {
// DELTA reader only has latest
assertThat(deltaReader.collectAllMetrics())
.as("Delta collection " + i)
.hasSize(1)
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasName("sync-counter")
.hasLongSum()
.isDelta()
.points()
.hasSize(1));
.hasLongSumSatisfying(sum -> sum.isDelta().hasPointsSatisfying(point -> {})));
// Make sure we preserve previous cumulatives
int currentSize = i;
assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection " + i)
.hasSize(1)
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasName("sync-counter")
.hasLongSum()
.isCumulative()
.points()
.hasSize(currentSize));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(currentSize))));
}
// Now punch the limit and ONLY metrics we just recorded stay, due to simplistic GC.
for (int i = 2001; i <= 2010; i++) {
@ -92,27 +93,33 @@ class CardinalityTest {
}
assertThat(deltaReader.collectAllMetrics())
.as("Delta collection - post limit @ 10")
.hasSize(1)
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasName("sync-counter")
.hasLongSum()
.isDelta()
.points()
.hasSize(10));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(10))));
assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection - post limit @ 10")
.hasSize(1)
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasName("sync-counter")
.hasLongSum()
.isCumulative()
.points()
.hasSize(10));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(10))));
}
/**
@ -134,27 +141,20 @@ class CardinalityTest {
for (int i = 1; i <= 5; i++) {
assertThat(deltaReader.collectAllMetrics())
.as("Delta collection " + i)
.hasSize(1)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("async-counter")
.hasLongSum()
.isDelta()
.points()
.hasSize(1));
.hasLongSumSatisfying(sum -> sum.isDelta().hasPointsSatisfying(point -> {})));
assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection " + i)
.hasSize(1)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("async-counter")
.hasLongSum()
.isCumulative()
.points()
.hasSize(1));
.hasLongSumSatisfying(
sum -> sum.isCumulative().hasPointsSatisfying(point -> {})));
}
}
@ -173,41 +173,55 @@ class CardinalityTest {
assertThat(deltaReader.collectAllMetrics())
.as("Delta collection")
.hasSize(2)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("sync-counter1")
.hasLongSum()
.isDelta()
.points()
.hasSize(MAX_ACCUMULATIONS),
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))),
metricData ->
assertThat(metricData)
.hasName("sync-counter2")
.hasLongSum()
.isDelta()
.points()
.hasSize(MAX_ACCUMULATIONS));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))));
assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection")
.hasSize(2)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("sync-counter1")
.hasLongSum()
.isCumulative()
.points()
.hasSize(MAX_ACCUMULATIONS),
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))),
metricData ->
assertThat(metricData)
.hasName("sync-counter2")
.hasLongSum()
.isCumulative()
.points()
.hasSize(MAX_ACCUMULATIONS));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))));
}
/**
@ -227,40 +241,54 @@ class CardinalityTest {
assertThat(deltaReader.collectAllMetrics())
.as("Delta collection")
.hasSize(2)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("async-counter1")
.hasLongSum()
.isDelta()
.points()
.hasSize(MAX_ACCUMULATIONS),
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))),
metricData ->
assertThat(metricData)
.hasName("async-counter2")
.hasLongSum()
.isDelta()
.points()
.hasSize(MAX_ACCUMULATIONS));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))));
assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection")
.hasSize(2)
.satisfiesExactlyInAnyOrder(
metricData ->
assertThat(metricData)
.hasName("async-counter1")
.hasLongSum()
.isCumulative()
.points()
.hasSize(MAX_ACCUMULATIONS),
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))),
metricData ->
assertThat(metricData)
.hasName("async-counter2")
.hasLongSum()
.isCumulative()
.points()
.hasSize(MAX_ACCUMULATIONS));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
(Consumer<SumData<LongPointData>>)
sumPointData ->
assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.github.netmikey.logunit.api.LogCapturer;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
@ -55,9 +55,8 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
meterProvider.get("meter2").counterBuilder("counter2").ofDoubles().build().add(10);
meterProvider.get("meter2").counterBuilder("counter2").ofDoubles().build().add(10);
@ -68,9 +67,8 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter2")
.hasDoubleSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasDoubleSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
meterProvider
.get("meter3")
@ -92,9 +90,8 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter3"))
.hasName("counter3")
.hasDescription("description3")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
meterProvider.get("meter4").counterBuilder("counter4").setUnit("unit4").build().add(10);
meterProvider.get("meter4").counterBuilder("counter4").setUnit("unit4").build().add(10);
@ -106,9 +103,8 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter4"))
.hasName("counter4")
.hasUnit("unit4")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -126,16 +122,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -154,16 +148,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
meterProvider.get("meter1").counterBuilder("counter1").build().add(10);
meterProvider
@ -180,17 +172,15 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("meter1", "version1", null))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
meterProvider
.meterBuilder("meter1")
@ -215,17 +205,15 @@ class IdentityTest {
.hasInstrumentationScope(
InstrumentationScopeInfo.create("meter1", "version1", null))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("meter1", "version1", "schema1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -251,16 +239,14 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -287,17 +273,15 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasUnit("unit1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasUnit("")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -321,18 +305,15 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.isNotMonotonic()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic().hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.isMonotonic()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.isMonotonic().hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -356,16 +337,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDoubleSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasDoubleSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -393,9 +372,8 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -419,17 +397,15 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -453,17 +429,15 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -487,16 +461,14 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -522,16 +494,14 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -557,16 +527,14 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -605,17 +573,15 @@ class IdentityTest {
InstrumentationScopeInfo.create("meter1", "version1", null))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("meter1", "version2", null))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -654,17 +620,15 @@ class IdentityTest {
InstrumentationScopeInfo.create("meter1", null, "schema1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(
InstrumentationScopeInfo.create("meter1", null, "schema2"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -693,25 +657,22 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
},
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10)));
assertThat(metricData.getDescription()).isBlank();
});
@ -743,17 +704,15 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -786,16 +745,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDoubleHistogram()
.points()
.satisfiesExactly(point -> assertThat(point).hasSum(20)));
.hasHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasSum(20))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -826,16 +783,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter-new")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter-new")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents())
.allSatisfy(
@ -864,16 +819,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter-new")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter2"))
.hasName("counter-new")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -907,16 +860,14 @@ class IdentityTest {
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasDescription("description1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(20)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(20))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1-histogram")
.hasDoubleHistogram()
.points()
.satisfiesExactly(point -> assertThat(point).hasSum(20)));
.hasHistogramSatisfying(
histogram -> histogram.hasPointsSatisfying(point -> point.hasSum(20))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
assertThat(viewRegistryLogs.getEvents()).hasSize(0);
@ -948,16 +899,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
}
@ -987,16 +936,14 @@ class IdentityTest {
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter1")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1)),
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))),
metricData ->
assertThat(metricData)
.hasInstrumentationScope(forMeter("meter1"))
.hasName("counter2")
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(10)));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(10))));
assertThat(metricStorageRegistryLogs.getEvents()).hasSize(0);
assertThat(viewRegistryLogs.getEvents())

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.github.netmikey.logunit.api.LogCapturer;
@ -16,7 +17,6 @@ import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleCounter;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
@ -95,17 +95,17 @@ class SdkDoubleCounterTest {
.hasName("testCounter")
.hasDescription("description")
.hasUnit("ms")
.hasDoubleSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24)));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24))));
}
@Test
@ -132,24 +132,23 @@ class SdkDoubleCounterTest {
.hasName("testCounter")
.hasDescription("")
.hasUnit("")
.hasDoubleSum()
.isMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasAttributes(Attributes.empty()).hasValue(33.5),
point ->
assertThat(point)
.hasValue(555.9)
.attributes()
.hasSize(1)
.containsEntry("K", "V")));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(33.5),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(555.9)
.hasAttributes(attributeEntry("K", "V")))));
// Repeat to prove we keep previous values.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -159,21 +158,23 @@ class SdkDoubleCounterTest {
.satisfiesExactly(
metric ->
assertThat(metric)
.hasDoubleSum()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasAttributes(Attributes.empty()).hasValue(44.5),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(777.9)));
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(44.5),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(777.9)
.hasAttributes(attributeEntry("K", "V")))));
} finally {
bound.unbind();
}
@ -234,19 +235,17 @@ class SdkDoubleCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testCounter")
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(80_000)
.attributes()
.hasSize(1)
.containsEntry("K", "V")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(80_000)
.hasAttributes(attributeEntry("K", "V")))));
}
@Test
@ -281,22 +280,35 @@ class SdkDoubleCounterTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.Meter;
@ -41,7 +42,11 @@ class SdkDoubleGaugeBuilderTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testGauge").hasDoubleGauge().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testGauge")
.hasDoubleGaugeSatisfying(
doubleGauge -> doubleGauge.hasPointsSatisfying(poit -> {})));
gauge.close();
@ -76,15 +81,15 @@ class SdkDoubleGaugeBuilderTest {
.hasName("testObserver")
.hasDescription("My own DoubleValueObserver")
.hasUnit("ms")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("k", "v").build())
.hasValue(12.1d)));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("k", "v"))
.hasValue(12.1d))));
testClock.advance(Duration.ofSeconds(1));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -95,14 +100,14 @@ class SdkDoubleGaugeBuilderTest {
.hasName("testObserver")
.hasDescription("My own DoubleValueObserver")
.hasUnit("ms")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("k", "v").build())
.hasValue(12.1d)));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("k", "v"))
.hasValue(12.1d))));
}
}

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.github.netmikey.logunit.api.LogCapturer;
@ -16,10 +17,10 @@ import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleHistogram;
import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
@ -97,21 +98,23 @@ class SdkDoubleHistogramTest {
.hasName("testHistogram")
.hasDescription("description")
.hasUnit("ms")
.hasDoubleHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(24)
.hasBucketBoundaries(
5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, 5_000,
7_500, 10_000)
.hasBucketCounts(0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)));
.hasHistogramSatisfying(
histogram ->
histogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(24)
.hasBucketBoundaries(
5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500,
5_000, 7_500, 10_000)
.hasBucketCounts(
0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))));
}
@Test
@ -136,26 +139,27 @@ class SdkDoubleHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasCount(3)
.hasSum(566.3d)
.hasBucketCounts(0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.builder().put("K", "V").build()),
point ->
assertThat(point)
.hasCount(2)
.hasSum(22.2d)
.hasBucketCounts(0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty())));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(3)
.hasSum(566.3d)
.hasBucketCounts(
0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry("K", "V")),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(2)
.hasSum(22.2d)
.hasBucketCounts(
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty()))));
// Histograms are cumulative by default.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -168,26 +172,27 @@ class SdkDoubleHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasCount(4)
.hasSum(788.3)
.hasBucketCounts(0, 0, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.builder().put("K", "V").build()),
point ->
assertThat(point)
.hasCount(3)
.hasSum(39.2)
.hasBucketCounts(0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty())));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(4)
.hasSum(788.3)
.hasBucketCounts(
0, 0, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry("K", "V")),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(3)
.hasSum(39.2)
.hasBucketCounts(
0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty()))));
} finally {
bound.unbind();
}
@ -220,7 +225,7 @@ class SdkDoubleHistogramTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric ->
assertThat(metric)
MetricAssertions.assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
@ -231,7 +236,7 @@ class SdkDoubleHistogramTest {
.points()
.satisfiesExactlyInAnyOrder(
point -> {
assertThat(point)
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
@ -239,15 +244,15 @@ class SdkDoubleHistogramTest {
.hasSum(25)
.hasScale(-1)
.hasZeroCount(0);
assertThat(point.getPositiveBuckets())
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(1)
.hasCounts(Collections.singletonList(2L));
assertThat(point.getNegativeBuckets())
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
},
point -> {
assertThat(point)
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("key", "value").build())
@ -255,10 +260,10 @@ class SdkDoubleHistogramTest {
.hasSum(12)
.hasScale(-1)
.hasZeroCount(0);
assertThat(point.getPositiveBuckets())
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(1)
.hasCounts(Collections.singletonList(1L));
assertThat(point.getNegativeBuckets())
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
}));
@ -321,16 +326,16 @@ class SdkDoubleHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasCount(8_000)
.hasSum(80_000)));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasCount(8_000)
.hasSum(80_000))));
}
@Test
@ -369,22 +374,45 @@ class SdkDoubleHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(4_000)
.hasSum(40_000)
.hasBucketCounts(0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(4_000)
.hasSum(40_000)
.hasBucketCounts(
0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(4_000)
.hasSum(40_000)
.hasBucketCounts(
0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(4_000)
.hasSum(40_000)
.hasBucketCounts(
0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(4_000)
.hasSum(40_000)
.hasBucketCounts(
0, 2000, 2000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
@ -14,7 +15,6 @@ import io.opentelemetry.api.metrics.DoubleUpDownCounter;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundDoubleUpDownCounter;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
@ -98,17 +98,17 @@ class SdkDoubleUpDownCounterTest {
.hasName("testUpDownCounter")
.hasDescription("description")
.hasUnit("ms")
.hasDoubleSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24)));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24))));
}
@Test
@ -135,22 +135,23 @@ class SdkDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasDoubleSum()
.isNotMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasAttributes(Attributes.empty()).hasValue(33.5),
point ->
assertThat(point)
.hasValue(555.9)
.hasAttributes(Attributes.of(stringKey("K"), "V"))));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(33.5),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(555.9)
.hasAttributes(attributeEntry("K", "V")))));
// Repeat to prove we keep previous values.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -163,22 +164,23 @@ class SdkDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasDoubleSum()
.isNotMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point).hasAttributes(Attributes.empty()).hasValue(44.5),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(777.9)));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(44.5),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasValue(777.9))));
} finally {
bound.unbind();
}
@ -215,19 +217,17 @@ class SdkDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasDoubleSum()
.isCumulative()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(80_000)
.attributes()
.hasSize(1)
.containsEntry("K", "V")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(80_000)
.hasAttributes(attributeEntry("K", "V")))));
}
@Test
@ -264,22 +264,35 @@ class SdkDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasDoubleSum()
.isCumulative()
.isNotMonotonic()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(40_000)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.github.netmikey.logunit.api.LogCapturer;
@ -16,7 +17,6 @@ import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongCounter;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
@ -87,17 +87,18 @@ class SdkLongCounterTest {
.hasName("testCounter")
.hasDescription("description")
.hasUnit("By")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24)));
.hasLongSumSatisfying(
longSum ->
longSum
.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24))));
}
@Test
@ -122,21 +123,24 @@ class SdkLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testCounter")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(33),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(555)));
.hasLongSumSatisfying(
longSum ->
longSum
.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(33),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasValue(555))));
// Repeat to prove we keep previous values.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -149,21 +153,24 @@ class SdkLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testCounter")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(44),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(777)));
.hasLongSumSatisfying(
longSum ->
longSum
.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(44),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasValue(777))));
} finally {
bound.unbind();
}
@ -224,19 +231,18 @@ class SdkLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testCounter")
.hasLongSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(160_000)
.attributes()
.hasSize(1)
.containsEntry("K", "V")));
.hasLongSumSatisfying(
longSum ->
longSum
.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(160_000)
.hasAttributes(attributeEntry("K", "V")))));
}
@Test
@ -272,22 +278,36 @@ class SdkLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testCounter")
.hasLongSum()
.isCumulative()
.isMonotonic()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasLongSumSatisfying(
longSum ->
longSum
.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.Meter;
@ -44,7 +45,11 @@ class SdkLongGaugeBuilderTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testGauge").hasLongGauge().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testGauge")
.hasLongGaugeSatisfying(
longGauge -> longGauge.hasPointsSatisfying(point -> {})));
gauge.close();
@ -76,15 +81,15 @@ class SdkLongGaugeBuilderTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("k", "v").build())
.hasValue(12)));
.hasLongGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("k", "v"))
.hasValue(12))));
testClock.advance(Duration.ofSeconds(1));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -93,14 +98,14 @@ class SdkLongGaugeBuilderTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("k", "v").build())
.hasValue(12)));
.hasLongGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2000000000L)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("k", "v"))
.hasValue(12))));
}
}

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.github.netmikey.logunit.api.LogCapturer;
@ -16,10 +17,10 @@ import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.internal.testing.slf4j.SuppressLogger;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongHistogram;
import io.opentelemetry.sdk.metrics.internal.view.ExponentialHistogramAggregation;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
@ -97,21 +98,23 @@ class SdkLongHistogramTest {
.hasName("testHistogram")
.hasDescription("description")
.hasUnit("By")
.hasDoubleHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(24)
.hasBucketBoundaries(
5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500, 5_000,
7_500, 10_000)
.hasBucketCounts(0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)));
.hasHistogramSatisfying(
histogram ->
histogram
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(2)
.hasSum(24)
.hasBucketBoundaries(
5, 10, 25, 50, 75, 100, 250, 500, 750, 1_000, 2_500,
5_000, 7_500, 10_000)
.hasBucketCounts(
0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))));
}
@Test
@ -136,26 +139,27 @@ class SdkLongHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasCount(3)
.hasSum(445)
.hasBucketCounts(1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.builder().put("K", "V").build()),
point ->
assertThat(point)
.hasCount(2)
.hasSum(23)
.hasBucketCounts(0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty())));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(3)
.hasSum(445)
.hasBucketCounts(
1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry("K", "V")),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(2)
.hasSum(23)
.hasBucketCounts(
0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty()))));
// Histograms are cumulative by default.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -168,26 +172,27 @@ class SdkLongHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasCount(4)
.hasSum(667)
.hasBucketCounts(1, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.builder().put("K", "V").build()),
point ->
assertThat(point)
.hasCount(3)
.hasSum(40)
.hasBucketCounts(0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty())));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(4)
.hasSum(667)
.hasBucketCounts(
1, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry("K", "V")),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasCount(3)
.hasSum(40)
.hasBucketCounts(
0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(Attributes.empty()))));
} finally {
bound.unbind();
}
@ -221,7 +226,7 @@ class SdkLongHistogramTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric ->
assertThat(metric)
MetricAssertions.assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
@ -232,7 +237,7 @@ class SdkLongHistogramTest {
.points()
.satisfiesExactlyInAnyOrder(
point -> {
assertThat(point)
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
@ -240,15 +245,15 @@ class SdkLongHistogramTest {
.hasSum(25)
.hasScale(-1)
.hasZeroCount(0);
assertThat(point.getPositiveBuckets())
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(1)
.hasCounts(Collections.singletonList(2L));
assertThat(point.getNegativeBuckets())
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
},
point -> {
assertThat(point)
MetricAssertions.assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.builder().put("key", "value").build())
@ -256,10 +261,10 @@ class SdkLongHistogramTest {
.hasSum(12)
.hasScale(-1)
.hasZeroCount(0);
assertThat(point.getPositiveBuckets())
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasOffset(1)
.hasCounts(Collections.singletonList(1L));
assertThat(point.getNegativeBuckets())
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasOffset(0)
.hasCounts(Collections.emptyList());
}));
@ -322,16 +327,16 @@ class SdkLongHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasCount(16_000)
.hasSum(160_000)));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasCount(16_000)
.hasSum(160_000))));
}
@Test
@ -370,22 +375,45 @@ class SdkLongHistogramTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testHistogram")
.hasDoubleHistogram()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(2_000)
.hasSum(20_000)
.hasBucketCounts(0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(2_000)
.hasSum(20_000)
.hasBucketCounts(
0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(2_000)
.hasSum(20_000)
.hasBucketCounts(
0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(2_000)
.hasSum(20_000)
.hasBucketCounts(
0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasCount(2_000)
.hasSum(20_000)
.hasBucketCounts(
0, 1000, 1000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import io.opentelemetry.api.common.Attributes;
@ -14,7 +15,6 @@ import io.opentelemetry.api.metrics.LongUpDownCounter;
import io.opentelemetry.api.metrics.Meter;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.StressTestRunner.OperationUpdater;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.instrument.BoundLongUpDownCounter;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.exporter.InMemoryMetricReader;
@ -90,17 +90,17 @@ class SdkLongUpDownCounterTest {
.hasName("testUpDownCounter")
.hasDescription("description")
.hasUnit("By")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24)));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(24))));
}
@Test
@ -126,21 +126,23 @@ class SdkLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(33),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(555)));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(33),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasValue(555))));
// Repeat to prove we keep previous values.
testClock.advance(Duration.ofNanos(SECOND_NANOS));
@ -153,21 +155,23 @@ class SdkLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now()))
.satisfiesExactlyInAnyOrder(
point -> assertThat(point).hasAttributes(Attributes.empty()).hasValue(44),
point ->
assertThat(point)
.hasAttributes(Attributes.of(stringKey("K"), "V"))
.hasValue(777)));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(44),
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasAttributes(attributeEntry("K", "V"))
.hasValue(777))));
} finally {
bound.unbind();
}
@ -204,19 +208,17 @@ class SdkLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasLongSum()
.isCumulative()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(160_000)
.attributes()
.hasSize(1)
.containsEntry("K", "V")));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(160_000)
.hasAttributes(attributeEntry("K", "V")))));
}
@Test
@ -253,22 +255,35 @@ class SdkLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testUpDownCounter")
.hasLongSum()
.isCumulative()
.isNotMonotonic()
.points()
.allSatisfy(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000))
.extracting(PointData::getAttributes)
.containsExactlyInAnyOrder(
Attributes.of(stringKey(keys[0]), values[0]),
Attributes.of(stringKey(keys[1]), values[1]),
Attributes.of(stringKey(keys[2]), values[2]),
Attributes.of(stringKey(keys[3]), values[3])));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[0], values[0])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[1], values[1])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[2], values[2])),
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasValue(20_000)
.hasAttributes(attributeEntry(keys[3], values[3])))));
}
private static class OperationUpdaterWithBinding extends OperationUpdater {

View File

@ -5,8 +5,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static org.assertj.core.api.Assertions.entry;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.mockito.Mockito.when;
import io.github.netmikey.logunit.api.LogCapturer;
@ -27,7 +27,6 @@ import io.opentelemetry.context.Context;
import io.opentelemetry.context.Scope;
import io.opentelemetry.sdk.common.CompletableResultCode;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.export.MetricReader;
import io.opentelemetry.sdk.metrics.internal.SdkMeterProviderUtil;
@ -120,87 +119,89 @@ class SdkMeterProviderTest {
metric ->
assertThat(metric)
.hasName("testDoubleHistogram")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(1)
.hasSum(10.1)
.hasBucketCounts(0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(1)
.hasSum(10.1)
.hasBucketCounts(
0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))),
metric ->
assertThat(metric)
.hasName("testDoubleCounter")
.hasDoubleSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1)),
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1))),
metric ->
assertThat(metric)
.hasName("testLongHistogram")
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(1)
.hasSum(10)
.hasBucketCounts(0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)),
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasCount(1)
.hasSum(10)
.hasBucketCounts(
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0))),
metric ->
assertThat(metric)
.hasName("testLongUpDownCounter")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10)),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10))),
metric ->
assertThat(metric)
.hasName("testLongCounter")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10)),
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10))),
metric ->
assertThat(metric)
.hasName("testDoubleUpDownCounter")
.hasDoubleSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10.1)));
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10.1))));
}
@Test
@ -226,16 +227,17 @@ class SdkMeterProviderTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testLongCounter")
.hasDoubleHistogram()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1)));
.hasHistogramSatisfying(
histogram ->
histogram
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1))));
longCounter.add(10, Attributes.empty());
testClock.advance(Duration.ofSeconds(1));
@ -246,16 +248,17 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasDoubleHistogram()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1)));
.hasHistogramSatisfying(
histogram ->
histogram
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1))));
}
@Test
@ -285,16 +288,17 @@ class SdkMeterProviderTest {
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasDescription("")
.hasUnit("")
.hasDoubleHistogram()
.isDelta()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1)))
.hasHistogramSatisfying(
histogram ->
histogram
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1))))
.extracting(MetricData::getName)
.containsExactlyInAnyOrder(
"testLongCounter", "testDoubleCounter", "testLongHistogram", "testDoubleHistogram");
@ -314,16 +318,17 @@ class SdkMeterProviderTest {
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasDescription("")
.hasUnit("")
.hasDoubleHistogram()
.isDelta()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1)))
.hasHistogramSatisfying(
histogram ->
histogram
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 1000000000)
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasBucketCounts(1))))
.extracting(MetricData::getName)
.containsExactlyInAnyOrder(
"testLongCounter", "testDoubleCounter", "testLongHistogram", "testDoubleHistogram");
@ -370,83 +375,83 @@ class SdkMeterProviderTest {
metric ->
assertThat(metric)
.hasName("testLongSumObserver")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10)),
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10))),
metric ->
assertThat(metric)
.hasName("testDoubleSumObserver")
.hasDoubleSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1)),
.hasDoubleSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1))),
metric ->
assertThat(metric)
.hasName("testLongUpDownSumObserver")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10)),
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10))),
metric ->
assertThat(metric)
.hasName("testDoubleUpDownSumObserver")
.hasDoubleSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10.1)),
.hasDoubleSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(-10.1))),
metric ->
assertThat(metric)
.hasName("testLongValueObserver")
.hasLongGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10)),
.hasLongGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10))),
metric ->
assertThat(metric)
.hasName("testDoubleValueObserver")
.hasDoubleGauge()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1)));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())
.hasAttributes(Attributes.empty())
.hasValue(10.1))));
}
@Test
@ -473,18 +478,11 @@ class SdkMeterProviderTest {
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasLongSum()
.points()
.hasSize(2)
.satisfiesExactlyInAnyOrder(
pointData ->
assertThat(pointData)
.hasAttributes(Attributes.builder().put("callback", "one").build()),
(Consumer<LongPointData>)
longPointData ->
assertThat(longPointData)
.hasAttributes(
Attributes.builder().put("callback", "two").build())));
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasAttributes(attributeEntry("callback", "one")),
point -> point.hasAttributes(attributeEntry("callback", "two")))));
observableCounter1.close();
@ -493,15 +491,10 @@ class SdkMeterProviderTest {
.satisfiesExactly(
metricData ->
assertThat(metricData)
.hasLongSum()
.points()
.hasSize(1)
.satisfiesExactlyInAnyOrder(
(Consumer<LongPointData>)
longPointData ->
assertThat(longPointData)
.hasAttributes(
Attributes.builder().put("callback", "two").build())));
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasAttributes(attributeEntry("callback", "two")))));
observableCounter2.close();
assertThat(reader.collectAllMetrics()).hasSize(0);
@ -546,14 +539,11 @@ class SdkMeterProviderTest {
assertThat(allMetricData)
.hasSize(4)
.allSatisfy(
metricData -> {
assertThat(metricData)
.hasInstrumentationScope(InstrumentationScopeInfo.create("meter"))
.hasLongSum()
.points()
.hasSize(1)
.satisfiesExactly(point -> assertThat(point).hasValue(1));
});
metricData ->
assertThat(metricData)
.hasInstrumentationScope(InstrumentationScopeInfo.create("meter"))
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1))));
} finally {
executorService.shutdown();
}
@ -583,12 +573,10 @@ class SdkMeterProviderTest {
.satisfiesExactly(
metric ->
assertThat(metric)
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point.getAttributes().asMap())
.containsOnly(entry(AttributeKey.stringKey("allowed"), "bear"))));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point -> point.hasAttributes(attributeEntry("allowed", "bear")))));
}
@Test
@ -618,11 +606,11 @@ class SdkMeterProviderTest {
.hasName("not_test")
.hasDescription("not_desc")
.hasUnit("unit")
.hasDoubleGauge());
.hasDoubleGaugeSatisfying(gauge -> {}));
}
@Test
void viewSdk_AllowMulitpleViewsPerSynchronousInstrument() {
void viewSdk_AllowMultipleViewsPerSynchronousInstrument() {
InstrumentSelector selector = InstrumentSelector.builder().setName("test").build();
InMemoryMetricReader reader = InMemoryMetricReader.create();
SdkMeterProvider provider =
@ -654,13 +642,13 @@ class SdkMeterProviderTest {
.hasName("not_test")
.hasDescription("not_desc")
.hasUnit("unit")
.hasDoubleHistogram(),
.hasHistogramSatisfying(histogramAssert -> {}),
metric ->
assertThat(metric)
.hasName("not_test_2")
.hasDescription("not_desc_2")
.hasUnit("unit")
.hasDoubleSum());
.hasDoubleSumSatisfying(sum -> {}));
}
@Test
@ -698,13 +686,13 @@ class SdkMeterProviderTest {
.hasName("not_test")
.hasDescription("not_desc")
.hasUnit("unit")
.hasDoubleGauge(),
.hasDoubleGaugeSatisfying(gauge -> {}),
metric ->
assertThat(metric)
.hasName("not_test_2")
.hasDescription("not_desc_2")
.hasUnit("unit")
.hasDoubleGauge());
.hasDoubleGaugeSatisfying(gauge -> {}));
}
@Test
@ -737,14 +725,12 @@ class SdkMeterProviderTest {
metric ->
assertThat(metric)
.hasName("test")
.hasLongSum()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasAttributes(
Attributes.builder().put("baggage", "value").build())));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point.hasAttributes(attributeEntry("baggage", "value")))));
}
@Test
@ -769,15 +755,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(1)));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(1))));
counter.add(1L);
testClock.advance(Duration.ofSeconds(1));
@ -789,15 +775,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2)));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2))));
// Make sure Collector 1 sees the same point as 2
assertThat(collector1.collectAllMetrics())
@ -806,15 +792,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2)));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2))));
}
@Test
@ -847,15 +833,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(1)));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(1))));
long collectorOneTimeOne = testClock.now();
counter.add(1L);
@ -868,15 +854,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2)));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(startTime)
.hasEpochNanos(testClock.now())
.hasValue(2))));
// Make sure Collector 1 sees the same point as 2, when it collects.
assertThat(collector1.collectAllMetrics())
@ -885,15 +871,15 @@ class SdkMeterProviderTest {
assertThat(metric)
.hasResource(RESOURCE)
.hasName("testSum")
.hasLongSum()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(collectorOneTimeOne)
.hasEpochNanos(testClock.now())
.hasValue(1)));
.hasLongSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(collectorOneTimeOne)
.hasEpochNanos(testClock.now())
.hasValue(1))));
}
@Test

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;
@ -116,16 +116,16 @@ class SdkMeterRegistryTest {
metric ->
assertThat(metric)
.hasName("testLongCounter")
.hasLongSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasValue(10)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now())))
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasValue(10)
.hasStartEpochNanos(testClock.now())
.hasEpochNanos(testClock.now()))))
.extracting(MetricData::getInstrumentationScopeInfo)
.containsExactlyInAnyOrder(
((SdkMeter) sdkMeter1).getInstrumentationScopeInfo(),

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.ObservableDoubleCounter;
@ -42,7 +43,10 @@ class SdkObservableDoubleCounterTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testCounter").hasDoubleSum().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testCounter")
.hasDoubleSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {})));
counter.close();
@ -87,19 +91,17 @@ class SdkObservableDoubleCounterTest {
.hasName("testObserver")
.hasDescription("My own DoubleSumObserver")
.hasUnit("ms")
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -108,19 +110,17 @@ class SdkObservableDoubleCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(attributeEntry("k", "v")))));
}
@Test
@ -151,19 +151,18 @@ class SdkObservableDoubleCounterTest {
.hasName("testObserver")
.hasDescription("My own DoubleSumObserver")
.hasUnit("ms")
.hasDoubleSum()
.isDelta()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(
Attributes.builder().put("k", "v").build()))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -174,18 +173,16 @@ class SdkObservableDoubleCounterTest {
.hasName("testObserver")
.hasDescription("My own DoubleSumObserver")
.hasUnit("ms")
.hasDoubleSum()
.isDelta()
.isMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.hasAttributes(attributeEntry("k", "v")))));
}
}

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.Mockito.mock;
@ -44,7 +45,10 @@ class SdkObservableDoubleUpDownCounterTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testCounter").hasDoubleSum().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testCounter")
.hasDoubleSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {})));
counter.close();
@ -85,19 +89,17 @@ class SdkObservableDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasDoubleSum()
.isCumulative()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -106,19 +108,17 @@ class SdkObservableDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasDoubleSum()
.isCumulative()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(attributeEntry("k", "v")))));
}
@Test
@ -147,19 +147,17 @@ class SdkObservableDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasDoubleSum()
.isDelta()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12.1)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -168,19 +166,17 @@ class SdkObservableDoubleUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasDoubleSum()
.isDelta()
.isNotMonotonic()
.points()
.satisfiesExactlyInAnyOrder(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.isNotMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.hasAttributes(attributeEntry("k", "v")))));
}
@Test

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.ObservableLongCounter;
@ -41,7 +42,10 @@ class SdkObservableLongCounterTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testCounter").hasLongSum().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testCounter")
.hasLongSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {})));
counter.close();
@ -79,19 +83,17 @@ class SdkObservableLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -100,19 +102,17 @@ class SdkObservableLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
}
@Test
@ -137,19 +137,17 @@ class SdkObservableLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isMonotonic()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -158,18 +156,16 @@ class SdkObservableLongCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isMonotonic()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isMonotonic()
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.hasAttributes(attributeEntry("k", "v")))));
}
}

View File

@ -6,7 +6,8 @@
package io.opentelemetry.sdk.metrics;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.ObservableLongUpDownCounter;
@ -41,7 +42,10 @@ class SdkObservableLongUpDownCounterTest {
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
metric -> assertThat(metric).hasName("testCounter").hasLongSum().points().hasSize(1));
metric ->
assertThat(metric)
.hasName("testCounter")
.hasLongSumSatisfying(sum -> sum.hasPointsSatisfying(point -> {})));
counter.close();
@ -79,19 +83,17 @@ class SdkObservableLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -100,19 +102,17 @@ class SdkObservableLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isNotMonotonic()
.isCumulative()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - 2 * SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
}
@Test
@ -139,19 +139,17 @@ class SdkObservableLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isNotMonotonic()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(12)
.hasAttributes(attributeEntry("k", "v")))));
testClock.advance(Duration.ofNanos(SECOND_NANOS));
assertThat(sdkMeterReader.collectAllMetrics())
.satisfiesExactly(
@ -160,18 +158,16 @@ class SdkObservableLongUpDownCounterTest {
.hasResource(RESOURCE)
.hasInstrumentationScope(INSTRUMENTATION_SCOPE_INFO)
.hasName("testObserver")
.hasLongSum()
.isNotMonotonic()
.isDelta()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.attributes()
.hasSize(1)
.containsEntry("k", "v")));
.hasLongSumSatisfying(
sum ->
sum.isNotMonotonic()
.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(testClock.now() - SECOND_NANOS)
.hasEpochNanos(testClock.now())
.hasValue(0)
.hasAttributes(attributeEntry("k", "v")))));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import com.google.common.collect.ImmutableList;
import io.opentelemetry.api.common.Attributes;
@ -301,17 +301,17 @@ class DoubleExplicitBucketHistogramAggregatorTest {
0,
10,
100))
.hasDoubleHistogram()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasSum(2)
.hasMin(2)
.hasMax(2)
.hasBucketCounts(1, 0, 0, 0)
.hasCount(1)
.hasExemplars(exemplar));
.hasHistogramSatisfying(
histogram ->
histogram.hasPointsSatisfying(
point ->
point
.hasSum(2)
.hasMin(2)
.hasMax(2)
.hasBucketCounts(1, 0, 0, 0)
.hasCount(1)
.hasExemplars(exemplar)));
}
@Test

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import com.google.common.collect.ImmutableList;
import io.opentelemetry.api.common.Attributes;
@ -24,6 +24,7 @@ import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory;
import io.opentelemetry.sdk.resources.Resource;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@ -480,23 +481,23 @@ class DoubleExponentialHistogramAggregatorTest {
// Assertions run twice to verify immutability; recordings shouldn't modify the metric data
for (int i = 0; i < 2; i++) {
assertThat(metricDataCumulative)
MetricAssertions.assertThat(metricDataCumulative)
.hasExponentialHistogram()
.isCumulative()
.points()
.satisfiesExactly(
point -> {
assertThat(point)
MetricAssertions.assertThat(point)
.hasSum(123.456)
.hasScale(20)
.hasZeroCount(2)
.hasCount(3)
.hasExemplars(exemplar);
assertThat(point.getPositiveBuckets())
MetricAssertions.assertThat(point.getPositiveBuckets())
.hasCounts(Collections.singletonList(1L))
.hasOffset(valueToIndex(20, 123.456))
.hasTotalCount(1);
assertThat(point.getNegativeBuckets())
MetricAssertions.assertThat(point.getNegativeBuckets())
.hasTotalCount(0)
.hasCounts(Collections.emptyList());
});
@ -553,8 +554,12 @@ class DoubleExponentialHistogramAggregatorTest {
assertThat(acc.getZeroCount()).isEqualTo(numberOfUpdates);
assertThat(acc.getSum()).isCloseTo(100.0D * 10000, Offset.offset(0.0001)); // float error
assertThat(acc.getScale()).isEqualTo(5);
assertThat(acc.getPositiveBuckets()).hasTotalCount(numberOfUpdates * 3).hasOffset(-107);
assertThat(acc.getNegativeBuckets()).hasTotalCount(numberOfUpdates * 2).hasOffset(-107);
MetricAssertions.assertThat(acc.getPositiveBuckets())
.hasTotalCount(numberOfUpdates * 3)
.hasOffset(-107);
MetricAssertions.assertThat(acc.getNegativeBuckets())
.hasTotalCount(numberOfUpdates * 2)
.hasOffset(-107);
// Verify positive buckets have correct counts
List<Long> posCounts = acc.getPositiveBuckets().getBucketCounts();

View File

@ -5,12 +5,13 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import io.opentelemetry.sdk.metrics.internal.state.ExponentialCounterFactory;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import java.util.Arrays;
import java.util.Collections;
import java.util.stream.Stream;
@ -39,7 +40,7 @@ class DoubleExponentialHistogramBucketsTest {
b.record(1);
b.record(1);
b.record(1);
assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L));
MetricAssertions.assertThat(b).hasTotalCount(3).hasCounts(Collections.singletonList(3L));
}
@ParameterizedTest
@ -58,7 +59,10 @@ class DoubleExponentialHistogramBucketsTest {
b.record(2);
b.record(4);
assertThat(b.getScale()).isEqualTo(0);
assertThat(b).hasTotalCount(3).hasCounts(Arrays.asList(1L, 1L, 1L)).hasOffset(0);
MetricAssertions.assertThat(b)
.hasTotalCount(3)
.hasCounts(Arrays.asList(1L, 1L, 1L))
.hasOffset(0);
}
@ParameterizedTest

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
@ -152,14 +152,14 @@ class DoubleLastValueAggregatorTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleGauge()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasAttributes(Attributes.empty())
.hasStartEpochNanos(10)
.hasEpochNanos(100)
.hasValue(10));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point
.hasAttributes(Attributes.empty())
.hasStartEpochNanos(10)
.hasEpochNanos(100)
.hasValue(10)));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
@ -209,17 +209,17 @@ class DoubleSumAggregatorTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasDoubleSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(0)
.hasEpochNanos(100)
.hasAttributes(Attributes.empty())
.hasValue(10));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(0)
.hasEpochNanos(100)
.hasAttributes(Attributes.empty())
.hasValue(10)));
}
@Test
@ -247,8 +247,7 @@ class DoubleSumAggregatorTest {
0,
10,
100))
.hasDoubleSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1).hasExemplars(exemplar));
.hasDoubleSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar)));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
@ -195,17 +195,17 @@ class LongSumAggregatorTest {
.hasName("name")
.hasDescription("description")
.hasUnit("unit")
.hasLongSum()
.isCumulative()
.isMonotonic()
.points()
.satisfiesExactly(
point ->
assertThat(point)
.hasStartEpochNanos(0)
.hasEpochNanos(100)
.hasAttributes(Attributes.empty())
.hasValue(10));
.hasLongSumSatisfying(
sum ->
sum.isCumulative()
.isMonotonic()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(0)
.hasEpochNanos(100)
.hasAttributes(Attributes.empty())
.hasValue(10)));
}
@Test
@ -232,8 +232,7 @@ class LongSumAggregatorTest {
0,
10,
100))
.hasLongSum()
.points()
.satisfiesExactly(point -> assertThat(point).hasValue(1).hasExemplars(exemplar));
.hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar)));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.exemplar;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
@ -16,6 +16,7 @@ import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Random;
@ -43,7 +44,7 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1.1));
@ -55,7 +56,7 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
@ -74,7 +75,7 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(partial))
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1.1)
.hasFilteredAttributes(remaining));
@ -97,7 +98,7 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(all)
@ -130,7 +131,9 @@ class DoubleRandomFixedSizeExemplarReservoirTest {
reservoir.offerDoubleMeasurement(3, Attributes.of(key, 3L), Context.root());
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactlyInAnyOrder(
exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
}
}

View File

@ -5,12 +5,13 @@
package io.opentelemetry.sdk.metrics.internal.exemplar;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Arrays;
@ -36,7 +37,7 @@ class HistogramExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1.1));
@ -47,7 +48,7 @@ class HistogramExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
@ -59,7 +60,7 @@ class HistogramExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(4));
@ -78,9 +79,21 @@ class HistogramExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.hasSize(4)
.satisfiesExactlyInAnyOrder(
e -> assertThat(e).hasValue(-1.1).hasFilteredAttributes(Attributes.of(bucketKey, 0L)),
e -> assertThat(e).hasValue(1).hasFilteredAttributes(Attributes.of(bucketKey, 1L)),
e -> assertThat(e).hasValue(11).hasFilteredAttributes(Attributes.of(bucketKey, 2L)),
e -> assertThat(e).hasValue(21).hasFilteredAttributes(Attributes.of(bucketKey, 3L)));
e ->
MetricAssertions.assertThat(e)
.hasValue(-1.1)
.hasFilteredAttributes(Attributes.of(bucketKey, 0L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(1)
.hasFilteredAttributes(Attributes.of(bucketKey, 1L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(11)
.hasFilteredAttributes(Attributes.of(bucketKey, 2L)),
e ->
MetricAssertions.assertThat(e)
.hasValue(21)
.hasFilteredAttributes(Attributes.of(bucketKey, 3L)));
}
}

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.exemplar;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
@ -16,6 +16,7 @@ import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.testing.assertj.MetricAssertions;
import io.opentelemetry.sdk.testing.time.TestClock;
import java.time.Duration;
import java.util.Random;
@ -43,7 +44,7 @@ class LongRandomFixedSizeExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(1));
@ -55,7 +56,7 @@ class LongRandomFixedSizeExemplarReservoirTest {
.hasSize(1)
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasFilteredAttributes(Attributes.empty())
.hasValue(2));
@ -74,7 +75,7 @@ class LongRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(partial))
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(remaining));
@ -97,7 +98,7 @@ class LongRandomFixedSizeExemplarReservoirTest {
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactly(
exemplar ->
assertThat(exemplar)
MetricAssertions.assertThat(exemplar)
.hasEpochNanos(clock.now())
.hasValue(1)
.hasFilteredAttributes(all)
@ -130,7 +131,9 @@ class LongRandomFixedSizeExemplarReservoirTest {
reservoir.offerLongMeasurement(3, Attributes.of(key, 3L), Context.root());
assertThat(reservoir.collectAndReset(Attributes.empty()))
.satisfiesExactlyInAnyOrder(
exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar -> assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(2),
exemplar ->
MetricAssertions.assertThat(exemplar).hasEpochNanos(clock.now()).hasValue(3));
}
}

View File

@ -5,7 +5,8 @@
package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
@ -84,20 +85,16 @@ class AsynchronousMetricStorageTest {
/* suppressSynchronousCollection= */ false))
.satisfies(
metricData ->
assertThat(metricData.getLongSumData().getPoints())
.satisfiesExactlyInAnyOrder(
pointData ->
assertThat(pointData)
.hasValue(1)
.hasAttributes(Attributes.builder().put("key", "a").build()),
pointData ->
assertThat(pointData)
.hasValue(2)
.hasAttributes(Attributes.builder().put("key", "b").build()),
pointData ->
assertThat(pointData)
.hasValue(3)
.hasAttributes(Attributes.builder().put("key", "c").build())));
assertThat(metricData)
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasValue(1).hasAttributes(attributeEntry("key", "a")),
point ->
point.hasValue(2).hasAttributes(attributeEntry("key", "b")),
point ->
point.hasValue(3).hasAttributes(attributeEntry("key", "c")))));
assertThat(logs.size()).isEqualTo(0);
}
@ -123,20 +120,18 @@ class AsynchronousMetricStorageTest {
/* suppressSynchronousCollection= */ false))
.satisfies(
metricData ->
assertThat(metricData.getDoubleSumData().getPoints())
.satisfiesExactlyInAnyOrder(
pointData ->
assertThat(pointData)
.hasValue(1.1)
.hasAttributes(Attributes.builder().put("key", "a").build()),
pointData ->
assertThat(pointData)
.hasValue(2.2)
.hasAttributes(Attributes.builder().put("key", "b").build()),
pointData ->
assertThat(pointData)
.hasValue(3.3)
.hasAttributes(Attributes.builder().put("key", "c").build())));
assertThat(metricData)
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasValue(1.1).hasAttributes(attributeEntry("key", "a")),
point ->
point.hasValue(2.2).hasAttributes(attributeEntry("key", "b")),
point ->
point
.hasValue(3.3)
.hasAttributes(attributeEntry("key", "c")))));
assertThat(logs.size()).isEqualTo(0);
}
@ -164,12 +159,12 @@ class AsynchronousMetricStorageTest {
/* suppressSynchronousCollection= */ false))
.satisfies(
metricData ->
assertThat(metricData.getLongSumData().getPoints())
.satisfiesExactlyInAnyOrder(
pointData ->
assertThat(pointData)
.hasValue(1)
.hasAttributes(Attributes.builder().put("key1", "a").build())));
assertThat(metricData)
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasValue(1).hasAttributes(attributeEntry("key1", "a")))));
assertThat(logs.size()).isEqualTo(0);
}
@ -221,12 +216,12 @@ class AsynchronousMetricStorageTest {
/* suppressSynchronousCollection= */ false))
.satisfies(
metricData ->
assertThat(metricData.getLongSumData().getPoints())
.satisfiesExactlyInAnyOrder(
pointData ->
assertThat(pointData)
.hasValue(1)
.hasAttributes(Attributes.builder().put("key1", "a").build())));
assertThat(metricData)
.hasLongSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point ->
point.hasValue(1).hasAttributes(attributeEntry("key1", "a")))));
logs.assertContains("Instrument name has recorded multiple values for the same attributes");
}
}

View File

@ -5,7 +5,8 @@
package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
@ -91,15 +92,12 @@ public class SynchronousMetricStorageTest {
testClock.now(),
false);
assertThat(md)
.hasDoubleGauge()
.points()
.allSatisfy(
p ->
assertThat(p)
.attributes()
.hasSize(2)
.containsEntry("modifiedK", "modifiedV")
.containsEntry("K", "V"));
.hasDoubleGaugeSatisfying(
gauge ->
gauge.hasPointsSatisfying(
point ->
point.hasAttributes(
attributeEntry("K", "V"), attributeEntry("modifiedK", "modifiedV"))));
}
@Test

View File

@ -5,7 +5,7 @@
package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.testing.assertj.MetricAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
@ -14,11 +14,9 @@ import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.InstrumentValueType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleAccumulation;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter;
@ -88,12 +86,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -105,12 +102,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(30).hasValue(6));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(30).hasValue(6)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -122,12 +118,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -139,12 +134,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(35).hasValue(8));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(35).hasValue(8)));
}
@Test
@ -168,13 +162,19 @@ class TemporalMetricStorageTest {
measurement1,
0,
10))
.hasDoubleSum()
.isCumulative()
.points()
.hasSize(MetricStorageUtils.MAX_ACCUMULATIONS)
.isNotEmpty()
.allSatisfy(point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.satisfies(
sumPoint ->
assertThat(sumPoint.getPoints())
.hasSize(MetricStorageUtils.MAX_ACCUMULATIONS)
.allSatisfy(
sumPointData -> {
assertThat(sumPointData.getStartEpochNanos()).isEqualTo(0);
assertThat(sumPointData.getEpochNanos()).isEqualTo(10);
assertThat(sumPointData.getValue()).isEqualTo(3);
})));
// Send in new measurement at time 20 for collector 1, with attr2
// Result should drop accumulation for attr1, only reporting accumulation for attr2
Map<Attributes, DoubleAccumulation> measurement2 = new HashMap<>();
@ -193,13 +193,8 @@ class TemporalMetricStorageTest {
measurement2,
0,
20))
.hasDoubleSum()
.isCumulative()
.points()
.hasSize(1) // Limiting to only recent measurements means we cut everything here.
.isNotEmpty()
.extracting(PointData::getAttributes)
.contains(attr2);
.hasDoubleSumSatisfying(
sum -> sum.isCumulative().hasPointsSatisfying(point -> point.hasAttributes(attr2)));
}
@Test
@ -221,12 +216,11 @@ class TemporalMetricStorageTest {
measurement1,
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.hasSize(1)
.isNotEmpty()
.contains(ImmutableDoublePointData.create(0, 10, attr1, 3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 20 for collector 1, with attr2
// Result should drop accumulation for attr1, only reporting accumulation for attr2
@ -243,12 +237,16 @@ class TemporalMetricStorageTest {
measurement2,
0,
20))
.hasDoubleSum()
.isDelta()
.points()
.hasSize(1)
.isNotEmpty()
.containsExactly(ImmutableDoublePointData.create(10, 20, attr2, 7));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(10)
.hasEpochNanos(20)
.hasAttributes(attr2)
.hasValue(7)));
}
@Test
@ -267,12 +265,10 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -284,12 +280,10 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -301,12 +295,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -318,12 +311,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2)));
}
@Test
@ -341,12 +333,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -358,12 +349,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(3)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -375,12 +365,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
40))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(40).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(40).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -392,12 +381,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(2)));
// Send in new measurement at time 60 for collector 2
assertThat(
storage.buildMetricFor(
@ -409,12 +397,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(8));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(8)));
}
@Test
@ -433,12 +420,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -450,12 +436,10 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(30).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(30).hasValue(3)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -467,12 +451,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -484,12 +467,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(35).hasValue(2));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(35).hasValue(2)));
}
@Test
@ -511,12 +493,16 @@ class TemporalMetricStorageTest {
measurement1,
0,
10))
.hasDoubleSum()
.isCumulative()
.points()
.hasSize(1)
.isNotEmpty()
.contains(ImmutableDoublePointData.create(0, 10, attr1, 3));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(0)
.hasEpochNanos(10)
.hasAttributes(attr1)
.hasValue(3)));
// Send in new measurement at time 20 for collector 1, with attr2
// Result should drop accumulation for attr1, only reporting accumulation for attr2
@ -533,12 +519,16 @@ class TemporalMetricStorageTest {
measurement2,
0,
20))
.hasDoubleSum()
.isCumulative()
.points()
.hasSize(1)
.isNotEmpty()
.containsExactly(ImmutableDoublePointData.create(0, 20, attr2, 7));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(0)
.hasEpochNanos(20)
.hasAttributes(attr2)
.hasValue(7)));
}
@Test
@ -560,12 +550,16 @@ class TemporalMetricStorageTest {
measurement1,
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.hasSize(1)
.isNotEmpty()
.contains(ImmutableDoublePointData.create(0, 10, attr1, 3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(0)
.hasEpochNanos(10)
.hasAttributes(attr1)
.hasValue(3)));
// Send in new measurement at time 20 for collector 1, with attr2
// Result should drop accumulation for attr1, only reporting accumulation for attr2
@ -582,12 +576,16 @@ class TemporalMetricStorageTest {
measurement2,
0,
20))
.hasDoubleSum()
.isDelta()
.points()
.hasSize(1)
.isNotEmpty()
.containsExactly(ImmutableDoublePointData.create(10, 20, attr2, 7));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point ->
point
.hasStartEpochNanos(10)
.hasEpochNanos(20)
.hasAttributes(attr2)
.hasValue(7)));
}
@Test
@ -606,12 +604,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -623,12 +620,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -640,12 +636,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -657,12 +652,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1)));
}
@Test
@ -681,12 +675,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
10))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(10).hasValue(3)));
// Send in new measurement at time 30 for collector 1
assertThat(
storage.buildMetricFor(
@ -698,12 +691,11 @@ class TemporalMetricStorageTest {
createMeasurement(3),
0,
30))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(10).hasEpochNanos(30).hasValue(0)));
// Send in new measurement at time 40 for collector 2
assertThat(
storage.buildMetricFor(
@ -715,12 +707,11 @@ class TemporalMetricStorageTest {
createMeasurement(4),
0,
60))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(4)));
// Send in new measurement at time 35 for collector 1
assertThat(
storage.buildMetricFor(
@ -732,12 +723,11 @@ class TemporalMetricStorageTest {
createMeasurement(2),
0,
35))
.hasDoubleSum()
.isDelta()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1));
.hasDoubleSumSatisfying(
sum ->
sum.isDelta()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(30).hasEpochNanos(35).hasValue(-1)));
// Send in new measurement at time 60 for collector 2
assertThat(
@ -750,11 +740,10 @@ class TemporalMetricStorageTest {
createMeasurement(5),
0,
60))
.hasDoubleSum()
.isCumulative()
.points()
.isNotEmpty()
.satisfiesExactly(
point -> assertThat(point).hasStartEpochNanos(0).hasEpochNanos(60).hasValue(5));
.hasDoubleSumSatisfying(
sum ->
sum.isCumulative()
.hasPointsSatisfying(
point -> point.hasStartEpochNanos(0).hasEpochNanos(60).hasValue(5)));
}
}

View File

@ -5,8 +5,12 @@
package io.opentelemetry.sdk.testing.assertj;
import static org.assertj.core.api.Assertions.assertThat;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import java.util.Arrays;
import java.util.function.Consumer;
import org.assertj.core.api.Assertions;
/** Test assertions for {@link HistogramPointData}. */
@ -77,4 +81,30 @@ public final class HistogramPointAssert
Assertions.assertThat(actual.getCounts()).as("bucketCounts").containsExactly(bigCounts);
return this;
}
/**
 * Asserts the point has exactly the given exemplars, compared by {@code equals} and ignoring
 * order.
 */
public HistogramPointAssert hasExemplars(DoubleExemplarData... exemplars) {
  isNotNull();
  // Statically imported assertThat; "exemplars" labels the failure message.
  assertThat(actual.getExemplars()).as("exemplars").containsExactlyInAnyOrder(exemplars);
  return myself;
}
/**
 * Asserts the point has exemplars matching all of the assertions, in any order.
 *
 * <p>Varargs convenience overload; delegates to {@link #hasExemplarsSatisfying(Iterable)}.
 */
@SafeVarargs
@SuppressWarnings("varargs")
public final HistogramPointAssert hasExemplarsSatisfying(
    Consumer<DoubleExemplarAssert>... assertions) {
  return hasExemplarsSatisfying(Arrays.asList(assertions));
}
/**
 * Asserts the point has exemplars matching all of the given assertions, with each exemplar
 * matched to exactly one assertion, in any order.
 */
public HistogramPointAssert hasExemplarsSatisfying(
    Iterable<? extends Consumer<DoubleExemplarAssert>> assertions) {
  isNotNull();
  // Adapt each DoubleExemplarAssert consumer into a consumer over the raw exemplar data.
  Assertions.assertThat(actual.getExemplars())
      .satisfiesExactlyInAnyOrder(AssertUtil.toConsumers(assertions, DoubleExemplarAssert::new));
  return myself;
}
}

View File

@ -41,7 +41,6 @@ dependencies {
// dependencies.
isTransitive = false
}
jmh(project(":sdk:metrics-testing"))
jmh(project(":exporters:jaeger-thrift"))
jmh(project(":exporters:otlp:trace")) {
// The opentelemetry-exporter-otlp-trace depends on this project itself. So don"t pull in