Memory mode: Adding support for synchronous instruments - explicit histogram (#6153)

Co-authored-by: jack-berg <34418638+jack-berg@users.noreply.github.com>
This commit is contained in:
Asaf Mesika 2024-01-25 20:18:28 +02:00 committed by GitHub
parent 737dfef4e6
commit 8d1cad2ae1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
13 changed files with 659 additions and 55 deletions

View File

@ -65,6 +65,10 @@ public class DynamicPrimitiveLongList extends AbstractList<Long> {
return list;
}
/**
 * Returns a new, empty list backed by sub-arrays of the given capacity.
 *
 * @param subarrayCapacity presumably the chunk size of the internal backing storage — confirm
 *     against the {@code DynamicPrimitiveLongList(int)} constructor
 */
public static DynamicPrimitiveLongList ofSubArrayCapacity(int subarrayCapacity) {
  return new DynamicPrimitiveLongList(subarrayCapacity);
}
/** Returns a new, empty list created with the default constructor. */
public static DynamicPrimitiveLongList empty() {
  return new DynamicPrimitiveLongList();
}

View File

@ -17,11 +17,13 @@ public enum HistogramAggregationParam {
new DoubleExplicitBucketHistogramAggregator(
ExplicitBucketHistogramUtils.createBoundaryArray(
ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES),
ExemplarReservoir::doubleNoSamples)),
ExemplarReservoir::doubleNoSamples,
IMMUTABLE_DATA)),
EXPLICIT_SINGLE_BUCKET(
new DoubleExplicitBucketHistogramAggregator(
ExplicitBucketHistogramUtils.createBoundaryArray(Collections.emptyList()),
ExemplarReservoir::doubleNoSamples)),
ExemplarReservoir::doubleNoSamples,
IMMUTABLE_DATA)),
EXPONENTIAL_SMALL_CIRCULAR_BUFFER(
new DoubleBase2ExponentialHistogramAggregator(
ExemplarReservoir::doubleNoSamples, 20, 0, IMMUTABLE_DATA)),

View File

@ -37,7 +37,7 @@ public class ProfileBenchmark {
// Parameters
AggregationTemporality aggregationTemporality = AggregationTemporality.DELTA;
MemoryMode memoryMode = MemoryMode.REUSABLE_DATA;
TestInstrumentType testInstrumentType = TestInstrumentType.EXPONENTIAL_HISTOGRAM;
TestInstrumentType testInstrumentType = TestInstrumentType.EXPLICIT_BUCKET;
InstrumentGarbageCollectionBenchmark.ThreadState benchmarkSetup =
new InstrumentGarbageCollectionBenchmark.ThreadState();

View File

@ -9,6 +9,7 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.internal.state.tester.AsyncCounterTester;
import io.opentelemetry.sdk.metrics.internal.state.tester.ExplicitBucketHistogramTester;
import io.opentelemetry.sdk.metrics.internal.state.tester.ExponentialHistogramTester;
import java.util.List;
import java.util.Random;
@ -25,6 +26,12 @@ public enum TestInstrumentType {
InstrumentTester createInstrumentTester() {
return new ExponentialHistogramTester();
}
},
EXPLICIT_BUCKET() {
@Override
InstrumentTester createInstrumentTester() {
return new ExplicitBucketHistogramTester();
}
};
abstract InstrumentTester createInstrumentTester();

View File

@ -0,0 +1,62 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.state.tester;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.DoubleHistogram;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.SdkMeterProvider;
import io.opentelemetry.sdk.metrics.internal.aggregator.ExplicitBucketHistogramUtils;
import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.InstrumentTester;
import io.opentelemetry.sdk.metrics.internal.state.TestInstrumentType.TestInstrumentsState;
import java.util.List;
import java.util.Random;
/**
 * {@link InstrumentTester} for explicit bucket histograms: records pseudo-random values spread
 * across the default bucket boundaries, for memory-mode / GC-sensitivity benchmarking.
 */
public class ExplicitBucketHistogramTester implements InstrumentTester {

  static class ExplicitHistogramState implements TestInstrumentsState {
    // Highest default bucket boundary; used to size the random value range.
    public double maxBucketValue;
    DoubleHistogram doubleHistogram;
  }

  private static final int measurementsPerAttributeSet = 1_000;

  @Override
  public Aggregation testedAggregation() {
    return Aggregation.explicitBucketHistogram();
  }

  /** Builds the histogram under test and captures the largest default boundary. */
  @Override
  public TestInstrumentsState buildInstruments(
      double instrumentCount,
      SdkMeterProvider sdkMeterProvider,
      List<Attributes> attributesList,
      Random random) {
    ExplicitHistogramState state = new ExplicitHistogramState();
    state.doubleHistogram =
        sdkMeterProvider.get("meter").histogramBuilder("test.explicit.histogram").build();
    state.maxBucketValue =
        ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES.get(
            ExplicitBucketHistogramUtils.DEFAULT_HISTOGRAM_BUCKET_BOUNDARIES.size() - 1);
    return state;
  }

  /** Records {@code measurementsPerAttributeSet} random values per attribute set. */
  @SuppressWarnings("ForLoopReplaceableByForEach") // This is for GC sensitivity testing: no streams
  @Override
  public void recordValuesInInstruments(
      TestInstrumentsState testInstrumentsState, List<Attributes> attributesList, Random random) {
    ExplicitHistogramState state = (ExplicitHistogramState) testInstrumentsState;
    // Hoisted out of the loops: the bound is loop-invariant, and the plain cast avoids the
    // per-measurement Double boxing that Double.valueOf(..).intValue() incurred — incidental
    // garbage in the measured loop would skew a GC-sensitivity benchmark.
    int bound = (int) (state.maxBucketValue * 1.1);
    for (int j = 0; j < attributesList.size(); j++) {
      Attributes attributes = attributesList.get(j);
      for (int i = 0; i < measurementsPerAttributeSet; i++) {
        state.doubleHistogram.record(random.nextInt(bound), attributes);
      }
    }
  }
}

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.internal.GuardedBy;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.internal.PrimitiveLongList;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
@ -16,6 +17,7 @@ import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.MutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource;
@ -26,6 +28,7 @@ import java.util.Collections;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier;
import javax.annotation.Nullable;
/**
* Aggregator that generates explicit bucket histograms.
@ -36,6 +39,7 @@ import java.util.function.Supplier;
public final class DoubleExplicitBucketHistogramAggregator
implements Aggregator<HistogramPointData, DoubleExemplarData> {
private final double[] boundaries;
private final MemoryMode memoryMode;
// a cache for converting to MetricData
private final List<Double> boundaryList;
@ -47,10 +51,14 @@ public final class DoubleExplicitBucketHistogramAggregator
*
* @param boundaries Bucket boundaries, in-order.
* @param reservoirSupplier Supplier of exemplar reservoirs per-stream.
* @param memoryMode The {@link MemoryMode} to use in this aggregator.
*/
public DoubleExplicitBucketHistogramAggregator(
double[] boundaries, Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier) {
double[] boundaries,
Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier,
MemoryMode memoryMode) {
this.boundaries = boundaries;
this.memoryMode = memoryMode;
List<Double> boundaryList = new ArrayList<>(this.boundaries.length);
for (double v : this.boundaries) {
@ -62,7 +70,7 @@ public final class DoubleExplicitBucketHistogramAggregator
@Override
public AggregatorHandle<HistogramPointData, DoubleExemplarData> createHandle() {
return new Handle(this.boundaryList, this.boundaries, reservoirSupplier.get());
return new Handle(this.boundaryList, this.boundaries, reservoirSupplier.get(), memoryMode);
}
@Override
@ -104,10 +112,14 @@ public final class DoubleExplicitBucketHistogramAggregator
private final ReentrantLock lock = new ReentrantLock();
// Used only when MemoryMode = REUSABLE_DATA
@Nullable private MutableHistogramPointData reusablePoint;
Handle(
List<Double> boundaryList,
double[] boundaries,
ExemplarReservoir<DoubleExemplarData> reservoir) {
ExemplarReservoir<DoubleExemplarData> reservoir,
MemoryMode memoryMode) {
super(reservoir);
this.boundaryList = boundaryList;
this.boundaries = boundaries;
@ -116,6 +128,9 @@ public final class DoubleExplicitBucketHistogramAggregator
this.min = Double.MAX_VALUE;
this.max = -1;
this.count = 0;
if (memoryMode == MemoryMode.REUSABLE_DATA) {
this.reusablePoint = new MutableHistogramPointData(counts.length);
}
}
@Override
@ -127,19 +142,36 @@ public final class DoubleExplicitBucketHistogramAggregator
boolean reset) {
lock.lock();
try {
HistogramPointData pointData =
ImmutableHistogramPointData.create(
startEpochNanos,
epochNanos,
attributes,
sum,
this.count > 0,
this.min,
this.count > 0,
this.max,
boundaryList,
PrimitiveLongList.wrap(Arrays.copyOf(counts, counts.length)),
exemplars);
HistogramPointData pointData;
if (reusablePoint == null) {
pointData =
ImmutableHistogramPointData.create(
startEpochNanos,
epochNanos,
attributes,
sum,
this.count > 0,
this.min,
this.count > 0,
this.max,
boundaryList,
PrimitiveLongList.wrap(Arrays.copyOf(counts, counts.length)),
exemplars);
} else /* REUSABLE_DATA */ {
pointData =
reusablePoint.set(
startEpochNanos,
epochNanos,
attributes,
sum,
this.count > 0,
this.min,
this.count > 0,
this.max,
boundaryList,
counts,
exemplars);
}
if (reset) {
this.sum = 0;
this.min = Double.MAX_VALUE;

View File

@ -0,0 +1,35 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.data;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import java.util.List;
/**
* Validations for {@link HistogramPointData}.
*
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time.
*/
final class HistogramPointDataValidations {
private HistogramPointDataValidations() {}
static void validateIsStrictlyIncreasing(List<Double> xs) {
for (int i = 0; i < xs.size() - 1; i++) {
if (xs.get(i).compareTo(xs.get(i + 1)) >= 0) {
throw new IllegalArgumentException("invalid boundaries: " + xs);
}
}
}
static void validateFiniteBoundaries(List<Double> boundaries) {
if (!boundaries.isEmpty()
&& (boundaries.get(0).isInfinite() || boundaries.get(boundaries.size() - 1).isInfinite())) {
throw new IllegalArgumentException("invalid boundaries: contains explicit +/-Inf");
}
}
}

View File

@ -5,6 +5,9 @@
package io.opentelemetry.sdk.metrics.internal.data;
import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateFiniteBoundaries;
import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateIsStrictlyIncreasing;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.internal.PrimitiveLongList;
@ -85,13 +88,8 @@ public abstract class ImmutableHistogramPointData implements HistogramPointData
+ " instead of "
+ counts.size());
}
if (!isStrictlyIncreasing(boundaries)) {
throw new IllegalArgumentException("invalid boundaries: " + boundaries);
}
if (!boundaries.isEmpty()
&& (boundaries.get(0).isInfinite() || boundaries.get(boundaries.size() - 1).isInfinite())) {
throw new IllegalArgumentException("invalid boundaries: contains explicit +/-Inf");
}
validateIsStrictlyIncreasing(boundaries);
validateFiniteBoundaries(boundaries);
long totalCount = 0;
for (long c : PrimitiveLongList.toArray(counts)) {
@ -113,13 +111,4 @@ public abstract class ImmutableHistogramPointData implements HistogramPointData
}
ImmutableHistogramPointData() {}
private static boolean isStrictlyIncreasing(List<Double> xs) {
for (int i = 0; i < xs.size() - 1; i++) {
if (xs.get(i).compareTo(xs.get(i + 1)) >= 0) {
return false;
}
}
return true;
}
}

View File

@ -0,0 +1,249 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.data;
import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateFiniteBoundaries;
import static io.opentelemetry.sdk.metrics.internal.data.HistogramPointDataValidations.validateIsStrictlyIncreasing;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.internal.DynamicPrimitiveLongList;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import java.util.Collections;
import java.util.List;
/**
* A mutable {@link HistogramPointData}
*
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time.
*
* <p>This class is not thread-safe.
*/
public final class MutableHistogramPointData implements HistogramPointData {

  // Every field below is overwritten by set(...); the initializers only matter
  // before the first set(...) call.
  private long startEpochNanos;
  private long epochNanos;
  private Attributes attributes = Attributes.empty();
  private double sum;
  // Derived in set(...) as the sum of the per-bucket counts.
  private long count;
  private boolean hasMin;
  private double min;
  private boolean hasMax;
  private double max;
  private List<Double> boundaries = Collections.emptyList();
  // Fixed-size backing storage for bucket counts, allocated once in the
  // constructor and reused on every set(...) — this is the point of the
  // mutable variant (REUSABLE_DATA memory mode).
  private final DynamicPrimitiveLongList counts;
  private List<DoubleExemplarData> exemplars = Collections.emptyList();

  /**
   * Creates a point with storage for {@code buckets} bucket counts, allocated once here and
   * reused for the lifetime of this instance.
   */
  public MutableHistogramPointData(int buckets) {
    this.counts = DynamicPrimitiveLongList.ofSubArrayCapacity(buckets);
    this.counts.resizeAndClear(buckets);
  }

  /**
   * Overwrites all of this point's state in place and returns {@code this}.
   *
   * <p>{@code boundaries} must have exactly one element fewer than the bucket count this point
   * was constructed with, must be strictly increasing, and must not start or end with an
   * infinity; {@code counts} must match the constructed bucket count. The total count is
   * computed here as the sum of {@code counts}; the array contents are copied into the
   * pre-allocated storage.
   *
   * @throws IllegalArgumentException if the boundaries or counts sizes do not match, the
   *     boundaries are not strictly increasing, or the first/last boundary is infinite
   */
  @SuppressWarnings({"TooManyParameters", "ForLoopReplaceableByForEach"})
  public MutableHistogramPointData set(
      long startEpochNanos,
      long epochNanos,
      Attributes attributes,
      double sum,
      boolean hasMin,
      double min,
      boolean hasMax,
      double max,
      List<Double> boundaries,
      long[] counts,
      List<DoubleExemplarData> exemplars) {
    // Validate everything before mutating any field, so a failed set(...)
    // leaves the previous contents intact.
    if (this.counts.size() != boundaries.size() + 1) {
      throw new IllegalArgumentException(
          "invalid boundaries: size should be "
              + (this.counts.size() - 1)
              + " but was "
              + boundaries.size());
    }
    if (this.counts.size() != counts.length) {
      throw new IllegalArgumentException(
          "invalid counts: size should be " + this.counts.size() + " but was " + counts.length);
    }
    validateIsStrictlyIncreasing(boundaries);
    validateFiniteBoundaries(boundaries);

    long totalCount = 0;
    for (int i = 0; i < counts.length; i++) {
      totalCount += counts[i];
    }

    this.startEpochNanos = startEpochNanos;
    this.epochNanos = epochNanos;
    this.attributes = attributes;
    this.sum = sum;
    this.count = totalCount;
    this.hasMin = hasMin;
    this.min = min;
    this.hasMax = hasMax;
    this.max = max;
    this.boundaries = boundaries;
    // Copy into the pre-allocated storage rather than keeping a reference, so
    // the caller's array can be reused freely.
    for (int i = 0; i < counts.length; i++) {
      this.counts.setLong(i, counts[i]);
    }
    this.exemplars = exemplars;
    return this;
  }

  @Override
  public long getStartEpochNanos() {
    return startEpochNanos;
  }

  @Override
  public long getEpochNanos() {
    return epochNanos;
  }

  @Override
  public Attributes getAttributes() {
    return attributes;
  }

  @Override
  public double getSum() {
    return sum;
  }

  @Override
  public long getCount() {
    return count;
  }

  @Override
  public boolean hasMin() {
    return hasMin;
  }

  @Override
  public double getMin() {
    return min;
  }

  @Override
  public boolean hasMax() {
    return hasMax;
  }

  @Override
  public double getMax() {
    return max;
  }

  @Override
  public List<Double> getBoundaries() {
    return boundaries;
  }

  // NOTE(review): exposes the internal reusable storage; its contents are
  // overwritten by the next set(...) call.
  @Override
  public List<Long> getCounts() {
    return counts;
  }

  @Override
  public List<DoubleExemplarData> getExemplars() {
    return exemplars;
  }

  @Override
  public String toString() {
    return "MutableHistogramPointData{"
        + "startEpochNanos="
        + startEpochNanos
        + ", "
        + "epochNanos="
        + epochNanos
        + ", "
        + "attributes="
        + attributes
        + ", "
        + "sum="
        + sum
        + ", "
        + "count="
        + count
        + ", "
        + "hasMin="
        + hasMin
        + ", "
        + "min="
        + min
        + ", "
        + "hasMax="
        + hasMax
        + ", "
        + "max="
        + max
        + ", "
        + "boundaries="
        + boundaries
        + ", "
        + "counts="
        + counts
        + ", "
        + "exemplars="
        + exemplars
        + "}";
  }

  /**
   * Field-by-field equality against any {@link HistogramPointData} implementation, using
   * {@link Double#doubleToLongBits} for double fields (so NaN equals NaN and 0.0 differs from
   * -0.0), so mutable and immutable points with the same values compare equal.
   */
  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (o instanceof HistogramPointData) {
      HistogramPointData that = (HistogramPointData) o;
      return this.startEpochNanos == that.getStartEpochNanos()
          && this.epochNanos == that.getEpochNanos()
          && this.attributes.equals(that.getAttributes())
          && Double.doubleToLongBits(this.sum) == Double.doubleToLongBits(that.getSum())
          && this.count == that.getCount()
          && this.hasMin == that.hasMin()
          && Double.doubleToLongBits(this.min) == Double.doubleToLongBits(that.getMin())
          && this.hasMax == that.hasMax()
          && Double.doubleToLongBits(this.max) == Double.doubleToLongBits(that.getMax())
          && this.boundaries.equals(that.getBoundaries())
          && this.counts.equals(that.getCounts())
          && this.exemplars.equals(that.getExemplars());
    }
    return false;
  }

  // The multiply-by-1000003 / XOR folding below follows the AutoValue-generated
  // hashCode pattern — presumably to stay consistent with the AutoValue-based
  // ImmutableHistogramPointData; confirm before changing either.
  @Override
  public int hashCode() {
    int hashcode = 1;
    hashcode *= 1000003;
    hashcode ^= (int) ((startEpochNanos >>> 32) ^ startEpochNanos);
    hashcode *= 1000003;
    hashcode ^= (int) ((epochNanos >>> 32) ^ epochNanos);
    hashcode *= 1000003;
    hashcode ^= attributes.hashCode();
    hashcode *= 1000003;
    hashcode ^= (int) ((Double.doubleToLongBits(sum) >>> 32) ^ Double.doubleToLongBits(sum));
    hashcode *= 1000003;
    hashcode ^= (int) ((count >>> 32) ^ count);
    hashcode *= 1000003;
    hashcode ^= hasMin ? 1231 : 1237;
    hashcode *= 1000003;
    hashcode ^= (int) ((Double.doubleToLongBits(min) >>> 32) ^ Double.doubleToLongBits(min));
    hashcode *= 1000003;
    hashcode ^= hasMax ? 1231 : 1237;
    hashcode *= 1000003;
    hashcode ^= (int) ((Double.doubleToLongBits(max) >>> 32) ^ Double.doubleToLongBits(max));
    hashcode *= 1000003;
    hashcode ^= boundaries.hashCode();
    hashcode *= 1000003;
    hashcode ^= counts.hashCode();
    hashcode *= 1000003;
    hashcode ^= exemplars.hashCode();
    return hashcode;
  }
}

View File

@ -62,7 +62,8 @@ public final class ExplicitBucketHistogramAggregation implements Aggregation, Ag
exemplarFilter,
ExemplarReservoir.longToDouble(
ExemplarReservoir.histogramBucketReservoir(
Clock.getDefault(), bucketBoundaries))));
Clock.getDefault(), bucketBoundaries))),
memoryMode);
}
@Override

View File

@ -0,0 +1,10 @@
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * View related internal classes.
 *
 * <p>Method parameters in this package are non-null by default, per {@link
 * javax.annotation.ParametersAreNonnullByDefault}.
 */
@ParametersAreNonnullByDefault
package io.opentelemetry.sdk.metrics.internal.view;

import javax.annotation.ParametersAreNonnullByDefault;

View File

@ -14,6 +14,7 @@ import io.opentelemetry.api.trace.TraceFlags;
import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.common.export.MemoryMode;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
@ -21,6 +22,7 @@ import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.MutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource;
@ -34,6 +36,8 @@ import java.util.stream.Collectors;
import java.util.stream.DoubleStream;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
@ -51,17 +55,26 @@ class DoubleExplicitBucketHistogramAggregatorTest {
InstrumentationScopeInfo.empty();
private static final MetricDescriptor METRIC_DESCRIPTOR =
MetricDescriptor.create("name", "description", "unit");
private static final DoubleExplicitBucketHistogramAggregator aggregator =
new DoubleExplicitBucketHistogramAggregator(boundaries, ExemplarReservoir::doubleNoSamples);
private DoubleExplicitBucketHistogramAggregator aggregator;
@Test
void createHandle() {
private void init(MemoryMode memoryMode) {
aggregator =
new DoubleExplicitBucketHistogramAggregator(
boundaries, ExemplarReservoir::doubleNoSamples, memoryMode);
}
@ParameterizedTest
@EnumSource(MemoryMode.class)
void createHandle(MemoryMode memoryMode) {
init(memoryMode);
assertThat(aggregator.createHandle())
.isInstanceOf(DoubleExplicitBucketHistogramAggregator.Handle.class);
}
@Test
void testRecordings() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void testRecordings(MemoryMode memoryMode) {
init(memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
aggregatorHandle.recordLong(20);
@ -84,8 +97,9 @@ class DoubleExplicitBucketHistogramAggregatorTest {
Arrays.asList(1L, 1L, 1L, 1L)));
}
@Test
void aggregateThenMaybeReset_WithExemplars() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void aggregateThenMaybeReset_WithExemplars(MemoryMode memoryMode) {
Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar =
ImmutableDoubleExemplarData.create(
@ -100,7 +114,7 @@ class DoubleExplicitBucketHistogramAggregatorTest {
List<DoubleExemplarData> exemplars = Collections.singletonList(exemplar);
Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars);
DoubleExplicitBucketHistogramAggregator aggregator =
new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir);
new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir, memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
aggregatorHandle.recordDouble(0, attributes, Context.root());
@ -121,8 +135,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
exemplars));
}
@Test
void aggregateThenMaybeReset() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void aggregateThenMaybeReset(MemoryMode memoryMode) {
init(memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
@ -159,8 +175,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
Arrays.asList(1L, 0L, 0L, 0L)));
}
@Test
void toMetricData() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void toMetricData(MemoryMode memoryMode) {
init(memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
aggregatorHandle.recordLong(10);
@ -180,8 +198,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
.isEqualTo(AggregationTemporality.DELTA);
}
@Test
void toMetricDataWithExemplars() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void toMetricDataWithExemplars(MemoryMode memoryMode) {
init(memoryMode);
Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar =
ImmutableDoubleExemplarData.create(
@ -226,8 +246,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
.hasExemplars(exemplar)));
}
@Test
void testHistogramCounts() {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void testHistogramCounts(MemoryMode memoryMode) {
init(memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
aggregatorHandle.recordDouble(1.1);
@ -237,8 +259,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
assertThat(point.getCounts().size()).isEqualTo(boundaries.length + 1);
}
@Test
void testMultithreadedUpdates() throws InterruptedException {
@ParameterizedTest
@EnumSource(MemoryMode.class)
void testMultithreadedUpdates(MemoryMode memoryMode) throws InterruptedException {
init(memoryMode);
AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
ImmutableList<Long> updates = ImmutableList.of(1L, 2L, 3L, 5L, 7L, 11L, 13L, 17L, 19L, 23L);
@ -278,4 +302,28 @@ class DoubleExplicitBucketHistogramAggregatorTest {
boundariesList,
Arrays.asList(50000L, 50000L, 0L, 0L)));
}
@Test
void testReusableDataMemoryMode() {
  init(MemoryMode.REUSABLE_DATA);
  AggregatorHandle<HistogramPointData, DoubleExemplarData> handle = aggregator.createHandle();

  // First collection: the handle must hand out the mutable (reusable) point type.
  for (long value : new long[] {10, 20, 30, 40}) {
    handle.recordLong(value);
  }
  HistogramPointData firstPoint =
      handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false);
  assertThat(firstPoint).isExactlyInstanceOf(MutableHistogramPointData.class);

  // Second collection: the very same instance must be returned, proving reuse.
  handle.recordLong(10);
  handle.recordLong(20);
  HistogramPointData secondPoint =
      handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false);
  assertThat(secondPoint).isSameAs(firstPoint);
}
}

View File

@ -0,0 +1,165 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.data;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.common.Attributes;
import java.util.Arrays;
import java.util.Collections;
import org.junit.jupiter.api.Test;
/** Unit tests for {@code MutableHistogramPointData}. */
public class MutableHistogramPointDataTest {

  /**
   * Invokes {@code set(...)} with the fixed scaffold values shared by every call in this test
   * class; only {@code sum}, {@code boundaries} and {@code counts} vary between calls.
   */
  private static MutableHistogramPointData fill(
      MutableHistogramPointData pointData,
      double sum,
      java.util.List<Double> boundaries,
      long[] counts) {
    return pointData.set(
        /* startEpochNanos= */ 10,
        /* epochNanos= */ 20,
        Attributes.of(AttributeKey.stringKey("foo"), "bar"),
        sum,
        /* hasMin= */ true,
        /* min= */ 100,
        /* hasMax= */ true,
        /* max= */ 1000,
        boundaries,
        counts,
        Collections.emptyList());
  }

  @Test
  void testSanity() {
    MutableHistogramPointData pointData = new MutableHistogramPointData(10);

    // A freshly constructed point is zeroed apart from its pre-sized bucket storage.
    assertThat(pointData.getSum()).isEqualTo(0);
    assertThat(pointData.getCount()).isEqualTo(0);
    assertThat(pointData.getBoundaries()).isEmpty();
    assertThat(pointData.getCounts().size()).isEqualTo(10);
    assertThat(pointData.getExemplars()).isEmpty();

    java.util.List<Double> boundaries =
        Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0);
    long[] bucketCounts = {10, 20, 30, 40, 50, 60, 70, 80, 90, 100};
    fill(pointData, /* sum= */ 2, boundaries, bucketCounts);

    // All fields reflect the values passed to set(...), with count derived as the
    // sum of the bucket counts.
    assertThat(pointData.getSum()).isEqualTo(2);
    assertThat(pointData.getCount()).isEqualTo(10 + 20 + 30 + 40 + 50 + 60 + 70 + 80 + 90 + 100);
    assertThat(pointData.getAttributes().get(AttributeKey.stringKey("foo"))).isEqualTo("bar");
    assertThat(pointData.getAttributes().size()).isEqualTo(1);
    assertThat(pointData.getBoundaries())
        .containsExactly(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0);
    assertThat(pointData.getCounts().toArray())
        .containsExactly(10L, 20L, 30L, 40L, 50L, 60L, 70L, 80L, 90L, 100L);
    assertThat(pointData.getStartEpochNanos()).isEqualTo(10);
    assertThat(pointData.getEpochNanos()).isEqualTo(20);
    assertThat(pointData.hasMin()).isTrue();
    assertThat(pointData.getMin()).isEqualTo(100);
    assertThat(pointData.hasMax()).isTrue();
    assertThat(pointData.getMax()).isEqualTo(1000);
    assertThat(pointData.getExemplars()).isEmpty();

    assertThat(pointData.toString())
        .isEqualTo(
            "MutableHistogramPointData{startEpochNanos=10, "
                + "epochNanos=20, "
                + "attributes={foo=\"bar\"}, "
                + "sum=2.0, "
                + "count=550, "
                + "hasMin=true, "
                + "min=100.0, "
                + "hasMax=true, "
                + "max=1000.0, "
                + "boundaries=[1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], "
                + "counts=[10, 20, 30, 40, 50, 60, 70, 80, 90, 100], "
                + "exemplars=[]}");

    // A second instance set to identical values is equal with an equal hash...
    MutableHistogramPointData anotherPointData = new MutableHistogramPointData(10);
    fill(anotherPointData, /* sum= */ 2, boundaries, bucketCounts);
    assertThat(anotherPointData).isEqualTo(pointData);
    assertThat(anotherPointData.hashCode()).isEqualTo(pointData.hashCode());

    // ...while changing a single field (sum) breaks both equality and hash equality.
    fill(anotherPointData, /* sum= */ 20000, boundaries, bucketCounts);
    assertThat(anotherPointData).isNotEqualTo(pointData);
    assertThat(anotherPointData.hashCode()).isNotEqualTo(pointData.hashCode());
  }

  @Test
  void testBoundaries() {
    MutableHistogramPointData pointData = new MutableHistogramPointData(10);
    long[] bucketCounts = {10, 20, 30, 40, 50, 60, 70, 80, 90, 100};

    // Too few boundaries for the allocated bucket count.
    assertThatThrownBy(
            () -> fill(pointData, /* sum= */ 2, Arrays.asList(1.0, 2.0, 3.0, 4.0), bucketCounts))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("invalid boundaries: size should be 9 but was 4");

    // Explicit infinity at an edge is rejected.
    assertThatThrownBy(
            () ->
                fill(
                    pointData,
                    /* sum= */ 2,
                    Arrays.asList(
                        1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, Double.POSITIVE_INFINITY),
                    bucketCounts))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("invalid boundaries: contains explicit +/-Inf");
  }

  @Test
  void testCounts() {
    MutableHistogramPointData pointData = new MutableHistogramPointData(10);

    // Counts array must match the bucket count the point was constructed with.
    assertThatThrownBy(
            () ->
                fill(
                    pointData,
                    /* sum= */ 2,
                    Arrays.asList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0),
                    new long[] {10, 20, 30, 40, 50, 60}))
        .isInstanceOf(IllegalArgumentException.class)
        .hasMessage("invalid counts: size should be 10 but was 6");
  }
}