Delete notion of accumulation (#5154)

This commit is contained in:
jack-berg 2023-02-03 10:56:17 -06:00 committed by GitHub
parent f43905a535
commit 9cfdf67a49
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 886 additions and 1036 deletions

View File

@ -5,14 +5,25 @@
package io.opentelemetry.sdk.metrics.internal.aggregator; package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
abstract class AbstractSumAggregator<T, U extends ExemplarData> implements Aggregator<T, U> { abstract class AbstractSumAggregator<T extends PointData, U extends ExemplarData>
implements Aggregator<T, U> {
private final boolean isMonotonic; private final boolean isMonotonic;
AbstractSumAggregator(InstrumentDescriptor instrumentDescriptor) { AbstractSumAggregator(InstrumentDescriptor instrumentDescriptor) {
this.isMonotonic = MetricDataUtils.isMonotonicInstrument(instrumentDescriptor); this.isMonotonic = isMonotonicInstrument(instrumentDescriptor);
}
/** Returns true if the instrument does not allow negative measurements. */
private static boolean isMonotonicInstrument(InstrumentDescriptor descriptor) {
InstrumentType type = descriptor.getType();
return type == InstrumentType.HISTOGRAM
|| type == InstrumentType.COUNTER
|| type == InstrumentType.OBSERVABLE_COUNTER;
} }
final boolean isMonotonic() { final boolean isMonotonic() {

View File

@ -5,36 +5,30 @@
package io.opentelemetry.sdk.metrics.internal.aggregator; package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.state.Measurement;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Map; import java.util.Collection;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable; import javax.annotation.concurrent.Immutable;
/** /**
* Aggregator represents the abstract class for all the available aggregations that can be computed * Aggregator represents the abstract class for all the available aggregations that can be computed
* during the accumulation phase for all the instrument. * during the collection phase for all the instruments.
*
* <p>The synchronous instruments will create an {@link AggregatorHandle} to record individual
* measurements synchronously, and for asynchronous the {@link #accumulateDoubleMeasurement} or
* {@link #accumulateLongMeasurement} will be used when reading values from the instrument
* callbacks.
* *
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change * <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time. * at any time.
*/ */
@Immutable @Immutable
public interface Aggregator<T, U extends ExemplarData> { public interface Aggregator<T extends PointData, U extends ExemplarData> {
/** Returns the drop aggregator, an aggregator that drops measurements. */ /** Returns the drop aggregator, an aggregator that drops measurements. */
static Aggregator<Object, DoubleExemplarData> drop() { static Aggregator<?, DoubleExemplarData> drop() {
return DropAggregator.INSTANCE; return DropAggregator.INSTANCE;
} }
@ -47,67 +41,41 @@ public interface Aggregator<T, U extends ExemplarData> {
AggregatorHandle<T, U> createHandle(); AggregatorHandle<T, U> createHandle();
/** /**
* Returns a new {@code Accumulation} for the given value. This MUST be used by the asynchronous * Returns a new DELTA point by computing the difference between two cumulative points.
* instruments to create {@code Accumulation} that are passed to the processor.
*
* @param value the given value to be used to create the {@code Accumulation}.
* @return a new {@code Accumulation} for the given value, or {@code null} if there are no
* recordings.
*/
@Nullable
default T accumulateLongMeasurement(long value, Attributes attributes, Context context) {
AggregatorHandle<T, U> handle = createHandle();
handle.recordLong(value, attributes, context);
return handle.accumulateThenMaybeReset(attributes, /* reset= */ true);
}
/**
* Returns a new {@code Accumulation} for the given value. This MUST be used by the asynchronous
* instruments to create {@code Accumulation} that are passed to the processor.
*
* @param value the given value to be used to create the {@code Accumulation}.
* @return a new {@code Accumulation} for the given value, or {@code null} if there are no
* recordings.
*/
@Nullable
default T accumulateDoubleMeasurement(double value, Attributes attributes, Context context) {
AggregatorHandle<T, U> handle = createHandle();
handle.recordDouble(value, attributes, context);
return handle.accumulateThenMaybeReset(attributes, /* reset= */ true);
}
/**
* Returns a new DELTA aggregation by comparing two cumulative measurements.
* *
* <p>Aggregators MUST implement diff if it can be used with asynchronous instruments. * <p>Aggregators MUST implement diff if it can be used with asynchronous instruments.
* *
* @param previousCumulative the previously captured accumulation. * @param previousCumulative the previously captured point.
* @param currentCumulative the newly captured (cumulative) accumulation. * @param currentCumulative the newly captured (cumulative) point.
* @return The resulting delta accumulation. * @return The resulting delta point.
*/ */
default T diff(T previousCumulative, T currentCumulative) { default T diff(T previousCumulative, T currentCumulative) {
throw new UnsupportedOperationException("This aggregator does not support diff."); throw new UnsupportedOperationException("This aggregator does not support diff.");
} }
/**
* Return a new point representing the measurement.
*
* <p>Aggregators MUST implement diff if it can be used with asynchronous instruments.
*/
default T toPoint(Measurement measurement) {
throw new UnsupportedOperationException("This aggregator does not support toPoint.");
}
/** /**
* Returns the {@link MetricData} that this {@code Aggregation} will produce. * Returns the {@link MetricData} that this {@code Aggregation} will produce.
* *
* @param resource the resource producing the metric. * @param resource the resource producing the metric.
* @param instrumentationScopeInfo the scope that instrumented the metric. * @param instrumentationScopeInfo the scope that instrumented the metric.
* @param metricDescriptor the name, description and unit of the metric. * @param metricDescriptor the name, description and unit of the metric.
* @param accumulationByLabels the map of Labels to Accumulation. * @param points list of points
* @param temporality the temporality of the accumulation. * @param temporality the temporality of the metric.
* @param startEpochNanos the startEpochNanos for the {@code Point}.
* @param epochNanos the epochNanos for the {@code Point}.
* @return the {@link MetricDataType} that this {@code Aggregation} will produce. * @return the {@link MetricDataType} that this {@code Aggregation} will produce.
*/ */
MetricData toMetricData( MetricData toMetricData(
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor metricDescriptor, MetricDescriptor metricDescriptor,
Map<Attributes, T> accumulationByLabels, Collection<T> points,
AggregationTemporality temporality, AggregationTemporality temporality);
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos);
} }

View File

@ -6,6 +6,7 @@
package io.opentelemetry.sdk.metrics.internal.aggregator; package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarFilter;
@ -28,7 +29,7 @@ public interface AggregatorFactory {
* @return a new {@link Aggregator}. {@link Aggregator#drop()} indicates no measurements should be * @return a new {@link Aggregator}. {@link Aggregator#drop()} indicates no measurements should be
* recorded. * recorded.
*/ */
<T, U extends ExemplarData> Aggregator<T, U> createAggregator( <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter); InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter);
/** /**

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context; import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.BoundStorageHandle; import io.opentelemetry.sdk.metrics.internal.state.BoundStorageHandle;
import java.util.List; import java.util.List;
@ -32,7 +33,8 @@ import javax.annotation.concurrent.ThreadSafe;
* at any time. * at any time.
*/ */
@ThreadSafe @ThreadSafe
public abstract class AggregatorHandle<T, U extends ExemplarData> implements BoundStorageHandle { public abstract class AggregatorHandle<T extends PointData, U extends ExemplarData>
implements BoundStorageHandle {
// Atomically counts the number of references (usages) while also keeping a state of // Atomically counts the number of references (usages) while also keeping a state of
// mapped/unmapped into a registry map. // mapped/unmapped into a registry map.
private final AtomicLong refCountMapped; private final AtomicLong refCountMapped;
@ -89,18 +91,29 @@ public abstract class AggregatorHandle<T, U extends ExemplarData> implements Bou
* current value in this {@code Aggregator}. * current value in this {@code Aggregator}.
*/ */
@Nullable @Nullable
public final T accumulateThenMaybeReset(Attributes attributes, boolean reset) { public final T aggregateThenMaybeReset(
long startEpochNanos, long epochNanos, Attributes attributes, boolean reset) {
if (!hasRecordings) { if (!hasRecordings) {
return null; return null;
} }
if (reset) { if (reset) {
hasRecordings = false; hasRecordings = false;
} }
return doAccumulateThenMaybeReset(exemplarReservoir.collectAndReset(attributes), reset); return doAggregateThenMaybeReset(
startEpochNanos,
epochNanos,
attributes,
exemplarReservoir.collectAndReset(attributes),
reset);
} }
/** Implementation of the {@link #accumulateThenMaybeReset(Attributes, boolean)}. */ /** Implementation of the {@link #aggregateThenMaybeReset(long, long, Attributes, boolean)} . */
protected abstract T doAccumulateThenMaybeReset(List<U> exemplars, boolean reset); protected abstract T doAggregateThenMaybeReset(
long startEpochNanos,
long epochNanos,
Attributes attributes,
List<U> exemplars,
boolean reset);
@Override @Override
public final void recordLong(long value, Attributes attributes, Context context) { public final void recordLong(long value, Attributes attributes, Context context) {

View File

@ -1,41 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.aggregator;
import com.google.auto.value.AutoValue;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/**
* An accumulation representing {@code long} values and exemplars.
*
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time.
*
* <p>Visible for testing.
*/
@Immutable
@AutoValue
public abstract class DoubleAccumulation {
static DoubleAccumulation create(double value, List<DoubleExemplarData> exemplars) {
return new AutoValue_DoubleAccumulation(value, exemplars);
}
public static DoubleAccumulation create(double value) {
return create(value, Collections.emptyList());
}
DoubleAccumulation() {}
/** The current value. */
public abstract double getValue();
/** Sampled measurements recorded during this accumulation. */
abstract List<DoubleExemplarData> getExemplars();
}

View File

@ -8,19 +8,22 @@ package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.internal.GuardedBy; import io.opentelemetry.api.internal.GuardedBy;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.internal.PrimitiveLongList;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -31,7 +34,7 @@ import java.util.function.Supplier;
* at any time. * at any time.
*/ */
public final class DoubleExplicitBucketHistogramAggregator public final class DoubleExplicitBucketHistogramAggregator
implements Aggregator<ExplicitBucketHistogramAccumulation, DoubleExemplarData> { implements Aggregator<HistogramPointData, DoubleExemplarData> {
private final double[] boundaries; private final double[] boundaries;
// a cache for converting to MetricData // a cache for converting to MetricData
@ -58,8 +61,8 @@ public final class DoubleExplicitBucketHistogramAggregator
} }
@Override @Override
public AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> createHandle() { public AggregatorHandle<HistogramPointData, DoubleExemplarData> createHandle() {
return new Handle(this.boundaries, reservoirSupplier.get()); return new Handle(this.boundaryList, this.boundaries, reservoirSupplier.get());
} }
@Override @Override
@ -67,30 +70,20 @@ public final class DoubleExplicitBucketHistogramAggregator
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor metricDescriptor, MetricDescriptor metricDescriptor,
Map<Attributes, ExplicitBucketHistogramAccumulation> accumulationByLabels, Collection<HistogramPointData> pointData,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
return ImmutableMetricData.createDoubleHistogram( return ImmutableMetricData.createDoubleHistogram(
resource, resource,
instrumentationScopeInfo, instrumentationScopeInfo,
metricDescriptor.getName(), metricDescriptor.getName(),
metricDescriptor.getDescription(), metricDescriptor.getDescription(),
metricDescriptor.getSourceInstrument().getUnit(), metricDescriptor.getSourceInstrument().getUnit(),
ImmutableHistogramData.create( ImmutableHistogramData.create(temporality, pointData));
temporality,
MetricDataUtils.toExplicitBucketHistogramPointList(
accumulationByLabels,
(temporality == AggregationTemporality.CUMULATIVE)
? startEpochNanos
: lastCollectionEpoch,
epochNanos,
boundaryList)));
} }
static final class Handle static final class Handle extends AggregatorHandle<HistogramPointData, DoubleExemplarData> {
extends AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> { // read-only
private final List<Double> boundaryList;
// read-only // read-only
private final double[] boundaries; private final double[] boundaries;
@ -111,8 +104,12 @@ public final class DoubleExplicitBucketHistogramAggregator
private final ReentrantLock lock = new ReentrantLock(); private final ReentrantLock lock = new ReentrantLock();
Handle(double[] boundaries, ExemplarReservoir<DoubleExemplarData> reservoir) { Handle(
List<Double> boundaryList,
double[] boundaries,
ExemplarReservoir<DoubleExemplarData> reservoir) {
super(reservoir); super(reservoir);
this.boundaryList = boundaryList;
this.boundaries = boundaries; this.boundaries = boundaries;
this.counts = new long[this.boundaries.length + 1]; this.counts = new long[this.boundaries.length + 1];
this.sum = 0; this.sum = 0;
@ -122,17 +119,24 @@ public final class DoubleExplicitBucketHistogramAggregator
} }
@Override @Override
protected ExplicitBucketHistogramAccumulation doAccumulateThenMaybeReset( protected HistogramPointData doAggregateThenMaybeReset(
List<DoubleExemplarData> exemplars, boolean reset) { long startEpochNanos,
long epochNanos,
Attributes attributes,
List<DoubleExemplarData> exemplars,
boolean reset) {
lock.lock(); lock.lock();
try { try {
ExplicitBucketHistogramAccumulation acc = HistogramPointData pointData =
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
startEpochNanos,
epochNanos,
attributes,
sum, sum,
this.count > 0, this.count > 0 ? this.min : null,
this.count > 0 ? this.min : -1, this.count > 0 ? this.max : null,
this.count > 0 ? this.max : -1, boundaryList,
Arrays.copyOf(counts, counts.length), PrimitiveLongList.wrap(Arrays.copyOf(counts, counts.length)),
exemplars); exemplars);
if (reset) { if (reset) {
this.sum = 0; this.sum = 0;
@ -141,7 +145,7 @@ public final class DoubleExplicitBucketHistogramAggregator
this.count = 0; this.count = 0;
Arrays.fill(this.counts, 0); Arrays.fill(this.counts, 0);
} }
return acc; return pointData;
} finally { } finally {
lock.unlock(); lock.unlock();
} }

View File

@ -11,12 +11,15 @@ import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -31,7 +34,7 @@ import javax.annotation.Nullable;
* at any time. * at any time.
*/ */
public final class DoubleExponentialHistogramAggregator public final class DoubleExponentialHistogramAggregator
implements Aggregator<ExponentialHistogramAccumulation, DoubleExemplarData> { implements Aggregator<ExponentialHistogramPointData, DoubleExemplarData> {
private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier; private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier;
private final int maxBuckets; private final int maxBuckets;
@ -52,7 +55,7 @@ public final class DoubleExponentialHistogramAggregator
} }
@Override @Override
public AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> createHandle() { public AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> createHandle() {
return new Handle(reservoirSupplier.get(), maxBuckets, maxScale); return new Handle(reservoirSupplier.get(), maxBuckets, maxScale);
} }
@ -61,29 +64,19 @@ public final class DoubleExponentialHistogramAggregator
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor metricDescriptor, MetricDescriptor metricDescriptor,
Map<Attributes, ExponentialHistogramAccumulation> accumulationByLabels, Collection<ExponentialHistogramPointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
return ImmutableMetricData.createExponentialHistogram( return ImmutableMetricData.createExponentialHistogram(
resource, resource,
instrumentationScopeInfo, instrumentationScopeInfo,
metricDescriptor.getName(), metricDescriptor.getName(),
metricDescriptor.getDescription(), metricDescriptor.getDescription(),
metricDescriptor.getSourceInstrument().getUnit(), metricDescriptor.getSourceInstrument().getUnit(),
ImmutableExponentialHistogramData.create( ImmutableExponentialHistogramData.create(temporality, points));
temporality,
MetricDataUtils.toExponentialHistogramPointList(
accumulationByLabels,
(temporality == AggregationTemporality.CUMULATIVE)
? startEpochNanos
: lastCollectionEpoch,
epochNanos)));
} }
static final class Handle static final class Handle
extends AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> { extends AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> {
private final int maxBuckets; private final int maxBuckets;
@Nullable private DoubleExponentialHistogramBuckets positiveBuckets; @Nullable private DoubleExponentialHistogramBuckets positiveBuckets;
@Nullable private DoubleExponentialHistogramBuckets negativeBuckets; @Nullable private DoubleExponentialHistogramBuckets negativeBuckets;
@ -106,18 +99,24 @@ public final class DoubleExponentialHistogramAggregator
} }
@Override @Override
protected synchronized ExponentialHistogramAccumulation doAccumulateThenMaybeReset( protected synchronized ExponentialHistogramPointData doAggregateThenMaybeReset(
List<DoubleExemplarData> exemplars, boolean reset) { long startEpochNanos,
ExponentialHistogramAccumulation acc = long epochNanos,
ExponentialHistogramAccumulation.create( Attributes attributes,
List<DoubleExemplarData> exemplars,
boolean reset) {
ExponentialHistogramPointData point =
ImmutableExponentialHistogramPointData.create(
scale, scale,
sum, sum,
this.count > 0, zeroCount,
this.count > 0 ? this.min : -1, this.count > 0 ? this.min : null,
this.count > 0 ? this.max : -1, this.count > 0 ? this.max : null,
resolveBuckets(this.positiveBuckets, scale, reset), resolveBuckets(this.positiveBuckets, scale, reset),
resolveBuckets(this.negativeBuckets, scale, reset), resolveBuckets(this.negativeBuckets, scale, reset),
zeroCount, startEpochNanos,
epochNanos,
attributes,
exemplars); exemplars);
if (reset) { if (reset) {
this.sum = 0; this.sum = 0;
@ -126,7 +125,7 @@ public final class DoubleExponentialHistogramAggregator
this.max = -1; this.max = -1;
this.count = 0; this.count = 0;
} }
return acc; return point;
} }
private static ExponentialHistogramBuckets resolveBuckets( private static ExponentialHistogramBuckets resolveBuckets(
@ -176,7 +175,8 @@ public final class DoubleExponentialHistogramAggregator
// Record; If recording fails, calculate scale reduction and scale down to fit new value. // Record; If recording fails, calculate scale reduction and scale down to fit new value.
// 2nd attempt at recording should work with new scale // 2nd attempt at recording should work with new scale
// TODO: We should experiment with downscale on demand during sync execution and only // TODO: We should experiment with downscale on demand during sync execution and only
// unifying scale factor between positive/negative at collection time (doAccumulate). // unifying scale factor between positive/negative at collection time
// (doAggregateThenMaybeReset).
if (!buckets.record(value)) { if (!buckets.record(value)) {
// getScaleReduction() used with downScale() will scale down as required to record value, // getScaleReduction() used with downScale() will scale down as required to record value,
// fit inside max allowed buckets, and make sure index can be represented by int. // fit inside max allowed buckets, and make sure index can be represented by int.

View File

@ -9,14 +9,17 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.Measurement;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -36,7 +39,7 @@ import javax.annotation.concurrent.ThreadSafe;
*/ */
@ThreadSafe @ThreadSafe
public final class DoubleLastValueAggregator public final class DoubleLastValueAggregator
implements Aggregator<DoubleAccumulation, DoubleExemplarData> { implements Aggregator<DoublePointData, DoubleExemplarData> {
private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier; private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier;
public DoubleLastValueAggregator( public DoubleLastValueAggregator(
@ -45,25 +48,31 @@ public final class DoubleLastValueAggregator
} }
@Override @Override
public AggregatorHandle<DoubleAccumulation, DoubleExemplarData> createHandle() { public AggregatorHandle<DoublePointData, DoubleExemplarData> createHandle() {
return new Handle(reservoirSupplier.get()); return new Handle(reservoirSupplier.get());
} }
@Override @Override
public DoubleAccumulation diff(DoubleAccumulation previous, DoubleAccumulation current) { public DoublePointData diff(DoublePointData previous, DoublePointData current) {
return current; return current;
} }
@Override
public DoublePointData toPoint(Measurement measurement) {
return ImmutableDoublePointData.create(
measurement.startEpochNanos(),
measurement.epochNanos(),
measurement.attributes(),
measurement.doubleValue());
}
@Override @Override
public MetricData toMetricData( public MetricData toMetricData(
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor descriptor, MetricDescriptor descriptor,
Map<Attributes, DoubleAccumulation> accumulationByLabels, Collection<DoublePointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpochNanos,
long epochNanos) {
// Gauge does not need a start time, but we send one as advised by the data model // Gauge does not need a start time, but we send one as advised by the data model
// for identifying resets. // for identifying resets.
return ImmutableMetricData.createDoubleGauge( return ImmutableMetricData.createDoubleGauge(
@ -72,16 +81,10 @@ public final class DoubleLastValueAggregator
descriptor.getName(), descriptor.getName(),
descriptor.getDescription(), descriptor.getDescription(),
descriptor.getSourceInstrument().getUnit(), descriptor.getSourceInstrument().getUnit(),
ImmutableGaugeData.create( ImmutableGaugeData.create(points));
MetricDataUtils.toDoublePointList(
accumulationByLabels,
(temporality == AggregationTemporality.CUMULATIVE)
? startEpochNanos
: lastCollectionEpochNanos,
epochNanos)));
} }
static final class Handle extends AggregatorHandle<DoubleAccumulation, DoubleExemplarData> { static final class Handle extends AggregatorHandle<DoublePointData, DoubleExemplarData> {
@Nullable private static final Double DEFAULT_VALUE = null; @Nullable private static final Double DEFAULT_VALUE = null;
private final AtomicReference<Double> current = new AtomicReference<>(DEFAULT_VALUE); private final AtomicReference<Double> current = new AtomicReference<>(DEFAULT_VALUE);
@ -90,12 +93,15 @@ public final class DoubleLastValueAggregator
} }
@Override @Override
protected DoubleAccumulation doAccumulateThenMaybeReset( protected DoublePointData doAggregateThenMaybeReset(
List<DoubleExemplarData> exemplars, boolean reset) { long startEpochNanos,
if (reset) { long epochNanos,
return DoubleAccumulation.create(this.current.getAndSet(DEFAULT_VALUE), exemplars); Attributes attributes,
} List<DoubleExemplarData> exemplars,
return DoubleAccumulation.create(Objects.requireNonNull(this.current.get()), exemplars); boolean reset) {
Double value = reset ? this.current.getAndSet(DEFAULT_VALUE) : this.current.get();
return ImmutableDoublePointData.create(
startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars);
} }
@Override @Override

View File

@ -6,21 +6,23 @@
package io.opentelemetry.sdk.metrics.internal.aggregator; package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.concurrent.AdderUtil; import io.opentelemetry.sdk.metrics.internal.concurrent.AdderUtil;
import io.opentelemetry.sdk.metrics.internal.concurrent.DoubleAdder; import io.opentelemetry.sdk.metrics.internal.concurrent.DoubleAdder;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.Measurement;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.function.Supplier; import java.util.function.Supplier;
/** /**
@ -30,7 +32,7 @@ import java.util.function.Supplier;
* at any time. * at any time.
*/ */
public final class DoubleSumAggregator public final class DoubleSumAggregator
extends AbstractSumAggregator<DoubleAccumulation, DoubleExemplarData> { extends AbstractSumAggregator<DoublePointData, DoubleExemplarData> {
private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier; private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier;
/** /**
@ -48,21 +50,27 @@ public final class DoubleSumAggregator
} }
@Override @Override
public AggregatorHandle<DoubleAccumulation, DoubleExemplarData> createHandle() { public AggregatorHandle<DoublePointData, DoubleExemplarData> createHandle() {
return new Handle(reservoirSupplier.get()); return new Handle(reservoirSupplier.get());
} }
@Override @Override
public DoubleAccumulation accumulateDoubleMeasurement( public DoublePointData diff(DoublePointData previousPoint, DoublePointData currentPoint) {
double value, Attributes attributes, Context context) { return ImmutableDoublePointData.create(
return DoubleAccumulation.create(value); currentPoint.getStartEpochNanos(),
currentPoint.getEpochNanos(),
currentPoint.getAttributes(),
currentPoint.getValue() - previousPoint.getValue(),
currentPoint.getExemplars());
} }
@Override @Override
public DoubleAccumulation diff( public DoublePointData toPoint(Measurement measurement) {
DoubleAccumulation previousAccumulation, DoubleAccumulation accumulation) { return ImmutableDoublePointData.create(
return DoubleAccumulation.create( measurement.startEpochNanos(),
accumulation.getValue() - previousAccumulation.getValue(), accumulation.getExemplars()); measurement.epochNanos(),
measurement.attributes(),
measurement.doubleValue());
} }
@Override @Override
@ -70,29 +78,18 @@ public final class DoubleSumAggregator
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor descriptor, MetricDescriptor descriptor,
Map<Attributes, DoubleAccumulation> accumulationByLabels, Collection<DoublePointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
return ImmutableMetricData.createDoubleSum( return ImmutableMetricData.createDoubleSum(
resource, resource,
instrumentationScopeInfo, instrumentationScopeInfo,
descriptor.getName(), descriptor.getName(),
descriptor.getDescription(), descriptor.getDescription(),
descriptor.getSourceInstrument().getUnit(), descriptor.getSourceInstrument().getUnit(),
ImmutableSumData.create( ImmutableSumData.create(isMonotonic(), temporality, points));
isMonotonic(),
temporality,
MetricDataUtils.toDoublePointList(
accumulationByLabels,
temporality == AggregationTemporality.CUMULATIVE
? startEpochNanos
: lastCollectionEpoch,
epochNanos)));
} }
static final class Handle extends AggregatorHandle<DoubleAccumulation, DoubleExemplarData> { static final class Handle extends AggregatorHandle<DoublePointData, DoubleExemplarData> {
private final DoubleAdder current = AdderUtil.createDoubleAdder(); private final DoubleAdder current = AdderUtil.createDoubleAdder();
Handle(ExemplarReservoir<DoubleExemplarData> exemplarReservoir) { Handle(ExemplarReservoir<DoubleExemplarData> exemplarReservoir) {
@ -100,12 +97,15 @@ public final class DoubleSumAggregator
} }
@Override @Override
protected DoubleAccumulation doAccumulateThenMaybeReset( protected DoublePointData doAggregateThenMaybeReset(
List<DoubleExemplarData> exemplars, boolean reset) { long startEpochNanos,
if (reset) { long epochNanos,
return DoubleAccumulation.create(this.current.sumThenReset(), exemplars); Attributes attributes,
} List<DoubleExemplarData> exemplars,
return DoubleAccumulation.create(this.current.sum(), exemplars); boolean reset) {
double value = reset ? this.current.sumThenReset() : this.current.sum();
return ImmutableDoublePointData.create(
startEpochNanos, epochNanos, attributes, value, exemplars);
} }
@Override @Override

View File

@ -9,12 +9,15 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map;
/** /**
* A "null object" Aggregator which denotes no aggregation should occur. * A "null object" Aggregator which denotes no aggregation should occur.
@ -22,31 +25,56 @@ import java.util.Map;
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change * <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time. * at any time.
*/ */
public final class DropAggregator implements Aggregator<Object, DoubleExemplarData> { public final class DropAggregator implements Aggregator<PointData, DoubleExemplarData> {
private static final Object ACCUMULATION = new Object(); private static final PointData POINT_DATA =
new PointData() {
@Override
public long getStartEpochNanos() {
return 0;
}
public static final Aggregator<Object, DoubleExemplarData> INSTANCE = new DropAggregator(); @Override
public long getEpochNanos() {
return 0;
}
@Override
public Attributes getAttributes() {
return Attributes.empty();
}
@Override
public List<? extends ExemplarData> getExemplars() {
return Collections.emptyList();
}
};
public static final Aggregator<PointData, DoubleExemplarData> INSTANCE = new DropAggregator();
private static final AggregatorHandle<PointData, DoubleExemplarData> HANDLE =
new AggregatorHandle<PointData, DoubleExemplarData>(ExemplarReservoir.doubleNoSamples()) {
@Override
protected PointData doAggregateThenMaybeReset(
long startEpochNanos,
long epochNanos,
Attributes attributes,
List<DoubleExemplarData> exemplars,
boolean reset) {
return POINT_DATA;
}
private static final AggregatorHandle<Object, DoubleExemplarData> HANDLE =
new AggregatorHandle<Object, DoubleExemplarData>(ExemplarReservoir.doubleNoSamples()) {
@Override @Override
protected void doRecordLong(long value) {} protected void doRecordLong(long value) {}
@Override @Override
protected void doRecordDouble(double value) {} protected void doRecordDouble(double value) {}
@Override
protected Object doAccumulateThenMaybeReset(
List<DoubleExemplarData> exemplars, boolean reset) {
return ACCUMULATION;
}
}; };
private DropAggregator() {} private DropAggregator() {}
@Override @Override
public AggregatorHandle<Object, DoubleExemplarData> createHandle() { public AggregatorHandle<PointData, DoubleExemplarData> createHandle() {
return HANDLE; return HANDLE;
} }
@ -54,12 +82,9 @@ public final class DropAggregator implements Aggregator<Object, DoubleExemplarDa
public MetricData toMetricData( public MetricData toMetricData(
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor descriptor, MetricDescriptor metricDescriptor,
Map<Attributes, Object> accumulationByLabels, Collection<PointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
return EmptyMetricData.getInstance(); return EmptyMetricData.getInstance();
} }
} }

View File

@ -1,74 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.aggregator;
import com.google.auto.value.AutoValue;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
@Immutable
@AutoValue
abstract class ExplicitBucketHistogramAccumulation {
/**
* Creates a new {@link ExplicitBucketHistogramAccumulation} with the given values. Assume
* `counts` is read-only so we don't need a defensive-copy here.
*
* @return a new {@link ExplicitBucketHistogramAccumulation} with the given values.
*/
static ExplicitBucketHistogramAccumulation create(
double sum, boolean hasMinMax, double min, double max, long[] counts) {
return create(sum, hasMinMax, min, max, counts, Collections.emptyList());
}
static ExplicitBucketHistogramAccumulation create(
double sum,
boolean hasMinMax,
double min,
double max,
long[] counts,
List<DoubleExemplarData> exemplars) {
return new AutoValue_ExplicitBucketHistogramAccumulation(
sum, hasMinMax, min, max, counts, exemplars);
}
ExplicitBucketHistogramAccumulation() {}
/**
* The sum of all measurements recorded.
*
* @return the sum of recorded measurements.
*/
abstract double getSum();
/** Return {@code true} if {@link #getMin()} and {@link #getMax()} is set. */
abstract boolean hasMinMax();
/**
* The min of all measurements recorded, if {@link #hasMinMax()} is {@code true}. If {@link
* #hasMinMax()} is {@code false}, the response should be ignored.
*/
abstract double getMin();
/**
* The max of all measurements recorded, if {@link #hasMinMax()} is {@code true}. If {@link
* #hasMinMax()} is {@code false}, the response should be ignored.
*/
abstract double getMax();
/**
* The counts in each bucket. The returned type is a mutable object, but it should be fine because
* the class is only used internally.
*
* @return the counts in each bucket. <b>do not mutate</b> the returned object.
*/
@SuppressWarnings("mutable")
abstract long[] getCounts();
/** Exemplars accumulated during this period. */
abstract List<DoubleExemplarData> getExemplars();
}

View File

@ -1,49 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.aggregator;
import com.google.auto.value.AutoValue;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import java.util.List;
@AutoValue
abstract class ExponentialHistogramAccumulation {
ExponentialHistogramAccumulation() {}
/** Creates a new {@link ExponentialHistogramAccumulation} with the given values. */
static ExponentialHistogramAccumulation create(
int scale,
double sum,
boolean hasMinMax,
double min,
double max,
ExponentialHistogramBuckets positiveBuckets,
ExponentialHistogramBuckets negativeBuckets,
long zeroCount,
List<DoubleExemplarData> exemplars) {
return new AutoValue_ExponentialHistogramAccumulation(
scale, sum, hasMinMax, min, max, positiveBuckets, negativeBuckets, zeroCount, exemplars);
}
abstract int getScale();
abstract double getSum();
abstract boolean hasMinMax();
abstract double getMin();
abstract double getMax();
abstract ExponentialHistogramBuckets getPositiveBuckets();
abstract ExponentialHistogramBuckets getNegativeBuckets();
abstract long getZeroCount();
abstract List<DoubleExemplarData> getExemplars();
}

View File

@ -1,34 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.aggregator;
import com.google.auto.value.AutoValue;
import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import java.util.Collections;
import java.util.List;
import javax.annotation.concurrent.Immutable;
/** An accumulation representing {@code long} values and exemplars. */
@Immutable
@AutoValue
abstract class LongAccumulation {
static LongAccumulation create(long value, List<LongExemplarData> exemplars) {
return new AutoValue_LongAccumulation(value, exemplars);
}
static LongAccumulation create(long value) {
return create(value, Collections.emptyList());
}
LongAccumulation() {}
/** The current value. */
abstract long getValue();
/** Sampled measurements recorded during this accumulation. */
abstract List<LongExemplarData> getExemplars();
}

View File

@ -9,14 +9,17 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.Measurement;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -33,8 +36,7 @@ import javax.annotation.Nullable;
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change * <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time. * at any time.
*/ */
public final class LongLastValueAggregator public final class LongLastValueAggregator implements Aggregator<LongPointData, LongExemplarData> {
implements Aggregator<LongAccumulation, LongExemplarData> {
private final Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier; private final Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier;
public LongLastValueAggregator(Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier) { public LongLastValueAggregator(Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier) {
@ -42,25 +44,31 @@ public final class LongLastValueAggregator
} }
@Override @Override
public AggregatorHandle<LongAccumulation, LongExemplarData> createHandle() { public AggregatorHandle<LongPointData, LongExemplarData> createHandle() {
return new Handle(reservoirSupplier.get()); return new Handle(reservoirSupplier.get());
} }
@Override @Override
public LongAccumulation diff(LongAccumulation previous, LongAccumulation current) { public LongPointData diff(LongPointData previous, LongPointData current) {
return current; return current;
} }
@Override
public LongPointData toPoint(Measurement measurement) {
return ImmutableLongPointData.create(
measurement.startEpochNanos(),
measurement.epochNanos(),
measurement.attributes(),
measurement.longValue());
}
@Override @Override
public MetricData toMetricData( public MetricData toMetricData(
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor descriptor, MetricDescriptor descriptor,
Map<Attributes, LongAccumulation> accumulationByLabels, Collection<LongPointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
// Last-Value ignores temporality generally, but we can set a start time on the gauge. // Last-Value ignores temporality generally, but we can set a start time on the gauge.
return ImmutableMetricData.createLongGauge( return ImmutableMetricData.createLongGauge(
resource, resource,
@ -68,16 +76,10 @@ public final class LongLastValueAggregator
descriptor.getName(), descriptor.getName(),
descriptor.getDescription(), descriptor.getDescription(),
descriptor.getSourceInstrument().getUnit(), descriptor.getSourceInstrument().getUnit(),
ImmutableGaugeData.create( ImmutableGaugeData.create(points));
MetricDataUtils.toLongPointList(
accumulationByLabels,
(temporality == AggregationTemporality.CUMULATIVE)
? startEpochNanos
: lastCollectionEpoch,
epochNanos)));
} }
static final class Handle extends AggregatorHandle<LongAccumulation, LongExemplarData> { static final class Handle extends AggregatorHandle<LongPointData, LongExemplarData> {
@Nullable private static final Long DEFAULT_VALUE = null; @Nullable private static final Long DEFAULT_VALUE = null;
private final AtomicReference<Long> current = new AtomicReference<>(DEFAULT_VALUE); private final AtomicReference<Long> current = new AtomicReference<>(DEFAULT_VALUE);
@ -86,12 +88,15 @@ public final class LongLastValueAggregator
} }
@Override @Override
protected LongAccumulation doAccumulateThenMaybeReset( protected LongPointData doAggregateThenMaybeReset(
List<LongExemplarData> exemplars, boolean reset) { long startEpochNanos,
if (reset) { long epochNanos,
return LongAccumulation.create(this.current.getAndSet(DEFAULT_VALUE), exemplars); Attributes attributes,
} List<LongExemplarData> exemplars,
return LongAccumulation.create(Objects.requireNonNull(this.current.get()), exemplars); boolean reset) {
Long value = reset ? this.current.getAndSet(DEFAULT_VALUE) : this.current.get();
return ImmutableLongPointData.create(
startEpochNanos, epochNanos, attributes, Objects.requireNonNull(value), exemplars);
} }
@Override @Override

View File

@ -9,17 +9,20 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.concurrent.AdderUtil; import io.opentelemetry.sdk.metrics.internal.concurrent.AdderUtil;
import io.opentelemetry.sdk.metrics.internal.concurrent.LongAdder; import io.opentelemetry.sdk.metrics.internal.concurrent.LongAdder;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableMetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableSumData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.metrics.internal.state.Measurement;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.function.Supplier; import java.util.function.Supplier;
/** /**
@ -29,7 +32,7 @@ import java.util.function.Supplier;
* at any time. * at any time.
*/ */
public final class LongSumAggregator public final class LongSumAggregator
extends AbstractSumAggregator<LongAccumulation, LongExemplarData> { extends AbstractSumAggregator<LongPointData, LongExemplarData> {
private final Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier; private final Supplier<ExemplarReservoir<LongExemplarData>> reservoirSupplier;
@ -41,15 +44,27 @@ public final class LongSumAggregator
} }
@Override @Override
public AggregatorHandle<LongAccumulation, LongExemplarData> createHandle() { public AggregatorHandle<LongPointData, LongExemplarData> createHandle() {
return new Handle(reservoirSupplier.get()); return new Handle(reservoirSupplier.get());
} }
@Override @Override
public LongAccumulation diff( public LongPointData diff(LongPointData previousPoint, LongPointData currentPoint) {
LongAccumulation previousAccumulation, LongAccumulation accumulation) { return ImmutableLongPointData.create(
return LongAccumulation.create( currentPoint.getStartEpochNanos(),
accumulation.getValue() - previousAccumulation.getValue(), accumulation.getExemplars()); currentPoint.getEpochNanos(),
currentPoint.getAttributes(),
currentPoint.getValue() - previousPoint.getValue(),
currentPoint.getExemplars());
}
@Override
public LongPointData toPoint(Measurement measurement) {
return ImmutableLongPointData.create(
measurement.startEpochNanos(),
measurement.epochNanos(),
measurement.attributes(),
measurement.longValue());
} }
@Override @Override
@ -57,29 +72,18 @@ public final class LongSumAggregator
Resource resource, Resource resource,
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
MetricDescriptor descriptor, MetricDescriptor descriptor,
Map<Attributes, LongAccumulation> accumulationByLabels, Collection<LongPointData> points,
AggregationTemporality temporality, AggregationTemporality temporality) {
long startEpochNanos,
long lastCollectionEpoch,
long epochNanos) {
return ImmutableMetricData.createLongSum( return ImmutableMetricData.createLongSum(
resource, resource,
instrumentationScopeInfo, instrumentationScopeInfo,
descriptor.getName(), descriptor.getName(),
descriptor.getDescription(), descriptor.getDescription(),
descriptor.getSourceInstrument().getUnit(), descriptor.getSourceInstrument().getUnit(),
ImmutableSumData.create( ImmutableSumData.create(isMonotonic(), temporality, points));
isMonotonic(),
temporality,
MetricDataUtils.toLongPointList(
accumulationByLabels,
temporality == AggregationTemporality.CUMULATIVE
? startEpochNanos
: lastCollectionEpoch,
epochNanos)));
} }
static final class Handle extends AggregatorHandle<LongAccumulation, LongExemplarData> { static final class Handle extends AggregatorHandle<LongPointData, LongExemplarData> {
private final LongAdder current = AdderUtil.createLongAdder(); private final LongAdder current = AdderUtil.createLongAdder();
Handle(ExemplarReservoir<LongExemplarData> exemplarReservoir) { Handle(ExemplarReservoir<LongExemplarData> exemplarReservoir) {
@ -87,12 +91,15 @@ public final class LongSumAggregator
} }
@Override @Override
protected LongAccumulation doAccumulateThenMaybeReset( protected LongPointData doAggregateThenMaybeReset(
List<LongExemplarData> exemplars, boolean reset) { long startEpochNanos,
if (reset) { long epochNanos,
return LongAccumulation.create(this.current.sumThenReset(), exemplars); Attributes attributes,
} List<LongExemplarData> exemplars,
return LongAccumulation.create(this.current.sum(), exemplars); boolean reset) {
long value = reset ? this.current.sumThenReset() : this.current.sum();
return ImmutableLongPointData.create(
startEpochNanos, epochNanos, attributes, value, exemplars);
} }
@Override @Override

View File

@ -1,111 +0,0 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.aggregator;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.internal.PrimitiveLongList;
import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
final class MetricDataUtils {
private MetricDataUtils() {}
/** Returns true if the instrument does not allow negative measurements. */
static boolean isMonotonicInstrument(InstrumentDescriptor descriptor) {
InstrumentType type = descriptor.getType();
return type == InstrumentType.HISTOGRAM
|| type == InstrumentType.COUNTER
|| type == InstrumentType.OBSERVABLE_COUNTER;
}
static List<LongPointData> toLongPointList(
Map<Attributes, LongAccumulation> accumulationMap, long startEpochNanos, long epochNanos) {
List<LongPointData> points = new ArrayList<>(accumulationMap.size());
accumulationMap.forEach(
(labels, accumulation) ->
points.add(
ImmutableLongPointData.create(
startEpochNanos,
epochNanos,
labels,
accumulation.getValue(),
accumulation.getExemplars())));
return points;
}
static List<DoublePointData> toDoublePointList(
Map<Attributes, DoubleAccumulation> accumulationMap, long startEpochNanos, long epochNanos) {
List<DoublePointData> points = new ArrayList<>(accumulationMap.size());
accumulationMap.forEach(
(labels, accumulation) ->
points.add(
ImmutableDoublePointData.create(
startEpochNanos,
epochNanos,
labels,
accumulation.getValue(),
accumulation.getExemplars())));
return points;
}
static List<HistogramPointData> toExplicitBucketHistogramPointList(
Map<Attributes, ExplicitBucketHistogramAccumulation> accumulationMap,
long startEpochNanos,
long epochNanos,
List<Double> boundaries) {
List<HistogramPointData> points = new ArrayList<>(accumulationMap.size());
accumulationMap.forEach(
(labels, aggregator) -> {
List<Long> counts = PrimitiveLongList.wrap(aggregator.getCounts().clone());
points.add(
ImmutableHistogramPointData.create(
startEpochNanos,
epochNanos,
labels,
aggregator.getSum(),
aggregator.getMin(),
aggregator.getMax(),
boundaries,
counts,
aggregator.getExemplars()));
});
return points;
}
static List<ExponentialHistogramPointData> toExponentialHistogramPointList(
Map<Attributes, ExponentialHistogramAccumulation> accumulationMap,
long startEpochNanos,
long epochNanos) {
List<ExponentialHistogramPointData> points = new ArrayList<>(accumulationMap.size());
accumulationMap.forEach(
(attributes, aggregator) ->
points.add(
ImmutableExponentialHistogramPointData.create(
aggregator.getScale(),
aggregator.getSum(),
aggregator.getZeroCount(),
aggregator.getMin(),
aggregator.getMax(),
aggregator.getPositiveBuckets(),
aggregator.getNegativeBuckets(),
startEpochNanos,
epochNanos,
attributes,
aggregator.getExemplars())));
return points;
}
}

View File

@ -15,6 +15,7 @@ import io.opentelemetry.sdk.metrics.View;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
@ -35,7 +36,8 @@ import java.util.logging.Logger;
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change * <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time. * at any time.
*/ */
final class AsynchronousMetricStorage<T, U extends ExemplarData> implements MetricStorage { final class AsynchronousMetricStorage<T extends PointData, U extends ExemplarData>
implements MetricStorage {
private static final Logger logger = Logger.getLogger(AsynchronousMetricStorage.class.getName()); private static final Logger logger = Logger.getLogger(AsynchronousMetricStorage.class.getName());
private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger); private final ThrottlingLogger throttlingLogger = new ThrottlingLogger(logger);
@ -44,8 +46,8 @@ final class AsynchronousMetricStorage<T, U extends ExemplarData> implements Metr
private final AggregationTemporality aggregationTemporality; private final AggregationTemporality aggregationTemporality;
private final Aggregator<T, U> aggregator; private final Aggregator<T, U> aggregator;
private final AttributesProcessor attributesProcessor; private final AttributesProcessor attributesProcessor;
private Map<Attributes, T> accumulations = new HashMap<>(); private Map<Attributes, T> points = new HashMap<>();
private Map<Attributes, T> lastAccumulations = private Map<Attributes, T> lastPoints =
new HashMap<>(); // Only populated if aggregationTemporality == DELTA new HashMap<>(); // Only populated if aggregationTemporality == DELTA
private AsynchronousMetricStorage( private AsynchronousMetricStorage(
@ -67,7 +69,7 @@ final class AsynchronousMetricStorage<T, U extends ExemplarData> implements Metr
* Create an asynchronous storage instance for the {@link View} and {@link InstrumentDescriptor}. * Create an asynchronous storage instance for the {@link View} and {@link InstrumentDescriptor}.
*/ */
// TODO(anuraaga): The cast to generic type here looks suspicious. // TODO(anuraaga): The cast to generic type here looks suspicious.
static <T, U extends ExemplarData> AsynchronousMetricStorage<T, U> create( static <T extends PointData, U extends ExemplarData> AsynchronousMetricStorage<T, U> create(
RegisteredReader registeredReader, RegisteredReader registeredReader,
RegisteredView registeredView, RegisteredView registeredView,
InstrumentDescriptor instrumentDescriptor) { InstrumentDescriptor instrumentDescriptor) {
@ -84,38 +86,42 @@ final class AsynchronousMetricStorage<T, U extends ExemplarData> implements Metr
registeredView.getViewAttributesProcessor()); registeredView.getViewAttributesProcessor());
} }
/** Record callback long measurements from {@link ObservableLongMeasurement}. */ /**
void recordLong(long value, Attributes attributes) { * Record callback measurement from {@link ObservableLongMeasurement} or {@link
T accumulation = aggregator.accumulateLongMeasurement(value, attributes, Context.current()); * ObservableDoubleMeasurement}.
if (accumulation != null) { */
recordAccumulation(accumulation, attributes); void record(Measurement measurement) {
} Context context = Context.current();
Attributes processedAttributes = attributesProcessor.process(measurement.attributes(), context);
long start =
aggregationTemporality == AggregationTemporality.DELTA
? registeredReader.getLastCollectEpochNanos()
: measurement.startEpochNanos();
measurement =
measurement.hasDoubleValue()
? Measurement.doubleMeasurement(
start, measurement.epochNanos(), measurement.doubleValue(), processedAttributes)
: Measurement.longMeasurement(
start, measurement.epochNanos(), measurement.longValue(), processedAttributes);
recordPoint(aggregator.toPoint(measurement));
} }
/** Record callback double measurements from {@link ObservableDoubleMeasurement}. */ private void recordPoint(T point) {
void recordDouble(double value, Attributes attributes) { Attributes attributes = point.getAttributes();
T accumulation = aggregator.accumulateDoubleMeasurement(value, attributes, Context.current());
if (accumulation != null) {
recordAccumulation(accumulation, attributes);
}
}
private void recordAccumulation(T accumulation, Attributes attributes) { if (points.size() >= MetricStorage.MAX_CARDINALITY) {
Attributes processedAttributes = attributesProcessor.process(attributes, Context.current());
if (accumulations.size() >= MetricStorage.MAX_ACCUMULATIONS) {
throttlingLogger.log( throttlingLogger.log(
Level.WARNING, Level.WARNING,
"Instrument " "Instrument "
+ metricDescriptor.getSourceInstrument().getName() + metricDescriptor.getSourceInstrument().getName()
+ " has exceeded the maximum allowed accumulations (" + " has exceeded the maximum allowed cardinality ("
+ MetricStorage.MAX_ACCUMULATIONS + MetricStorage.MAX_CARDINALITY
+ ")."); + ").");
return; return;
} }
// Check there is not already a recording for the attributes // Check there is not already a recording for the attributes
if (accumulations.containsKey(attributes)) { if (points.containsKey(attributes)) {
throttlingLogger.log( throttlingLogger.log(
Level.WARNING, Level.WARNING,
"Instrument " "Instrument "
@ -124,7 +130,7 @@ final class AsynchronousMetricStorage<T, U extends ExemplarData> implements Metr
return; return;
} }
accumulations.put(processedAttributes, accumulation); points.put(attributes, point);
} }
@Override @Override
@ -145,27 +151,23 @@ final class AsynchronousMetricStorage<T, U extends ExemplarData> implements Metr
long epochNanos) { long epochNanos) {
Map<Attributes, T> result; Map<Attributes, T> result;
if (aggregationTemporality == AggregationTemporality.DELTA) { if (aggregationTemporality == AggregationTemporality.DELTA) {
Map<Attributes, T> accumulations = this.accumulations; Map<Attributes, T> points = this.points;
Map<Attributes, T> lastAccumulations = this.lastAccumulations; Map<Attributes, T> lastPoints = this.lastPoints;
lastAccumulations.entrySet().removeIf(entry -> !accumulations.containsKey(entry.getKey())); lastPoints.entrySet().removeIf(entry -> !points.containsKey(entry.getKey()));
accumulations.forEach( points.forEach(
(k, v) -> (k, v) -> lastPoints.compute(k, (k2, v2) -> v2 == null ? v : aggregator.diff(v2, v)));
lastAccumulations.compute(k, (k2, v2) -> v2 == null ? v : aggregator.diff(v2, v))); result = lastPoints;
result = lastAccumulations; this.lastPoints = points;
this.lastAccumulations = accumulations;
} else { } else {
result = accumulations; result = points;
} }
this.accumulations = new HashMap<>(); this.points = new HashMap<>();
return aggregator.toMetricData( return aggregator.toMetricData(
resource, resource,
instrumentationScopeInfo, instrumentationScopeInfo,
metricDescriptor, metricDescriptor,
result, result.values(),
aggregationTemporality, aggregationTemporality);
startEpochNanos,
registeredReader.getLastCollectEpochNanos(),
epochNanos);
} }
@Override @Override

View File

@ -70,7 +70,7 @@ public final class CallbackRegistration {
return "CallbackRegistration{instrumentDescriptors=" + instrumentDescriptors + "}"; return "CallbackRegistration{instrumentDescriptors=" + instrumentDescriptors + "}";
} }
void invokeCallback(RegisteredReader reader) { void invokeCallback(RegisteredReader reader, long startEpochNanos, long epochNanos) {
// Return early if no storages are registered // Return early if no storages are registered
if (!hasStorages) { if (!hasStorages) {
return; return;
@ -78,7 +78,8 @@ public final class CallbackRegistration {
// Set the active reader on each observable measurement so that measurements are only recorded // Set the active reader on each observable measurement so that measurements are only recorded
// to relevant storages // to relevant storages
observableMeasurements.forEach( observableMeasurements.forEach(
observableMeasurement -> observableMeasurement.setActiveReader(reader)); observableMeasurement ->
observableMeasurement.setActiveReader(reader, startEpochNanos, epochNanos));
try { try {
callback.run(); callback.run();
} catch (Throwable e) { } catch (Throwable e) {
@ -87,7 +88,7 @@ public final class CallbackRegistration {
Level.WARNING, "An exception occurred invoking callback for " + this + ".", e); Level.WARNING, "An exception occurred invoking callback for " + this + ".", e);
} finally { } finally {
observableMeasurements.forEach( observableMeasurements.forEach(
observableMeasurement -> observableMeasurement.setActiveReader(null)); observableMeasurement -> observableMeasurement.unsetActiveReader());
} }
} }
} }

View File

@ -12,6 +12,7 @@ import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorHandle; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorHandle;
import io.opentelemetry.sdk.metrics.internal.aggregator.EmptyMetricData; import io.opentelemetry.sdk.metrics.internal.aggregator.EmptyMetricData;
@ -19,7 +20,8 @@ import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader; import io.opentelemetry.sdk.metrics.internal.export.RegisteredReader;
import io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor; import io.opentelemetry.sdk.metrics.internal.view.AttributesProcessor;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.HashMap; import java.util.ArrayList;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
@ -32,7 +34,7 @@ import java.util.logging.Logger;
* <p>This class is internal and is hence not for public use. Its APIs are unstable and can change * <p>This class is internal and is hence not for public use. Its APIs are unstable and can change
* at any time. * at any time.
*/ */
public final class DefaultSynchronousMetricStorage<T, U extends ExemplarData> public final class DefaultSynchronousMetricStorage<T extends PointData, U extends ExemplarData>
implements SynchronousMetricStorage { implements SynchronousMetricStorage {
private static final BoundStorageHandle NOOP_STORAGE_HANDLE = new NoopBoundHandle(); private static final BoundStorageHandle NOOP_STORAGE_HANDLE = new NoopBoundHandle();
@ -100,13 +102,13 @@ public final class DefaultSynchronousMetricStorage<T, U extends ExemplarData>
// Missing entry or no longer mapped. Try to add a new one if not exceeded cardinality limits. // Missing entry or no longer mapped. Try to add a new one if not exceeded cardinality limits.
aggregatorHandle = aggregator.createHandle(); aggregatorHandle = aggregator.createHandle();
while (true) { while (true) {
if (activeCollectionStorage.size() >= MAX_ACCUMULATIONS) { if (activeCollectionStorage.size() >= MAX_CARDINALITY) {
logger.log( logger.log(
Level.WARNING, Level.WARNING,
"Instrument " "Instrument "
+ metricDescriptor.getSourceInstrument().getName() + metricDescriptor.getSourceInstrument().getName()
+ " has exceeded the maximum allowed accumulations (" + " has exceeded the maximum allowed cardinality ("
+ MAX_ACCUMULATIONS + MAX_CARDINALITY
+ ")."); + ").");
return NOOP_STORAGE_HANDLE; return NOOP_STORAGE_HANDLE;
} }
@ -159,9 +161,13 @@ public final class DefaultSynchronousMetricStorage<T, U extends ExemplarData>
long startEpochNanos, long startEpochNanos,
long epochNanos) { long epochNanos) {
boolean reset = aggregationTemporality == AggregationTemporality.DELTA; boolean reset = aggregationTemporality == AggregationTemporality.DELTA;
long start =
aggregationTemporality == AggregationTemporality.DELTA
? registeredReader.getLastCollectEpochNanos()
: startEpochNanos;
// Grab accumulated measurements. // Grab aggregated points.
Map<Attributes, T> accumulations = new HashMap<>(); List<T> points = new ArrayList<>(activeCollectionStorage.size());
for (Map.Entry<Attributes, AggregatorHandle<T, U>> entry : activeCollectionStorage.entrySet()) { for (Map.Entry<Attributes, AggregatorHandle<T, U>> entry : activeCollectionStorage.entrySet()) {
if (reset) { if (reset) {
boolean unmappedEntry = entry.getValue().tryUnmap(); boolean unmappedEntry = entry.getValue().tryUnmap();
@ -171,26 +177,19 @@ public final class DefaultSynchronousMetricStorage<T, U extends ExemplarData>
activeCollectionStorage.remove(entry.getKey(), entry.getValue()); activeCollectionStorage.remove(entry.getKey(), entry.getValue());
} }
} }
T accumulation = entry.getValue().accumulateThenMaybeReset(entry.getKey(), reset); T point = entry.getValue().aggregateThenMaybeReset(start, epochNanos, entry.getKey(), reset);
if (accumulation == null) { if (point == null) {
continue; continue;
} }
accumulations.put(entry.getKey(), accumulation); points.add(point);
} }
if (accumulations.isEmpty()) { if (points.isEmpty()) {
return EmptyMetricData.getInstance(); return EmptyMetricData.getInstance();
} }
return aggregator.toMetricData( return aggregator.toMetricData(
resource, resource, instrumentationScopeInfo, metricDescriptor, points, aggregationTemporality);
instrumentationScopeInfo,
metricDescriptor,
accumulations,
aggregationTemporality,
startEpochNanos,
registeredReader.getLastCollectEpochNanos(),
epochNanos);
} }
@Override @Override

View File

@ -0,0 +1,57 @@
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.metrics.internal.state;
import com.google.auto.value.AutoValue;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.ObservableDoubleMeasurement;
import io.opentelemetry.api.metrics.ObservableLongMeasurement;
/**
* A long or double measurement recorded from {@link ObservableLongMeasurement} or {@link
* ObservableDoubleMeasurement}.
*/
@AutoValue
public abstract class Measurement {
/** Creates a {@link Measurement} holding a double value for the window [startEpochNanos, epochNanos]. */
static Measurement doubleMeasurement(
long startEpochNanos, long epochNanos, double value, Attributes attributes) {
// Argument order must match the declaration order of the abstract accessors below,
// since that is the parameter order of the generated AutoValue_Measurement constructor.
return new AutoValue_Measurement(
startEpochNanos,
epochNanos,
/* hasLongValue= */ false,
0L,
/* hasDoubleValue= */ true,
value,
attributes);
}
/** Creates a {@link Measurement} holding a long value for the window [startEpochNanos, epochNanos]. */
static Measurement longMeasurement(
long startEpochNanos, long epochNanos, long value, Attributes attributes) {
return new AutoValue_Measurement(
startEpochNanos,
epochNanos,
/* hasLongValue= */ true,
value,
/* hasDoubleValue= */ false,
0.0,
attributes);
}
/** Start of the measurement's time window, in epoch nanoseconds. */
public abstract long startEpochNanos();
/** End of the measurement's time window, in epoch nanoseconds. */
public abstract long epochNanos();
/** Returns true if this measurement carries a long value. */
public abstract boolean hasLongValue();
/** The long value; the factories set this to {@code 0L} when {@link #hasLongValue()} is false. */
public abstract long longValue();
/** Returns true if this measurement carries a double value. */
public abstract boolean hasDoubleValue();
/** The double value; the factories set this to {@code 0.0} when {@link #hasDoubleValue()} is false. */
public abstract double doubleValue();
/** The attributes the value was recorded with. */
public abstract Attributes attributes();
}

View File

@ -81,7 +81,7 @@ public class MeterSharedState {
return instrumentationScopeInfo; return instrumentationScopeInfo;
} }
/** Collects all accumulated metric stream points. */ /** Collects all metrics. */
public List<MetricData> collectAll( public List<MetricData> collectAll(
RegisteredReader registeredReader, RegisteredReader registeredReader,
MeterProviderSharedState meterProviderSharedState, MeterProviderSharedState meterProviderSharedState,
@ -93,7 +93,8 @@ public class MeterSharedState {
// Collections across all readers are sequential // Collections across all readers are sequential
synchronized (collectLock) { synchronized (collectLock) {
for (CallbackRegistration callbackRegistration : currentRegisteredCallbacks) { for (CallbackRegistration callbackRegistration : currentRegisteredCallbacks) {
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(
registeredReader, meterProviderSharedState.getStartEpochNanos(), epochNanos);
} }
Collection<MetricStorage> storages = Collection<MetricStorage> storages =

View File

@ -20,8 +20,8 @@ import io.opentelemetry.sdk.resources.Resource;
*/ */
public interface MetricStorage { public interface MetricStorage {
/** The max number of metric accumulations for a particular {@link MetricStorage}. */ /** The max number of distinct metric points for a particular {@link MetricStorage}. */
int MAX_ACCUMULATIONS = 2000; int MAX_CARDINALITY = 2000;
/** Returns a description of the metric produced in this storage. */ /** Returns a description of the metric produced in this storage. */
MetricDescriptor getMetricDescriptor(); MetricDescriptor getMetricDescriptor();

View File

@ -5,6 +5,9 @@
package io.opentelemetry.sdk.metrics.internal.state; package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement;
import io.opentelemetry.api.common.Attributes; import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; import io.opentelemetry.api.metrics.ObservableDoubleMeasurement;
import io.opentelemetry.api.metrics.ObservableLongMeasurement; import io.opentelemetry.api.metrics.ObservableLongMeasurement;
@ -32,7 +35,12 @@ public final class SdkObservableMeasurement
private final InstrumentationScopeInfo instrumentationScopeInfo; private final InstrumentationScopeInfo instrumentationScopeInfo;
private final InstrumentDescriptor instrumentDescriptor; private final InstrumentDescriptor instrumentDescriptor;
private final List<AsynchronousMetricStorage<?, ?>> storages; private final List<AsynchronousMetricStorage<?, ?>> storages;
// These fields are set before invoking callbacks. They allow measurements to be recorded to the
// storages for correct reader, and with the correct time.
@Nullable private volatile RegisteredReader activeReader; @Nullable private volatile RegisteredReader activeReader;
private volatile long startEpochNanos;
private volatile long epochNanos;
private SdkObservableMeasurement( private SdkObservableMeasurement(
InstrumentationScopeInfo instrumentationScopeInfo, InstrumentationScopeInfo instrumentationScopeInfo,
@ -63,8 +71,22 @@ public final class SdkObservableMeasurement
return instrumentationScopeInfo; return instrumentationScopeInfo;
} }
public void setActiveReader(@Nullable RegisteredReader registeredReader) { /**
* Set the active reader, and clock information. {@link #unsetActiveReader()} MUST be called
* after.
*/
public void setActiveReader(
RegisteredReader registeredReader, long startEpochNanos, long epochNanos) {
this.activeReader = registeredReader; this.activeReader = registeredReader;
this.startEpochNanos = startEpochNanos;
this.epochNanos = epochNanos;
}
/**
* Unset the active reader. Called after {@link #setActiveReader(RegisteredReader, long, long)}.
*/
public void unsetActiveReader() {
this.activeReader = null;
} }
InstrumentDescriptor getInstrumentDescriptor() { InstrumentDescriptor getInstrumentDescriptor() {
@ -82,20 +104,7 @@ public final class SdkObservableMeasurement
@Override @Override
public void record(long value, Attributes attributes) { public void record(long value, Attributes attributes) {
RegisteredReader activeReader = this.activeReader; doRecord(longMeasurement(startEpochNanos, epochNanos, value, attributes));
if (activeReader == null) {
throttlingLogger.log(
Level.FINE,
"Measurement recorded for instrument "
+ instrumentDescriptor.getName()
+ " outside callback registered to instrument. Dropping measurement.");
return;
}
for (AsynchronousMetricStorage<?, ?> storage : storages) {
if (storage.getRegisteredReader().equals(activeReader)) {
storage.recordLong(value, attributes);
}
}
} }
@Override @Override
@ -105,6 +114,10 @@ public final class SdkObservableMeasurement
@Override @Override
public void record(double value, Attributes attributes) { public void record(double value, Attributes attributes) {
doRecord(doubleMeasurement(startEpochNanos, epochNanos, value, attributes));
}
private void doRecord(Measurement measurement) {
RegisteredReader activeReader = this.activeReader; RegisteredReader activeReader = this.activeReader;
if (activeReader == null) { if (activeReader == null) {
throttlingLogger.log( throttlingLogger.log(
@ -116,7 +129,7 @@ public final class SdkObservableMeasurement
} }
for (AsynchronousMetricStorage<?, ?> storage : storages) { for (AsynchronousMetricStorage<?, ?> storage : storages) {
if (storage.getRegisteredReader().equals(activeReader)) { if (storage.getRegisteredReader().equals(activeReader)) {
storage.recordDouble(value, attributes); storage.record(measurement);
} }
} }
} }

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.state;
import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.View;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
@ -35,7 +36,7 @@ public interface SynchronousMetricStorage extends MetricStorage, WriteableMetric
* @return The storage, or {@link EmptyMetricStorage#empty()} if the instrument should not be * @return The storage, or {@link EmptyMetricStorage#empty()} if the instrument should not be
* recorded. * recorded.
*/ */
static <T, U extends ExemplarData> SynchronousMetricStorage create( static <T extends PointData, U extends ExemplarData> SynchronousMetricStorage create(
RegisteredReader registeredReader, RegisteredReader registeredReader,
RegisteredView registeredView, RegisteredView registeredView,
InstrumentDescriptor instrumentDescriptor, InstrumentDescriptor instrumentDescriptor,

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.view;
import io.opentelemetry.sdk.internal.ThrottlingLogger; import io.opentelemetry.sdk.internal.ThrottlingLogger;
import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
@ -51,7 +52,7 @@ public final class DefaultAggregation implements Aggregation, AggregatorFactory
} }
@Override @Override
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
return ((AggregatorFactory) resolve(instrumentDescriptor)) return ((AggregatorFactory) resolve(instrumentDescriptor))
.createAggregator(instrumentDescriptor, exemplarFilter); .createAggregator(instrumentDescriptor, exemplarFilter);

View File

@ -7,6 +7,7 @@ package io.opentelemetry.sdk.metrics.internal.view;
import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
@ -30,7 +31,7 @@ public final class DropAggregation implements Aggregation, AggregatorFactory {
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
return (Aggregator<T, U>) Aggregator.drop(); return (Aggregator<T, U>) Aggregator.drop();
} }

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.view;
import io.opentelemetry.sdk.common.Clock; import io.opentelemetry.sdk.common.Clock;
import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleExplicitBucketHistogramAggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleExplicitBucketHistogramAggregator;
@ -48,7 +49,7 @@ public final class ExplicitBucketHistogramAggregation implements Aggregation, Ag
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
return (Aggregator<T, U>) return (Aggregator<T, U>)
new DoubleExplicitBucketHistogramAggregator( new DoubleExplicitBucketHistogramAggregator(

View File

@ -12,6 +12,7 @@ import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleExponentialHistogramAggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleExponentialHistogramAggregator;
@ -64,7 +65,7 @@ public final class ExponentialHistogramAggregation implements Aggregation, Aggre
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
return (Aggregator<T, U>) return (Aggregator<T, U>)
new DoubleExponentialHistogramAggregator( new DoubleExponentialHistogramAggregator(

View File

@ -8,6 +8,7 @@ package io.opentelemetry.sdk.metrics.internal.view;
import io.opentelemetry.sdk.metrics.Aggregation; import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleLastValueAggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleLastValueAggregator;
@ -34,7 +35,7 @@ public final class LastValueAggregation implements Aggregation, AggregatorFactor
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
// For the initial version we do not sample exemplars on gauges. // For the initial version we do not sample exemplars on gauges.

View File

@ -11,6 +11,7 @@ import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleSumAggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleSumAggregator;
@ -37,7 +38,7 @@ public final class SumAggregation implements Aggregation, AggregatorFactory {
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T, U extends ExemplarData> Aggregator<T, U> createAggregator( public <T extends PointData, U extends ExemplarData> Aggregator<T, U> createAggregator(
InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) { InstrumentDescriptor instrumentDescriptor, ExemplarFilter exemplarFilter) {
switch (instrumentDescriptor.getValueType()) { switch (instrumentDescriptor.getValueType()) {
case LONG: case LONG:

View File

@ -26,8 +26,8 @@ import org.junit.jupiter.api.Test;
@SuppressLogger(DefaultSynchronousMetricStorage.class) @SuppressLogger(DefaultSynchronousMetricStorage.class)
class CardinalityTest { class CardinalityTest {
/** Traces {@code MetricStorageUtils#MAX_ACCUMULATIONS}. */ /** Traces {@code MetricStorageUtils#MAX_CARDINALITY}. */
private static final int MAX_ACCUMULATIONS = 2000; private static final int MAX_CARDINALITY = 2000;
private InMemoryMetricReader deltaReader; private InMemoryMetricReader deltaReader;
private InMemoryMetricReader cumulativeReader; private InMemoryMetricReader cumulativeReader;
@ -159,13 +159,13 @@ class CardinalityTest {
/** /**
* Records to sync instruments, many distinct attributes. Validates that the {@code * Records to sync instruments, many distinct attributes. Validates that the {@code
* MetricStorageUtils#MAX_ACCUMULATIONS} is enforced for each instrument. * MetricStorageUtils#MAX_CARDINALITY} is enforced for each instrument.
*/ */
@Test @Test
void cardinalityLimits_synchronousInstrument() { void cardinalityLimits_synchronousInstrument() {
LongCounter syncCounter1 = meter.counterBuilder("sync-counter1").build(); LongCounter syncCounter1 = meter.counterBuilder("sync-counter1").build();
LongCounter syncCounter2 = meter.counterBuilder("sync-counter2").build(); LongCounter syncCounter2 = meter.counterBuilder("sync-counter2").build();
for (int i = 0; i < MAX_ACCUMULATIONS + 1; i++) { for (int i = 0; i < MAX_CARDINALITY + 1; i++) {
syncCounter1.add(1, Attributes.builder().put("key", "value" + i).build()); syncCounter1.add(1, Attributes.builder().put("key", "value" + i).build());
syncCounter2.add(1, Attributes.builder().put("key", "value" + i).build()); syncCounter2.add(1, Attributes.builder().put("key", "value" + i).build());
} }
@ -183,7 +183,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))), .isEqualTo(MAX_CARDINALITY))),
metricData -> metricData ->
assertThat(metricData) assertThat(metricData)
.hasName("sync-counter2") .hasName("sync-counter2")
@ -194,7 +194,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS)))); .isEqualTo(MAX_CARDINALITY))));
assertThat(cumulativeReader.collectAllMetrics()) assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection") .as("Cumulative collection")
@ -209,7 +209,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))), .isEqualTo(MAX_CARDINALITY))),
metricData -> metricData ->
assertThat(metricData) assertThat(metricData)
.hasName("sync-counter2") .hasName("sync-counter2")
@ -220,18 +220,18 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS)))); .isEqualTo(MAX_CARDINALITY))));
} }
/** /**
* Records to sync instruments, many distinct attributes. Validates that the {@code * Records to sync instruments, many distinct attributes. Validates that the {@code
* MetricStorageUtils#MAX_ACCUMULATIONS} is enforced for each instrument. * MetricStorageUtils#MAX_CARDINALITY} is enforced for each instrument.
*/ */
@Test @Test
void cardinalityLimits_asynchronousInstrument() { void cardinalityLimits_asynchronousInstrument() {
Consumer<ObservableLongMeasurement> callback = Consumer<ObservableLongMeasurement> callback =
measurement -> { measurement -> {
for (int i = 0; i < MAX_ACCUMULATIONS + 1; i++) { for (int i = 0; i < MAX_CARDINALITY + 1; i++) {
measurement.record(1, Attributes.builder().put("key", "value" + i).build()); measurement.record(1, Attributes.builder().put("key", "value" + i).build());
} }
}; };
@ -251,7 +251,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))), .isEqualTo(MAX_CARDINALITY))),
metricData -> metricData ->
assertThat(metricData) assertThat(metricData)
.hasName("async-counter2") .hasName("async-counter2")
@ -262,7 +262,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS)))); .isEqualTo(MAX_CARDINALITY))));
assertThat(cumulativeReader.collectAllMetrics()) assertThat(cumulativeReader.collectAllMetrics())
.as("Cumulative collection") .as("Cumulative collection")
@ -277,7 +277,7 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS))), .isEqualTo(MAX_CARDINALITY))),
metricData -> metricData ->
assertThat(metricData) assertThat(metricData)
.hasName("async-counter2") .hasName("async-counter2")
@ -288,6 +288,6 @@ class CardinalityTest {
(Consumer<SumData<LongPointData>>) (Consumer<SumData<LongPointData>>)
sumPointData -> sumPointData ->
assertThat(sumPointData.getPoints().size()) assertThat(sumPointData.getPoints().size())
.isEqualTo(MAX_ACCUMULATIONS)))); .isEqualTo(MAX_CARDINALITY))));
} }
} }

View File

@ -16,6 +16,7 @@ import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExemplarData; import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.PointData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import java.util.Collections; import java.util.Collections;
@ -98,7 +99,7 @@ class AggregatorHandleTest {
assertThat(testAggregator.recordedLong.get()).isEqualTo(22); assertThat(testAggregator.recordedLong.get()).isEqualTo(22);
assertThat(testAggregator.recordedDouble.get()).isEqualTo(0); assertThat(testAggregator.recordedDouble.get()).isEqualTo(0);
testAggregator.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); testAggregator.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
assertThat(testAggregator.recordedLong.get()).isEqualTo(0); assertThat(testAggregator.recordedLong.get()).isEqualTo(0);
assertThat(testAggregator.recordedDouble.get()).isEqualTo(0); assertThat(testAggregator.recordedDouble.get()).isEqualTo(0);
@ -106,7 +107,7 @@ class AggregatorHandleTest {
assertThat(testAggregator.recordedLong.get()).isEqualTo(0); assertThat(testAggregator.recordedLong.get()).isEqualTo(0);
assertThat(testAggregator.recordedDouble.get()).isEqualTo(33.55); assertThat(testAggregator.recordedDouble.get()).isEqualTo(33.55);
testAggregator.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); testAggregator.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
assertThat(testAggregator.recordedLong.get()).isEqualTo(0); assertThat(testAggregator.recordedLong.get()).isEqualTo(0);
assertThat(testAggregator.recordedDouble.get()).isEqualTo(0); assertThat(testAggregator.recordedDouble.get()).isEqualTo(0);
} }
@ -148,12 +149,12 @@ class AggregatorHandleTest {
testAggregator.recordDouble(1.0, Attributes.empty(), Context.root()); testAggregator.recordDouble(1.0, Attributes.empty(), Context.root());
Mockito.when(doubleReservoir.collectAndReset(attributes)) Mockito.when(doubleReservoir.collectAndReset(attributes))
.thenReturn(Collections.singletonList(result)); .thenReturn(Collections.singletonList(result));
testAggregator.accumulateThenMaybeReset(attributes, /* reset= */ true); testAggregator.aggregateThenMaybeReset(0, 1, attributes, /* reset= */ true);
assertThat(testAggregator.recordedExemplars.get()).containsExactly(result); assertThat(testAggregator.recordedExemplars.get()).containsExactly(result);
} }
private static class TestAggregatorHandle<T extends ExemplarData> private static class TestAggregatorHandle<T extends ExemplarData>
extends AggregatorHandle<Void, T> { extends AggregatorHandle<PointData, T> {
final AtomicLong recordedLong = new AtomicLong(); final AtomicLong recordedLong = new AtomicLong();
final AtomicDouble recordedDouble = new AtomicDouble(); final AtomicDouble recordedDouble = new AtomicDouble();
final AtomicReference<List<T>> recordedExemplars = new AtomicReference<>(); final AtomicReference<List<T>> recordedExemplars = new AtomicReference<>();
@ -164,7 +165,12 @@ class AggregatorHandleTest {
@Nullable @Nullable
@Override @Override
protected Void doAccumulateThenMaybeReset(List<T> exemplars, boolean reset) { protected PointData doAggregateThenMaybeReset(
long startEpochNanos,
long epochNanos,
Attributes attributes,
List<T> exemplars,
boolean reset) {
recordedLong.set(0); recordedLong.set(0);
recordedDouble.set(0); recordedDouble.set(0);
recordedExemplars.set(exemplars); recordedExemplars.set(exemplars);

View File

@ -16,18 +16,22 @@ import io.opentelemetry.context.Context;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.HistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableHistogramPointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.DoubleStream;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock; import org.mockito.Mock;
@ -40,6 +44,8 @@ class DoubleExplicitBucketHistogramAggregatorTest {
@Mock ExemplarReservoir<DoubleExemplarData> reservoir; @Mock ExemplarReservoir<DoubleExemplarData> reservoir;
private static final double[] boundaries = new double[] {10.0, 100.0, 1000.0}; private static final double[] boundaries = new double[] {10.0, 100.0, 1000.0};
private static final List<Double> boundariesList =
DoubleStream.of(boundaries).boxed().collect(Collectors.toList());
private static final Resource RESOURCE = Resource.getDefault(); private static final Resource RESOURCE = Resource.getDefault();
private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO = private static final InstrumentationScopeInfo INSTRUMENTATION_SCOPE_INFO =
InstrumentationScopeInfo.empty(); InstrumentationScopeInfo.empty();
@ -56,20 +62,28 @@ class DoubleExplicitBucketHistogramAggregatorTest {
@Test @Test
void testRecordings() { void testRecordings() {
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordLong(20); aggregatorHandle.recordLong(20);
aggregatorHandle.recordLong(5); aggregatorHandle.recordLong(5);
aggregatorHandle.recordLong(150); aggregatorHandle.recordLong(150);
aggregatorHandle.recordLong(2000); aggregatorHandle.recordLong(2000);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo( .isEqualTo(
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
2175, /* hasMinMax= */ true, 5d, 2000d, new long[] {1, 1, 1, 1})); 0,
1,
Attributes.empty(),
2175,
5d,
2000d,
boundariesList,
Arrays.asList(1L, 1L, 1L, 1L)));
} }
@Test @Test
void testExemplarsInAccumulation() { void aggregateThenMaybeReset_WithExemplars() {
Attributes attributes = Attributes.builder().put("test", "value").build(); Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar = DoubleExemplarData exemplar =
ImmutableDoubleExemplarData.create( ImmutableDoubleExemplarData.create(
@ -85,54 +99,70 @@ class DoubleExplicitBucketHistogramAggregatorTest {
Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars); Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars);
DoubleExplicitBucketHistogramAggregator aggregator = DoubleExplicitBucketHistogramAggregator aggregator =
new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir); new DoubleExplicitBucketHistogramAggregator(boundaries, () -> reservoir);
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(0, attributes, Context.root()); aggregatorHandle.recordDouble(0, attributes, Context.root());
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo( .isEqualTo(
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
0, /* hasMinMax= */ true, 0, 0, new long[] {1, 0, 0, 0}, exemplars)); 0,
1,
Attributes.empty(),
0,
0.0,
0.0,
boundariesList,
Arrays.asList(1L, 0L, 0L, 0L),
exemplars));
} }
@Test @Test
void toAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(100); aggregatorHandle.recordLong(100);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo( .isEqualTo(
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
100, /* hasMinMax= */ true, 100d, 100d, new long[] {0, 1, 0, 0})); 0,
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) 1,
Attributes.empty(),
100,
100d,
100d,
boundariesList,
Arrays.asList(0L, 1L, 0L, 0L)));
assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(0); aggregatorHandle.recordLong(0);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo( .isEqualTo(
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
0, /* hasMinMax= */ true, 0d, 0d, new long[] {1, 0, 0, 0})); 0,
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) 1,
Attributes.empty(),
0,
0d,
0d,
boundariesList,
Arrays.asList(1L, 0L, 0L, 0L)));
assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test
void accumulateData() {
assertThat(aggregator.accumulateDoubleMeasurement(11.1, Attributes.empty(), Context.current()))
.isEqualTo(
ExplicitBucketHistogramAccumulation.create(
11.1, /* hasMinMax= */ true, 11.1, 11.1, new long[] {0, 1, 0, 0}));
assertThat(aggregator.accumulateLongMeasurement(10, Attributes.empty(), Context.current()))
.isEqualTo(
ExplicitBucketHistogramAccumulation.create(
10.0, /* hasMinMax= */ true, 10.0, 10.0, new long[] {1, 0, 0, 0}));
}
@Test @Test
void toMetricData() { void toMetricData() {
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordLong(10); aggregatorHandle.recordLong(10);
@ -141,13 +171,10 @@ class DoubleExplicitBucketHistogramAggregatorTest {
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap( Collections.singletonList(
Attributes.empty(), aggregatorHandle.aggregateThenMaybeReset(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)), 0, 1, Attributes.empty(), /* reset= */ true)),
AggregationTemporality.DELTA, AggregationTemporality.DELTA);
0,
10,
100);
assertThat(metricData).isNotNull(); assertThat(metricData).isNotNull();
assertThat(metricData.getType()).isEqualTo(MetricDataType.HISTOGRAM); assertThat(metricData.getType()).isEqualTo(MetricDataType.HISTOGRAM);
assertThat(metricData.getHistogramData().getAggregationTemporality()) assertThat(metricData.getHistogramData().getAggregationTemporality())
@ -167,24 +194,24 @@ class DoubleExplicitBucketHistogramAggregatorTest {
TraceFlags.getDefault(), TraceFlags.getDefault(),
TraceState.getDefault()), TraceState.getDefault()),
1); 1);
ExplicitBucketHistogramAccumulation accumulation = HistogramPointData histPoint =
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
0,
1,
Attributes.empty(),
2, 2,
/* hasMinMax= */ true,
2d, 2d,
2d, 2d,
new long[] {1, 0, 0, 0}, boundariesList,
Arrays.asList(1L, 0L, 0L, 0L),
Collections.singletonList(exemplar)); Collections.singletonList(exemplar));
assertThat( assertThat(
aggregator.toMetricData( aggregator.toMetricData(
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap(Attributes.empty(), accumulation), Collections.singletonList(histPoint),
AggregationTemporality.CUMULATIVE, AggregationTemporality.CUMULATIVE))
0,
10,
100))
.hasHistogramSatisfying( .hasHistogramSatisfying(
histogram -> histogram ->
histogram.hasPointsSatisfying( histogram.hasPointsSatisfying(
@ -200,32 +227,18 @@ class DoubleExplicitBucketHistogramAggregatorTest {
@Test @Test
void testHistogramCounts() { void testHistogramCounts() {
assertThat( AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator
.accumulateDoubleMeasurement(1.1, Attributes.empty(), Context.root())
.getCounts()
.length)
.isEqualTo(boundaries.length + 1);
assertThat(
aggregator
.accumulateLongMeasurement(1, Attributes.empty(), Context.root())
.getCounts()
.length)
.isEqualTo(boundaries.length + 1);
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(1.1); aggregatorHandle.recordDouble(1.1);
ExplicitBucketHistogramAccumulation explicitBucketHistogramAccumulation = HistogramPointData point =
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
assertThat(explicitBucketHistogramAccumulation).isNotNull(); assertThat(point).isNotNull();
assertThat(explicitBucketHistogramAccumulation.getCounts().length) assertThat(point.getCounts().size()).isEqualTo(boundaries.length + 1);
.isEqualTo(boundaries.length + 1);
} }
@Test @Test
void testMultithreadedUpdates() throws InterruptedException { void testMultithreadedUpdates() throws InterruptedException {
AggregatorHandle<ExplicitBucketHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<HistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
ImmutableList<Long> updates = ImmutableList.of(1L, 2L, 3L, 5L, 7L, 11L, 13L, 17L, 19L, 23L); ImmutableList<Long> updates = ImmutableList.of(1L, 2L, 3L, 5L, 7L, 11L, 13L, 17L, 19L, 23L);
int numberOfThreads = updates.size(); int numberOfThreads = updates.size();
@ -242,16 +255,24 @@ class DoubleExplicitBucketHistogramAggregatorTest {
for (int j = 0; j < numberOfUpdates; j++) { for (int j = 0; j < numberOfUpdates; j++) {
aggregatorHandle.recordLong(v); aggregatorHandle.recordLong(v);
if (ThreadLocalRandom.current().nextInt(10) == 0) { if (ThreadLocalRandom.current().nextInt(10) == 0) {
aggregatorHandle.accumulateThenMaybeReset( aggregatorHandle.aggregateThenMaybeReset(
Attributes.empty(), /* reset= */ false); 0, 1, Attributes.empty(), /* reset= */ false);
} }
} }
})) }))
.collect(Collectors.toList())); .collect(Collectors.toList()));
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ false)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false))
.isEqualTo( .isEqualTo(
ExplicitBucketHistogramAccumulation.create( ImmutableHistogramPointData.create(
1010000, /* hasMinMax= */ true, 1d, 23d, new long[] {50000, 50000, 0, 0})); 0,
1,
Attributes.empty(),
1010000,
1d,
23d,
boundariesList,
Arrays.asList(50000L, 50000L, 0L, 0L)));
} }
} }

View File

@ -17,6 +17,7 @@ import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets; import io.opentelemetry.sdk.metrics.data.ExponentialHistogramBuckets;
import io.opentelemetry.sdk.metrics.data.ExponentialHistogramPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.data.MetricDataType; import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
@ -68,34 +69,25 @@ class DoubleExponentialHistogramAggregatorTest {
return (int) Math.ceil(Math.log(value) * scaleFactor) - 1; return (int) Math.ceil(Math.log(value) * scaleFactor) - 1;
} }
private static ExponentialHistogramAccumulation getTestAccumulation(
List<DoubleExemplarData> exemplars, double... recordings) {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle();
for (double r : recordings) {
aggregatorHandle.recordDouble(r);
}
return aggregatorHandle.doAccumulateThenMaybeReset(exemplars, /* reset= */ true);
}
@Test @Test
void createHandle() { void createHandle() {
AggregatorHandle<?, ?> handle = aggregator.createHandle(); AggregatorHandle<?, ?> handle = aggregator.createHandle();
assertThat(handle).isInstanceOf(DoubleExponentialHistogramAggregator.Handle.class); assertThat(handle).isInstanceOf(DoubleExponentialHistogramAggregator.Handle.class);
ExponentialHistogramAccumulation accumulation = ExponentialHistogramPointData point =
((DoubleExponentialHistogramAggregator.Handle) handle) ((DoubleExponentialHistogramAggregator.Handle) handle)
.doAccumulateThenMaybeReset(Collections.emptyList(), /* reset= */ true); .doAggregateThenMaybeReset(
assertThat(accumulation.getPositiveBuckets()) 0, 1, Attributes.empty(), Collections.emptyList(), /* reset= */ true);
assertThat(point.getPositiveBuckets())
.isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); .isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class);
assertThat(accumulation.getPositiveBuckets().getScale()).isEqualTo(MAX_SCALE); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(MAX_SCALE);
assertThat(accumulation.getNegativeBuckets()) assertThat(point.getNegativeBuckets())
.isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class); .isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class);
assertThat(accumulation.getNegativeBuckets().getScale()).isEqualTo(MAX_SCALE); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(MAX_SCALE);
} }
@Test @Test
void testRecordings() { void testRecordings() {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(0.5); aggregatorHandle.recordDouble(0.5);
aggregatorHandle.recordDouble(1.0); aggregatorHandle.recordDouble(1.0);
@ -108,28 +100,29 @@ class DoubleExponentialHistogramAggregatorTest {
aggregatorHandle.recordDouble(0.0); aggregatorHandle.recordDouble(0.0);
aggregatorHandle.recordLong(0); aggregatorHandle.recordLong(0);
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
List<Long> positiveCounts = Objects.requireNonNull(acc).getPositiveBuckets().getBucketCounts(); List<Long> positiveCounts =
List<Long> negativeCounts = acc.getNegativeBuckets().getBucketCounts(); Objects.requireNonNull(point).getPositiveBuckets().getBucketCounts();
List<Long> negativeCounts = point.getNegativeBuckets().getBucketCounts();
int expectedScale = 5; // should be downscaled from 20 to 5 after recordings int expectedScale = 5; // should be downscaled from 20 to 5 after recordings
assertThat(acc.getScale()).isEqualTo(expectedScale); assertThat(point.getScale()).isEqualTo(expectedScale);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(expectedScale); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(expectedScale);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(expectedScale); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(expectedScale);
assertThat(acc.getZeroCount()).isEqualTo(2); assertThat(point.getZeroCount()).isEqualTo(2);
// Assert positive recordings are at correct index // Assert positive recordings are at correct index
int posOffset = acc.getPositiveBuckets().getOffset(); int posOffset = point.getPositiveBuckets().getOffset();
assertThat(acc.getPositiveBuckets().getTotalCount()).isEqualTo(5); assertThat(point.getPositiveBuckets().getTotalCount()).isEqualTo(5);
assertThat(positiveCounts.get(valueToIndex(expectedScale, 0.5) - posOffset)).isEqualTo(1); assertThat(positiveCounts.get(valueToIndex(expectedScale, 0.5) - posOffset)).isEqualTo(1);
assertThat(positiveCounts.get(valueToIndex(expectedScale, 1.0) - posOffset)).isEqualTo(1); assertThat(positiveCounts.get(valueToIndex(expectedScale, 1.0) - posOffset)).isEqualTo(1);
assertThat(positiveCounts.get(valueToIndex(expectedScale, 12.0) - posOffset)).isEqualTo(2); assertThat(positiveCounts.get(valueToIndex(expectedScale, 12.0) - posOffset)).isEqualTo(2);
assertThat(positiveCounts.get(valueToIndex(expectedScale, 15.213) - posOffset)).isEqualTo(1); assertThat(positiveCounts.get(valueToIndex(expectedScale, 15.213) - posOffset)).isEqualTo(1);
// Assert negative recordings are at correct index // Assert negative recordings are at correct index
int negOffset = acc.getNegativeBuckets().getOffset(); int negOffset = point.getNegativeBuckets().getOffset();
assertThat(acc.getNegativeBuckets().getTotalCount()).isEqualTo(3); assertThat(point.getNegativeBuckets().getTotalCount()).isEqualTo(3);
assertThat(negativeCounts.get(valueToIndex(expectedScale, 13.2) - negOffset)).isEqualTo(1); assertThat(negativeCounts.get(valueToIndex(expectedScale, 13.2) - negOffset)).isEqualTo(1);
assertThat(negativeCounts.get(valueToIndex(expectedScale, 2.01) - negOffset)).isEqualTo(1); assertThat(negativeCounts.get(valueToIndex(expectedScale, 2.01) - negOffset)).isEqualTo(1);
assertThat(negativeCounts.get(valueToIndex(expectedScale, 1.0) - negOffset)).isEqualTo(1); assertThat(negativeCounts.get(valueToIndex(expectedScale, 1.0) - negOffset)).isEqualTo(1);
@ -137,45 +130,45 @@ class DoubleExponentialHistogramAggregatorTest {
@Test @Test
void testInvalidRecording() { void testInvalidRecording() {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
// Non finite recordings should be ignored // Non finite recordings should be ignored
aggregatorHandle.recordDouble(Double.POSITIVE_INFINITY); aggregatorHandle.recordDouble(Double.POSITIVE_INFINITY);
aggregatorHandle.recordDouble(Double.NEGATIVE_INFINITY); aggregatorHandle.recordDouble(Double.NEGATIVE_INFINITY);
aggregatorHandle.recordDouble(Double.NaN); aggregatorHandle.recordDouble(Double.NaN);
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
assertThat(Objects.requireNonNull(acc).getSum()).isEqualTo(0); assertThat(Objects.requireNonNull(point).getSum()).isEqualTo(0);
assertThat(acc.getPositiveBuckets().getTotalCount()).isEqualTo(0); assertThat(point.getPositiveBuckets().getTotalCount()).isEqualTo(0);
assertThat(acc.getNegativeBuckets().getTotalCount()).isEqualTo(0); assertThat(point.getNegativeBuckets().getTotalCount()).isEqualTo(0);
assertThat(acc.getZeroCount()).isEqualTo(0); assertThat(point.getZeroCount()).isEqualTo(0);
} }
@ParameterizedTest @ParameterizedTest
@MethodSource("provideAggregator") @MethodSource("provideAggregator")
void testRecordingsAtLimits(DoubleExponentialHistogramAggregator aggregator) { void testRecordingsAtLimits(DoubleExponentialHistogramAggregator aggregator) {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(Double.MIN_VALUE); aggregatorHandle.recordDouble(Double.MIN_VALUE);
aggregatorHandle.recordDouble(Double.MAX_VALUE); aggregatorHandle.recordDouble(Double.MAX_VALUE);
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
List<Long> bucketCounts = Objects.requireNonNull(acc).getPositiveBuckets().getBucketCounts(); List<Long> bucketCounts = Objects.requireNonNull(point).getPositiveBuckets().getBucketCounts();
// assert buckets == [1 0 0 0 ... 1] // assert buckets == [1 0 0 0 ... 1]
assertThat(bucketCounts.get(0)).isEqualTo(1); assertThat(bucketCounts.get(0)).isEqualTo(1);
assertThat(bucketCounts.get(bucketCounts.size() - 1)).isEqualTo(1); assertThat(bucketCounts.get(bucketCounts.size() - 1)).isEqualTo(1);
assertThat(bucketCounts.stream().filter(i -> i == 0).count()) assertThat(bucketCounts.stream().filter(i -> i == 0).count())
.isEqualTo(bucketCounts.size() - 2); .isEqualTo(bucketCounts.size() - 2);
assertThat(acc.getPositiveBuckets().getTotalCount()).isEqualTo(2); assertThat(point.getPositiveBuckets().getTotalCount()).isEqualTo(2);
// With 160 buckets allowed, minimum scale is -4 // With 160 buckets allowed, minimum scale is -4
assertThat(acc.getScale()).isEqualTo(-4); assertThat(point.getScale()).isEqualTo(-4);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(-4); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(-4);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(-4); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(-4);
// if scale is -4, base is 65,536. // if scale is -4, base is 65,536.
int base = 65_536; int base = 65_536;
@ -185,20 +178,20 @@ class DoubleExponentialHistogramAggregatorTest {
// lowest bucket // lowest bucket
// As the bucket lower bound is less than Double.MIN_VALUE, Math.pow() rounds to 0 // As the bucket lower bound is less than Double.MIN_VALUE, Math.pow() rounds to 0
assertThat(Math.pow(base, acc.getPositiveBuckets().getOffset())).isEqualTo(0); assertThat(Math.pow(base, point.getPositiveBuckets().getOffset())).isEqualTo(0);
assertThat(Math.pow(base, acc.getPositiveBuckets().getOffset() + 1)) assertThat(Math.pow(base, point.getPositiveBuckets().getOffset() + 1))
.isGreaterThan(Double.MIN_VALUE); .isGreaterThan(Double.MIN_VALUE);
// highest bucket // highest bucket
assertThat(Math.pow(base, acc.getPositiveBuckets().getOffset() + bucketCounts.size() - 1)) assertThat(Math.pow(base, point.getPositiveBuckets().getOffset() + bucketCounts.size() - 1))
.isLessThanOrEqualTo(Double.MAX_VALUE); .isLessThanOrEqualTo(Double.MAX_VALUE);
// As the bucket upper bound is greater than Double.MAX_VALUE, Math.pow() rounds to infinity // As the bucket upper bound is greater than Double.MAX_VALUE, Math.pow() rounds to infinity
assertThat(Math.pow(base, acc.getPositiveBuckets().getOffset() + bucketCounts.size())) assertThat(Math.pow(base, point.getPositiveBuckets().getOffset() + bucketCounts.size()))
.isEqualTo(Double.POSITIVE_INFINITY); .isEqualTo(Double.POSITIVE_INFINITY);
} }
@Test @Test
void testExemplarsInAccumulation() { void aggregateThenMaybeReset_WithExemplars() {
DoubleExponentialHistogramAggregator agg = DoubleExponentialHistogramAggregator agg =
new DoubleExponentialHistogramAggregator(() -> reservoir, 160, MAX_SCALE); new DoubleExponentialHistogramAggregator(() -> reservoir, 160, MAX_SCALE);
@ -216,48 +209,42 @@ class DoubleExponentialHistogramAggregatorTest {
List<DoubleExemplarData> exemplars = Collections.singletonList(exemplar); List<DoubleExemplarData> exemplars = Collections.singletonList(exemplar);
Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars); Mockito.when(reservoir.collectAndReset(Attributes.empty())).thenReturn(exemplars);
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
agg.createHandle(); agg.createHandle();
aggregatorHandle.recordDouble(0, attributes, Context.root()); aggregatorHandle.recordDouble(0, attributes, Context.root());
assertThat( assertThat(
Objects.requireNonNull( Objects.requireNonNull(
aggregatorHandle.accumulateThenMaybeReset( aggregatorHandle.aggregateThenMaybeReset(
Attributes.empty(), /* reset= */ true)) 0, 1, Attributes.empty(), /* reset= */ true))
.getExemplars()) .getExemplars())
.isEqualTo(exemplars); .isEqualTo(exemplars);
} }
@Test @Test
void testAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordDouble(5.0); aggregatorHandle.recordDouble(5.0);
assertThat( assertThat(
Objects.requireNonNull( Objects.requireNonNull(
aggregatorHandle.accumulateThenMaybeReset( aggregatorHandle.aggregateThenMaybeReset(
Attributes.empty(), /* reset= */ true)) 0, 1, Attributes.empty(), /* reset= */ true))
.getPositiveBuckets() .getPositiveBuckets()
.getBucketCounts()) .getBucketCounts())
.isEqualTo(Collections.singletonList(1L)); .isEqualTo(Collections.singletonList(1L));
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test
void testAccumulateData() {
ExponentialHistogramAccumulation acc =
aggregator.accumulateDoubleMeasurement(1.2, Attributes.empty(), Context.current());
ExponentialHistogramAccumulation expected = getTestAccumulation(Collections.emptyList(), 1.2);
assertThat(acc).isEqualTo(expected);
}
@Test @Test
void testInsert1M() { void testInsert1M() {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> handle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> handle =
aggregator.createHandle(); aggregator.createHandle();
int n = 1024 * 1024 - 1; int n = 1024 * 1024 - 1;
@ -268,14 +255,14 @@ class DoubleExponentialHistogramAggregatorTest {
d += min; d += min;
} }
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
Objects.requireNonNull( Objects.requireNonNull(
handle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)); handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true));
assertThat(acc.getScale()).isEqualTo(3); assertThat(point.getScale()).isEqualTo(3);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(3); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(3);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(3); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(3);
assertThat(acc.getPositiveBuckets().getBucketCounts().size()).isEqualTo(160); assertThat(point.getPositiveBuckets().getBucketCounts().size()).isEqualTo(160);
assertThat(acc.getPositiveBuckets().getTotalCount()).isEqualTo(n); assertThat(point.getPositiveBuckets().getTotalCount()).isEqualTo(n);
} }
@Test @Test
@ -293,14 +280,14 @@ class DoubleExponentialHistogramAggregatorTest {
handle.recordDouble(4.0); handle.recordDouble(4.0);
handle.recordDouble(16.0); handle.recordDouble(16.0);
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
Objects.requireNonNull( Objects.requireNonNull(
handle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)); handle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true));
assertThat(acc.getScale()).isEqualTo(0); assertThat(point.getScale()).isEqualTo(0);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(0); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(0);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(0); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(0);
ExponentialHistogramBuckets buckets = acc.getPositiveBuckets(); ExponentialHistogramBuckets buckets = point.getPositiveBuckets();
assertThat(acc.getSum()).isEqualTo(23.5); assertThat(point.getSum()).isEqualTo(23.5);
assertThat(buckets.getOffset()).isEqualTo(-2); assertThat(buckets.getOffset()).isEqualTo(-2);
assertThat(buckets.getBucketCounts()).isEqualTo(Arrays.asList(1L, 1L, 1L, 1L, 0L, 1L)); assertThat(buckets.getBucketCounts()).isEqualTo(Arrays.asList(1L, 1L, 1L, 1L, 0L, 1L));
assertThat(buckets.getTotalCount()).isEqualTo(5); assertThat(buckets.getTotalCount()).isEqualTo(5);
@ -329,24 +316,21 @@ class DoubleExponentialHistogramAggregatorTest {
DoubleExponentialHistogramAggregator cumulativeAggregator = DoubleExponentialHistogramAggregator cumulativeAggregator =
new DoubleExponentialHistogramAggregator(reservoirSupplier, 160, MAX_SCALE); new DoubleExponentialHistogramAggregator(reservoirSupplier, 160, MAX_SCALE);
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
cumulativeAggregator.createHandle(); cumulativeAggregator.createHandle();
aggregatorHandle.recordDouble(0); aggregatorHandle.recordDouble(0);
aggregatorHandle.recordDouble(0); aggregatorHandle.recordDouble(0);
aggregatorHandle.recordDouble(123.456); aggregatorHandle.recordDouble(123.456);
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData expPoint =
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true);
MetricData metricDataCumulative = MetricData metricDataCumulative =
cumulativeAggregator.toMetricData( cumulativeAggregator.toMetricData(
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap(Attributes.empty(), acc), Collections.singletonList(expPoint),
AggregationTemporality.CUMULATIVE, AggregationTemporality.CUMULATIVE);
0,
10,
100);
// Assertions run twice to verify immutability; recordings shouldn't modify the metric data // Assertions run twice to verify immutability; recordings shouldn't modify the metric data
for (int i = 0; i < 2; i++) { for (int i = 0; i < 2; i++) {
@ -386,11 +370,8 @@ class DoubleExponentialHistogramAggregatorTest {
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap(Attributes.empty(), acc), Collections.singletonList(expPoint),
AggregationTemporality.DELTA, AggregationTemporality.DELTA);
0,
10,
100);
assertThat(metricDataDelta.getType()).isEqualTo(MetricDataType.EXPONENTIAL_HISTOGRAM); assertThat(metricDataDelta.getType()).isEqualTo(MetricDataType.EXPONENTIAL_HISTOGRAM);
assertThat(metricDataDelta.getExponentialHistogramData().getAggregationTemporality()) assertThat(metricDataDelta.getExponentialHistogramData().getAggregationTemporality())
.isEqualTo(AggregationTemporality.DELTA); .isEqualTo(AggregationTemporality.DELTA);
@ -398,7 +379,7 @@ class DoubleExponentialHistogramAggregatorTest {
@Test @Test
void testMultithreadedUpdates() throws InterruptedException { void testMultithreadedUpdates() throws InterruptedException {
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<ExponentialHistogramPointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
ImmutableList<Double> updates = ImmutableList.of(0D, 0.1D, -0.1D, 1D, -1D, 100D); ImmutableList<Double> updates = ImmutableList.of(0D, 0.1D, -0.1D, 1D, -1D, 100D);
int numberOfThreads = updates.size(); int numberOfThreads = updates.size();
@ -415,47 +396,52 @@ class DoubleExponentialHistogramAggregatorTest {
for (int j = 0; j < numberOfUpdates; j++) { for (int j = 0; j < numberOfUpdates; j++) {
aggregatorHandle.recordDouble(v); aggregatorHandle.recordDouble(v);
if (ThreadLocalRandom.current().nextInt(10) == 0) { if (ThreadLocalRandom.current().nextInt(10) == 0) {
aggregatorHandle.accumulateThenMaybeReset( aggregatorHandle.aggregateThenMaybeReset(
Attributes.empty(), /* reset= */ false); 0, 1, Attributes.empty(), /* reset= */ false);
} }
} }
})) }))
.collect(Collectors.toList())); .collect(Collectors.toList()));
ExponentialHistogramAccumulation acc = ExponentialHistogramPointData point =
Objects.requireNonNull( Objects.requireNonNull(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ false)); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ false));
assertThat(acc.getZeroCount()).isEqualTo(numberOfUpdates); assertThat(point.getZeroCount()).isEqualTo(numberOfUpdates);
assertThat(acc.getSum()).isCloseTo(100.0D * 10000, Offset.offset(0.0001)); // float error assertThat(point.getSum()).isCloseTo(100.0D * 10000, Offset.offset(0.0001)); // float error
assertThat(acc.getScale()).isEqualTo(3); assertThat(point.getScale()).isEqualTo(3);
assertThat(acc.getPositiveBuckets().getScale()).isEqualTo(3); assertThat(point.getPositiveBuckets().getScale()).isEqualTo(3);
assertThat(acc.getNegativeBuckets().getScale()).isEqualTo(3); assertThat(point.getNegativeBuckets().getScale()).isEqualTo(3);
ExponentialHistogramBuckets positiveBuckets = acc.getPositiveBuckets(); ExponentialHistogramBuckets positiveBuckets = point.getPositiveBuckets();
assertThat(positiveBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 3); assertThat(positiveBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 3);
assertThat(positiveBuckets.getOffset()).isEqualTo(-27); assertThat(positiveBuckets.getOffset()).isEqualTo(-27);
ExponentialHistogramBuckets negativeBuckets = acc.getNegativeBuckets(); ExponentialHistogramBuckets negativeBuckets = point.getNegativeBuckets();
assertThat(negativeBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 2); assertThat(negativeBuckets.getTotalCount()).isEqualTo(numberOfUpdates * 2);
assertThat(negativeBuckets.getOffset()).isEqualTo(-27); assertThat(negativeBuckets.getOffset()).isEqualTo(-27);
// Verify positive buckets have correct counts // Verify positive buckets have correct counts
List<Long> posCounts = acc.getPositiveBuckets().getBucketCounts(); List<Long> posCounts = point.getPositiveBuckets().getBucketCounts();
assertThat( assertThat(
posCounts.get(valueToIndex(acc.getScale(), 0.1) - acc.getPositiveBuckets().getOffset())) posCounts.get(
valueToIndex(point.getScale(), 0.1) - point.getPositiveBuckets().getOffset()))
.isEqualTo(numberOfUpdates); .isEqualTo(numberOfUpdates);
assertThat( assertThat(
posCounts.get(valueToIndex(acc.getScale(), 1) - acc.getPositiveBuckets().getOffset())) posCounts.get(
valueToIndex(point.getScale(), 1) - point.getPositiveBuckets().getOffset()))
.isEqualTo(numberOfUpdates); .isEqualTo(numberOfUpdates);
assertThat( assertThat(
posCounts.get(valueToIndex(acc.getScale(), 100) - acc.getPositiveBuckets().getOffset())) posCounts.get(
valueToIndex(point.getScale(), 100) - point.getPositiveBuckets().getOffset()))
.isEqualTo(numberOfUpdates); .isEqualTo(numberOfUpdates);
// Verify negative buckets have correct counts // Verify negative buckets have correct counts
List<Long> negCounts = acc.getNegativeBuckets().getBucketCounts(); List<Long> negCounts = point.getNegativeBuckets().getBucketCounts();
assertThat( assertThat(
negCounts.get(valueToIndex(acc.getScale(), 0.1) - acc.getPositiveBuckets().getOffset())) negCounts.get(
valueToIndex(point.getScale(), 0.1) - point.getPositiveBuckets().getOffset()))
.isEqualTo(numberOfUpdates); .isEqualTo(numberOfUpdates);
assertThat( assertThat(
negCounts.get(valueToIndex(acc.getScale(), 1) - acc.getPositiveBuckets().getOffset())) negCounts.get(
valueToIndex(point.getScale(), 1) - point.getPositiveBuckets().getOffset()))
.isEqualTo(numberOfUpdates); .isEqualTo(numberOfUpdates);
} }
} }

View File

@ -14,8 +14,10 @@ import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.resources.Resource;
@ -40,51 +42,54 @@ class DoubleLastValueAggregatorTest {
@Test @Test
void multipleRecords() { void multipleRecords() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(12.1); aggregatorHandle.recordDouble(12.1);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12.1); .isEqualTo(12.1);
aggregatorHandle.recordDouble(13.1); aggregatorHandle.recordDouble(13.1);
aggregatorHandle.recordDouble(14.1); aggregatorHandle.recordDouble(14.1);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(14.1); .isEqualTo(14.1);
} }
@Test @Test
void toAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordDouble(13.1); aggregatorHandle.recordDouble(13.1);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(13.1); .isEqualTo(13.1);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordDouble(12.1); aggregatorHandle.recordDouble(12.1);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12.1); .isEqualTo(12.1);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void diffAccumulation() { void diff() {
Attributes attributes = Attributes.builder().put("test", "value").build(); Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar = DoubleExemplarData exemplar =
ImmutableDoubleExemplarData.create( ImmutableDoubleExemplarData.create(
@ -108,17 +113,18 @@ class DoubleLastValueAggregatorTest {
TraceFlags.getDefault(), TraceFlags.getDefault(),
TraceState.getDefault()), TraceState.getDefault()),
2)); 2));
DoubleAccumulation result = DoublePointData result =
aggregator.diff( aggregator.diff(
DoubleAccumulation.create(1, previousExemplars), ImmutableDoublePointData.create(0, 1, Attributes.empty(), 1, previousExemplars),
DoubleAccumulation.create(2, exemplars)); ImmutableDoublePointData.create(0, 1, Attributes.empty(), 2, exemplars));
// Assert that latest measurement is kept. // Assert that latest measurement is kept.
assertThat(result).isEqualTo(DoubleAccumulation.create(2, exemplars)); assertThat(result)
.isEqualTo(ImmutableDoublePointData.create(0, 1, Attributes.empty(), 2, exemplars));
} }
@Test @Test
void toMetricData() { void toMetricData() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(10); aggregatorHandle.recordDouble(10);
@ -127,13 +133,10 @@ class DoubleLastValueAggregatorTest {
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap( Collections.singletonList(
Attributes.empty(), aggregatorHandle.aggregateThenMaybeReset(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)), 10, 100, Attributes.empty(), /* reset= */ true)),
AggregationTemporality.DELTA, AggregationTemporality.DELTA);
0,
10,
100);
assertThat(metricData) assertThat(metricData)
.hasName("name") .hasName("name")
.hasDescription("description") .hasDescription("description")

View File

@ -17,8 +17,10 @@ import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.InstrumentValueType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.DoubleExemplarData; import io.opentelemetry.sdk.metrics.data.DoubleExemplarData;
import io.opentelemetry.sdk.metrics.data.DoublePointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoubleExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableDoublePointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
@ -59,7 +61,7 @@ class DoubleSumAggregatorTest {
@Test @Test
void multipleRecords() { void multipleRecords() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(12.1); aggregatorHandle.recordDouble(12.1);
aggregatorHandle.recordDouble(12.1); aggregatorHandle.recordDouble(12.1);
@ -68,14 +70,14 @@ class DoubleSumAggregatorTest {
aggregatorHandle.recordDouble(12.1); aggregatorHandle.recordDouble(12.1);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12.1 * 5); .isEqualTo(12.1 * 5);
} }
@Test @Test
void multipleRecords_WithNegatives() { void multipleRecords_WithNegatives() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(12); aggregatorHandle.recordDouble(12);
aggregatorHandle.recordDouble(12); aggregatorHandle.recordDouble(12);
@ -85,41 +87,44 @@ class DoubleSumAggregatorTest {
aggregatorHandle.recordDouble(-11); aggregatorHandle.recordDouble(-11);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(14); .isEqualTo(14);
} }
@Test @Test
void toAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordDouble(13); aggregatorHandle.recordDouble(13);
aggregatorHandle.recordDouble(12); aggregatorHandle.recordDouble(12);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(25); .isEqualTo(25);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordDouble(12); aggregatorHandle.recordDouble(12);
aggregatorHandle.recordDouble(-25); aggregatorHandle.recordDouble(-25);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(-13); .isEqualTo(-13);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void testExemplarsInAccumulation() { void aggregateThenMaybeReset_WithExemplars() {
Attributes attributes = Attributes.builder().put("test", "value").build(); Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar = DoubleExemplarData exemplar =
ImmutableDoubleExemplarData.create( ImmutableDoubleExemplarData.create(
@ -142,11 +147,12 @@ class DoubleSumAggregatorTest {
InstrumentType.COUNTER, InstrumentType.COUNTER,
InstrumentValueType.DOUBLE), InstrumentValueType.DOUBLE),
() -> reservoir); () -> reservoir);
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(0, attributes, Context.root()); aggregatorHandle.recordDouble(0, attributes, Context.root());
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
.isEqualTo(DoubleAccumulation.create(0, exemplars)); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo(ImmutableDoublePointData.create(0, 1, Attributes.empty(), 0, exemplars));
} }
@Test @Test
@ -171,9 +177,10 @@ class DoubleSumAggregatorTest {
"name", "description", "unit", instrumentType, InstrumentValueType.LONG), "name", "description", "unit", instrumentType, InstrumentValueType.LONG),
ExemplarReservoir::doubleNoSamples); ExemplarReservoir::doubleNoSamples);
DoubleAccumulation diffed = DoublePointData diffed =
aggregator.diff( aggregator.diff(
DoubleAccumulation.create(1d), DoubleAccumulation.create(2d, exemplars)); ImmutableDoublePointData.create(0, 1, Attributes.empty(), 1d),
ImmutableDoublePointData.create(0, 1, Attributes.empty(), 2d, exemplars));
assertThat(diffed.getValue()) assertThat(diffed.getValue())
.withFailMessage( .withFailMessage(
"Invalid diff result for instrumentType %s, temporality %s: %s", "Invalid diff result for instrumentType %s, temporality %s: %s",
@ -186,7 +193,7 @@ class DoubleSumAggregatorTest {
@Test @Test
void toMetricData() { void toMetricData() {
AggregatorHandle<DoubleAccumulation, DoubleExemplarData> aggregatorHandle = AggregatorHandle<DoublePointData, DoubleExemplarData> aggregatorHandle =
aggregator.createHandle(); aggregator.createHandle();
aggregatorHandle.recordDouble(10); aggregatorHandle.recordDouble(10);
@ -195,13 +202,10 @@ class DoubleSumAggregatorTest {
resource, resource,
scope, scope,
metricDescriptor, metricDescriptor,
Collections.singletonMap( Collections.singletonList(
Attributes.empty(), aggregatorHandle.aggregateThenMaybeReset(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)), 0, 100, Attributes.empty(), /* reset= */ true)),
AggregationTemporality.CUMULATIVE, AggregationTemporality.CUMULATIVE);
0,
10,
100);
assertThat(metricData) assertThat(metricData)
.hasName("name") .hasName("name")
.hasDescription("description") .hasDescription("description")
@ -232,18 +236,15 @@ class DoubleSumAggregatorTest {
TraceFlags.getDefault(), TraceFlags.getDefault(),
TraceState.getDefault()), TraceState.getDefault()),
1); 1);
DoubleAccumulation accumulation =
DoubleAccumulation.create(1, Collections.singletonList(exemplar));
assertThat( assertThat(
aggregator.toMetricData( aggregator.toMetricData(
resource, resource,
scope, scope,
metricDescriptor, metricDescriptor,
Collections.singletonMap(Attributes.empty(), accumulation), Collections.singletonList(
AggregationTemporality.CUMULATIVE, ImmutableDoublePointData.create(
0, 0, 1, Attributes.empty(), 1, Collections.singletonList(exemplar))),
10, AggregationTemporality.CUMULATIVE))
100))
.hasDoubleSumSatisfying( .hasDoubleSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar)));
} }

View File

@ -11,6 +11,7 @@ import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.sdk.common.InstrumentationScopeInfo; import io.opentelemetry.sdk.common.InstrumentationScopeInfo;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableGaugeData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
@ -38,53 +39,53 @@ class LongLastValueAggregatorTest {
@Test @Test
void multipleRecords() { void multipleRecords() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12L); .isEqualTo(12L);
aggregatorHandle.recordLong(13); aggregatorHandle.recordLong(13);
aggregatorHandle.recordLong(14); aggregatorHandle.recordLong(14);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(14L); .isEqualTo(14L);
} }
@Test @Test
void toAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle(); assertThat(
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(13); aggregatorHandle.recordLong(13);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(13L); .isEqualTo(13L);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12L); .isEqualTo(12L);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void toMetricData() { void toMetricData() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(10); aggregatorHandle.recordLong(10);
MetricData metricData = MetricData metricData =
@ -92,13 +93,10 @@ class LongLastValueAggregatorTest {
RESOURCE, RESOURCE,
INSTRUMENTATION_SCOPE_INFO, INSTRUMENTATION_SCOPE_INFO,
METRIC_DESCRIPTOR, METRIC_DESCRIPTOR,
Collections.singletonMap( Collections.singletonList(
Attributes.empty(), aggregatorHandle.aggregateThenMaybeReset(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)), 2, 100, Attributes.empty(), /* reset= */ true)),
AggregationTemporality.CUMULATIVE, AggregationTemporality.CUMULATIVE);
2,
10,
100);
assertThat(metricData) assertThat(metricData)
.isEqualTo( .isEqualTo(
ImmutableMetricData.createLongGauge( ImmutableMetricData.createLongGauge(

View File

@ -17,8 +17,10 @@ import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.InstrumentValueType;
import io.opentelemetry.sdk.metrics.data.AggregationTemporality; import io.opentelemetry.sdk.metrics.data.AggregationTemporality;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData; import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongExemplarData;
import io.opentelemetry.sdk.metrics.internal.data.ImmutableLongPointData;
import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.InstrumentDescriptor;
import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor; import io.opentelemetry.sdk.metrics.internal.descriptor.MetricDescriptor;
import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir; import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
@ -58,8 +60,7 @@ class LongSumAggregatorTest {
@Test @Test
void multipleRecords() { void multipleRecords() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
@ -67,17 +68,17 @@ class LongSumAggregatorTest {
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(12 * 5); .isEqualTo(12 * 5);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void multipleRecords_WithNegatives() { void multipleRecords_WithNegatives() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
aggregatorHandle.recordLong(-23); aggregatorHandle.recordLong(-23);
@ -86,43 +87,46 @@ class LongSumAggregatorTest {
aggregatorHandle.recordLong(-11); aggregatorHandle.recordLong(-11);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(14); .isEqualTo(14);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void toAccumulationAndReset() { void aggregateThenMaybeReset() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle(); assertThat(
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(13); aggregatorHandle.recordLong(13);
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(25); .isEqualTo(25);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
aggregatorHandle.recordLong(12); aggregatorHandle.recordLong(12);
aggregatorHandle.recordLong(-25); aggregatorHandle.recordLong(-25);
assertThat( assertThat(
aggregatorHandle aggregatorHandle
.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true) .aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true)
.getValue()) .getValue())
.isEqualTo(-13); .isEqualTo(-13);
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isNull(); .isNull();
} }
@Test @Test
void testExemplarsInAccumulation() { void aggregateThenMaybeReset_WithExemplars() {
Attributes attributes = Attributes.builder().put("test", "value").build(); Attributes attributes = Attributes.builder().put("test", "value").build();
LongExemplarData exemplar = LongExemplarData exemplar =
ImmutableLongExemplarData.create( ImmutableLongExemplarData.create(
@ -145,11 +149,11 @@ class LongSumAggregatorTest {
InstrumentType.COUNTER, InstrumentType.COUNTER,
InstrumentValueType.LONG), InstrumentValueType.LONG),
() -> reservoir); () -> reservoir);
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(0, attributes, Context.root()); aggregatorHandle.recordLong(0, attributes, Context.root());
assertThat(aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)) assertThat(
.isEqualTo(LongAccumulation.create(0, exemplars)); aggregatorHandle.aggregateThenMaybeReset(0, 1, Attributes.empty(), /* reset= */ true))
.isEqualTo(ImmutableLongPointData.create(0, 1, Attributes.empty(), 0, exemplars));
} }
@Test @Test
@ -173,8 +177,10 @@ class LongSumAggregatorTest {
"name", "description", "unit", instrumentType, InstrumentValueType.LONG), "name", "description", "unit", instrumentType, InstrumentValueType.LONG),
ExemplarReservoir::longNoSamples); ExemplarReservoir::longNoSamples);
LongAccumulation diffed = LongPointData diffed =
aggregator.diff(LongAccumulation.create(1L), LongAccumulation.create(2L, exemplars)); aggregator.diff(
ImmutableLongPointData.create(0, 1, Attributes.empty(), 1L),
ImmutableLongPointData.create(0, 1, Attributes.empty(), 2L, exemplars));
assertThat(diffed.getValue()) assertThat(diffed.getValue())
.withFailMessage( .withFailMessage(
"Invalid diff result for instrumentType %s, temporality %s: %s", "Invalid diff result for instrumentType %s, temporality %s: %s",
@ -188,8 +194,7 @@ class LongSumAggregatorTest {
@Test @Test
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
void toMetricData() { void toMetricData() {
AggregatorHandle<LongAccumulation, LongExemplarData> aggregatorHandle = AggregatorHandle<LongPointData, LongExemplarData> aggregatorHandle = aggregator.createHandle();
aggregator.createHandle();
aggregatorHandle.recordLong(10); aggregatorHandle.recordLong(10);
MetricData metricData = MetricData metricData =
@ -197,13 +202,10 @@ class LongSumAggregatorTest {
resource, resource,
library, library,
metricDescriptor, metricDescriptor,
Collections.singletonMap( Collections.singletonList(
Attributes.empty(), aggregatorHandle.aggregateThenMaybeReset(
aggregatorHandle.accumulateThenMaybeReset(Attributes.empty(), /* reset= */ true)), 0, 100, Attributes.empty(), /* reset= */ true)),
AggregationTemporality.CUMULATIVE, AggregationTemporality.CUMULATIVE);
0,
10,
100);
assertThat(metricData) assertThat(metricData)
.hasName("name") .hasName("name")
.hasDescription("description") .hasDescription("description")
@ -234,17 +236,16 @@ class LongSumAggregatorTest {
TraceFlags.getDefault(), TraceFlags.getDefault(),
TraceState.getDefault()), TraceState.getDefault()),
1); 1);
LongAccumulation accumulation = LongAccumulation.create(1, Collections.singletonList(exemplar));
assertThat( assertThat(
aggregator.toMetricData( aggregator.toMetricData(
resource, resource,
library, library,
metricDescriptor, metricDescriptor,
Collections.singletonMap(Attributes.empty(), accumulation), Collections.singletonList(
AggregationTemporality.CUMULATIVE, ImmutableLongPointData.create(
0, 0, 1, Attributes.empty(), 1, Collections.singletonList(exemplar))),
10, AggregationTemporality.CUMULATIVE))
100))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar))); sum -> sum.hasPointsSatisfying(point -> point.hasValue(1).hasExemplars(exemplar)));
} }

View File

@ -5,6 +5,8 @@
package io.opentelemetry.sdk.metrics.internal.state; package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.assertThat;
import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry; import static io.opentelemetry.sdk.testing.assertj.OpenTelemetryAssertions.attributeEntry;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
@ -85,9 +87,12 @@ class AsynchronousMetricStorageTest {
@Test @Test
void recordLong() { void recordLong() {
longCounterStorage.recordLong(1, Attributes.builder().put("key", "a").build()); longCounterStorage.record(
longCounterStorage.recordLong(2, Attributes.builder().put("key", "b").build()); longMeasurement(0, 1, 1, Attributes.builder().put("key", "a").build()));
longCounterStorage.recordLong(3, Attributes.builder().put("key", "c").build()); longCounterStorage.record(
longMeasurement(0, 1, 2, Attributes.builder().put("key", "b").build()));
longCounterStorage.record(
longMeasurement(0, 1, 3, Attributes.builder().put("key", "c").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime()))
.satisfies( .satisfies(
@ -107,9 +112,12 @@ class AsynchronousMetricStorageTest {
@Test @Test
void recordDouble() { void recordDouble() {
doubleCounterStorage.recordDouble(1.1, Attributes.builder().put("key", "a").build()); doubleCounterStorage.record(
doubleCounterStorage.recordDouble(2.2, Attributes.builder().put("key", "b").build()); doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "a").build()));
doubleCounterStorage.recordDouble(3.3, Attributes.builder().put("key", "c").build()); doubleCounterStorage.record(
doubleMeasurement(0, 1, 2.2, Attributes.builder().put("key", "b").build()));
doubleCounterStorage.record(
doubleMeasurement(0, 1, 3.3, Attributes.builder().put("key", "c").build()));
assertThat(doubleCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) assertThat(doubleCounterStorage.collect(resource, scope, 0, testClock.nanoTime()))
.satisfies( .satisfies(
@ -142,7 +150,8 @@ class AsynchronousMetricStorageTest {
InstrumentDescriptor.create( InstrumentDescriptor.create(
"name", "description", "unit", InstrumentType.COUNTER, InstrumentValueType.LONG)); "name", "description", "unit", InstrumentType.COUNTER, InstrumentValueType.LONG));
storage.recordLong(1, Attributes.builder().put("key1", "a").put("key2", "b").build()); storage.record(
longMeasurement(0, 1, 1, Attributes.builder().put("key1", "a").put("key2", "b").build()));
assertThat(storage.collect(resource, scope, 0, testClock.nanoTime())) assertThat(storage.collect(resource, scope, 0, testClock.nanoTime()))
.satisfies( .satisfies(
@ -157,23 +166,26 @@ class AsynchronousMetricStorageTest {
} }
@Test @Test
void record_MaxAccumulations() { void record_MaxCardinality() {
for (int i = 0; i <= MetricStorage.MAX_ACCUMULATIONS + 1; i++) { for (int i = 0; i <= MetricStorage.MAX_CARDINALITY + 1; i++) {
longCounterStorage.recordLong(1, Attributes.builder().put("key" + i, "val").build()); longCounterStorage.record(
longMeasurement(0, 1, 1, Attributes.builder().put("key" + i, "val").build()));
} }
assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime()))
.satisfies( .satisfies(
metricData -> metricData ->
assertThat(metricData.getLongSumData().getPoints()) assertThat(metricData.getLongSumData().getPoints())
.hasSize(MetricStorage.MAX_ACCUMULATIONS)); .hasSize(MetricStorage.MAX_CARDINALITY));
logs.assertContains("Instrument long-counter has exceeded the maximum allowed accumulations"); logs.assertContains("Instrument long-counter has exceeded the maximum allowed cardinality");
} }
@Test @Test
void record_DuplicateAttributes() { void record_DuplicateAttributes() {
longCounterStorage.recordLong(1, Attributes.builder().put("key1", "a").build()); longCounterStorage.record(
longCounterStorage.recordLong(2, Attributes.builder().put("key1", "a").build()); longMeasurement(0, 1, 1, Attributes.builder().put("key1", "a").build()));
longCounterStorage.record(
longMeasurement(0, 1, 2, Attributes.builder().put("key1", "a").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime())) assertThat(longCounterStorage.collect(resource, scope, 0, testClock.nanoTime()))
.satisfies( .satisfies(
@ -191,8 +203,8 @@ class AsynchronousMetricStorageTest {
@Test @Test
void collect_CumulativeReportsCumulativeObservations() { void collect_CumulativeReportsCumulativeObservations() {
// Record measurement and collect at time 10 // Record measurement and collect at time 10
longCounterStorage.recordLong(3, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 10, 3, Attributes.empty()));
assertThat(longCounterStorage.collect(resource, scope, 0, 10)) assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isCumulative() sum.isCumulative()
@ -206,9 +218,10 @@ class AsynchronousMetricStorageTest {
registeredReader.setLastCollectEpochNanos(10); registeredReader.setLastCollectEpochNanos(10);
// Record measurements and collect at time 30 // Record measurements and collect at time 30
longCounterStorage.recordLong(3, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 30, 3, Attributes.empty()));
longCounterStorage.recordLong(6, Attributes.builder().put("key", "value1").build()); longCounterStorage.record(
assertThat(longCounterStorage.collect(resource, scope, 0, 30)) longMeasurement(0, 30, 6, Attributes.builder().put("key", "value1").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isCumulative() sum.isCumulative()
@ -228,9 +241,10 @@ class AsynchronousMetricStorageTest {
registeredReader.setLastCollectEpochNanos(30); registeredReader.setLastCollectEpochNanos(30);
// Record measurement and collect at time 35 // Record measurement and collect at time 35
longCounterStorage.recordLong(4, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 35, 4, Attributes.empty()));
longCounterStorage.recordLong(5, Attributes.builder().put("key", "value2").build()); longCounterStorage.record(
assertThat(longCounterStorage.collect(resource, scope, 0, 35)) longMeasurement(0, 35, 5, Attributes.builder().put("key", "value2").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isCumulative() sum.isCumulative()
@ -264,8 +278,8 @@ class AsynchronousMetricStorageTest {
InstrumentValueType.LONG)); InstrumentValueType.LONG));
// Record measurement and collect at time 10 // Record measurement and collect at time 10
longCounterStorage.recordLong(3, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 10, 3, Attributes.empty()));
assertThat(longCounterStorage.collect(resource, scope, 0, 10)) assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isDelta() sum.isDelta()
@ -279,9 +293,10 @@ class AsynchronousMetricStorageTest {
registeredReader.setLastCollectEpochNanos(10); registeredReader.setLastCollectEpochNanos(10);
// Record measurement and collect at time 30 // Record measurement and collect at time 30
longCounterStorage.recordLong(3, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 30, 3, Attributes.empty()));
longCounterStorage.recordLong(6, Attributes.builder().put("key", "value1").build()); longCounterStorage.record(
assertThat(longCounterStorage.collect(resource, scope, 0, 30)) longMeasurement(0, 30, 6, Attributes.builder().put("key", "value1").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isDelta() sum.isDelta()
@ -301,9 +316,10 @@ class AsynchronousMetricStorageTest {
registeredReader.setLastCollectEpochNanos(30); registeredReader.setLastCollectEpochNanos(30);
// Record measurement and collect at time 35 // Record measurement and collect at time 35
longCounterStorage.recordLong(4, Attributes.empty()); longCounterStorage.record(longMeasurement(0, 35, 4, Attributes.empty()));
longCounterStorage.recordLong(5, Attributes.builder().put("key", "value2").build()); longCounterStorage.record(
assertThat(longCounterStorage.collect(resource, scope, 0, 35)) longMeasurement(0, 35, 5, Attributes.builder().put("key", "value2").build()));
assertThat(longCounterStorage.collect(resource, scope, 0, 0))
.hasLongSumSatisfying( .hasLongSumSatisfying(
sum -> sum ->
sum.isDelta() sum.isDelta()

View File

@ -5,11 +5,11 @@
package io.opentelemetry.sdk.metrics.internal.state; package io.opentelemetry.sdk.metrics.internal.state;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.doubleMeasurement;
import static io.opentelemetry.sdk.metrics.internal.state.Measurement.longMeasurement;
import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyDouble;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.never; import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when; import static org.mockito.Mockito.when;
@ -135,12 +135,13 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Collections.singletonList(measurement1), callback); CallbackRegistration.create(Collections.singletonList(measurement1), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
assertThat(counter.get()).isEqualTo(1.1); assertThat(counter.get()).isEqualTo(1.1);
verify(storage1).recordDouble(1.1, Attributes.builder().put("key", "val").build()); verify(storage1)
verify(storage2, never()).recordDouble(anyDouble(), any()); .record(doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "val").build()));
verify(storage3, never()).recordDouble(anyDouble(), any()); verify(storage2, never()).record(any());
verify(storage3, never()).record(any());
} }
@Test @Test
@ -153,12 +154,14 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Collections.singletonList(measurement2), callback); CallbackRegistration.create(Collections.singletonList(measurement2), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
assertThat(counter.get()).isEqualTo(1); assertThat(counter.get()).isEqualTo(1);
verify(storage1, never()).recordLong(anyLong(), any()); verify(storage1, never()).record(any());
verify(storage2).recordLong(1, Attributes.builder().put("key", "val").build()); verify(storage2)
verify(storage3).recordLong(1, Attributes.builder().put("key", "val").build()); .record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build()));
verify(storage3)
.record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build()));
} }
@Test @Test
@ -175,13 +178,16 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Arrays.asList(measurement1, measurement2), callback); CallbackRegistration.create(Arrays.asList(measurement1, measurement2), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
assertThat(doubleCounter.get()).isEqualTo(1.1); assertThat(doubleCounter.get()).isEqualTo(1.1);
assertThat(longCounter.get()).isEqualTo(1); assertThat(longCounter.get()).isEqualTo(1);
verify(storage1).recordDouble(1.1, Attributes.builder().put("key", "val").build()); verify(storage1)
verify(storage2).recordLong(1, Attributes.builder().put("key", "val").build()); .record(doubleMeasurement(0, 1, 1.1, Attributes.builder().put("key", "val").build()));
verify(storage3).recordLong(1, Attributes.builder().put("key", "val").build()); verify(storage2)
.record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build()));
verify(storage3)
.record(longMeasurement(0, 1, 1, Attributes.builder().put("key", "val").build()));
} }
@Test @Test
@ -197,7 +203,7 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Collections.singletonList(measurement), callback); CallbackRegistration.create(Collections.singletonList(measurement), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
assertThat(counter.get()).isEqualTo(0); assertThat(counter.get()).isEqualTo(0);
} }
@ -211,11 +217,11 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Arrays.asList(measurement1, measurement2), callback); CallbackRegistration.create(Arrays.asList(measurement1, measurement2), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
verify(storage1, never()).recordDouble(anyDouble(), any()); verify(storage1, never()).record(any());
verify(storage2, never()).recordDouble(anyDouble(), any()); verify(storage2, never()).record(any());
verify(storage3, never()).recordDouble(anyDouble(), any()); verify(storage3, never()).record(any());
logs.assertContains("An exception occurred invoking callback"); logs.assertContains("An exception occurred invoking callback");
} }
@ -228,11 +234,11 @@ class CallbackRegistrationTest {
CallbackRegistration callbackRegistration = CallbackRegistration callbackRegistration =
CallbackRegistration.create(Collections.singletonList(measurement2), callback); CallbackRegistration.create(Collections.singletonList(measurement2), callback);
callbackRegistration.invokeCallback(registeredReader); callbackRegistration.invokeCallback(registeredReader, 0, 1);
verify(storage1, never()).recordDouble(anyDouble(), any()); verify(storage1, never()).record(any());
verify(storage2, never()).recordDouble(anyDouble(), any()); verify(storage2, never()).record(any());
verify(storage3, never()).recordDouble(anyDouble(), any()); verify(storage3, never()).record(any());
logs.assertContains("An exception occurred invoking callback"); logs.assertContains("An exception occurred invoking callback");
} }

View File

@ -18,6 +18,7 @@ import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.InstrumentType;
import io.opentelemetry.sdk.metrics.InstrumentValueType; import io.opentelemetry.sdk.metrics.InstrumentValueType;
import io.opentelemetry.sdk.metrics.data.LongExemplarData; import io.opentelemetry.sdk.metrics.data.LongExemplarData;
import io.opentelemetry.sdk.metrics.data.LongPointData;
import io.opentelemetry.sdk.metrics.data.MetricData; import io.opentelemetry.sdk.metrics.data.MetricData;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator; import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory; import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
@ -54,7 +55,7 @@ public class SynchronousMetricStorageTest {
private final RegisteredReader cumulativeReader = private final RegisteredReader cumulativeReader =
RegisteredReader.create(InMemoryMetricReader.create(), ViewRegistry.create()); RegisteredReader.create(InMemoryMetricReader.create(), ViewRegistry.create());
private final TestClock testClock = TestClock.create(); private final TestClock testClock = TestClock.create();
private final Aggregator<Long, LongExemplarData> aggregator = private final Aggregator<LongPointData, LongExemplarData> aggregator =
((AggregatorFactory) Aggregation.sum()) ((AggregatorFactory) Aggregation.sum())
.createAggregator(DESCRIPTOR, ExemplarFilter.alwaysOff()); .createAggregator(DESCRIPTOR, ExemplarFilter.alwaysOff());
private final AttributesProcessor attributesProcessor = AttributesProcessor.noop(); private final AttributesProcessor attributesProcessor = AttributesProcessor.noop();
@ -62,10 +63,10 @@ public class SynchronousMetricStorageTest {
@Test @Test
void attributesProcessor_used() { void attributesProcessor_used() {
AttributesProcessor spyAttributesProcessor = Mockito.spy(this.attributesProcessor); AttributesProcessor spyAttributesProcessor = Mockito.spy(this.attributesProcessor);
SynchronousMetricStorage accumulator = SynchronousMetricStorage storage =
new DefaultSynchronousMetricStorage<>( new DefaultSynchronousMetricStorage<>(
cumulativeReader, METRIC_DESCRIPTOR, aggregator, spyAttributesProcessor); cumulativeReader, METRIC_DESCRIPTOR, aggregator, spyAttributesProcessor);
accumulator.bind(Attributes.empty()); storage.bind(Attributes.empty());
Mockito.verify(spyAttributesProcessor).process(Attributes.empty(), Context.current()); Mockito.verify(spyAttributesProcessor).process(Attributes.empty(), Context.current());
} }
@ -75,12 +76,12 @@ public class SynchronousMetricStorageTest {
AttributesProcessor attributesProcessor = AttributesProcessor attributesProcessor =
AttributesProcessor.append(Attributes.builder().put("modifiedK", "modifiedV").build()); AttributesProcessor.append(Attributes.builder().put("modifiedK", "modifiedV").build());
AttributesProcessor spyLabelsProcessor = Mockito.spy(attributesProcessor); AttributesProcessor spyLabelsProcessor = Mockito.spy(attributesProcessor);
SynchronousMetricStorage accumulator = SynchronousMetricStorage storage =
new DefaultSynchronousMetricStorage<>( new DefaultSynchronousMetricStorage<>(
cumulativeReader, METRIC_DESCRIPTOR, aggregator, spyLabelsProcessor); cumulativeReader, METRIC_DESCRIPTOR, aggregator, spyLabelsProcessor);
BoundStorageHandle handle = accumulator.bind(labels); BoundStorageHandle handle = storage.bind(labels);
handle.recordDouble(1, labels, Context.root()); handle.recordDouble(1, labels, Context.root());
MetricData md = accumulator.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, testClock.now()); MetricData md = storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, testClock.now());
assertThat(md) assertThat(md)
.hasDoubleSumSatisfying( .hasDoubleSumSatisfying(
sum -> sum ->
@ -197,7 +198,7 @@ public class SynchronousMetricStorageTest {
cumulativeReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor); cumulativeReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor);
// Record measurements for max number of attributes // Record measurements for max number of attributes
for (int i = 0; i < MetricStorage.MAX_ACCUMULATIONS; i++) { for (int i = 0; i < MetricStorage.MAX_CARDINALITY; i++) {
storage.recordDouble( storage.recordDouble(
3, Attributes.builder().put("key", "value" + i).build(), Context.current()); 3, Attributes.builder().put("key", "value" + i).build(), Context.current());
} }
@ -207,7 +208,7 @@ public class SynchronousMetricStorageTest {
sum.satisfies( sum.satisfies(
sumData -> sumData ->
assertThat(sumData.getPoints()) assertThat(sumData.getPoints())
.hasSize(MetricStorage.MAX_ACCUMULATIONS) .hasSize(MetricStorage.MAX_CARDINALITY)
.allSatisfy( .allSatisfy(
point -> { point -> {
assertThat(point.getStartEpochNanos()).isEqualTo(0); assertThat(point.getStartEpochNanos()).isEqualTo(0);
@ -220,7 +221,7 @@ public class SynchronousMetricStorageTest {
// Record measurement for additional attribute, exceeding limit // Record measurement for additional attribute, exceeding limit
storage.recordDouble( storage.recordDouble(
3, 3,
Attributes.builder().put("key", "value" + MetricStorage.MAX_ACCUMULATIONS + 1).build(), Attributes.builder().put("key", "value" + MetricStorage.MAX_CARDINALITY + 1).build(),
Context.current()); Context.current());
assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 20)) assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 20))
.hasDoubleSumSatisfying( .hasDoubleSumSatisfying(
@ -228,7 +229,7 @@ public class SynchronousMetricStorageTest {
sum.satisfies( sum.satisfies(
sumData -> sumData ->
assertThat(sumData.getPoints()) assertThat(sumData.getPoints())
.hasSize(MetricStorage.MAX_ACCUMULATIONS) .hasSize(MetricStorage.MAX_CARDINALITY)
.allSatisfy( .allSatisfy(
point -> { point -> {
assertThat(point.getStartEpochNanos()).isEqualTo(0); assertThat(point.getStartEpochNanos()).isEqualTo(0);
@ -240,8 +241,8 @@ public class SynchronousMetricStorageTest {
point point
.getAttributes() .getAttributes()
.get(AttributeKey.stringKey("key")) .get(AttributeKey.stringKey("key"))
.equals("value" + MetricStorage.MAX_ACCUMULATIONS + 1)))); .equals("value" + MetricStorage.MAX_CARDINALITY + 1))));
logs.assertContains("Instrument name has exceeded the maximum allowed accumulations"); logs.assertContains("Instrument name has exceeded the maximum allowed cardinality");
} }
@Test @Test
@ -251,7 +252,7 @@ public class SynchronousMetricStorageTest {
deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor); deltaReader, METRIC_DESCRIPTOR, aggregator, attributesProcessor);
// Record measurements for max number of attributes // Record measurements for max number of attributes
for (int i = 0; i < MetricStorage.MAX_ACCUMULATIONS; i++) { for (int i = 0; i < MetricStorage.MAX_CARDINALITY; i++) {
storage.recordDouble( storage.recordDouble(
3, Attributes.builder().put("key", "value" + i).build(), Context.current()); 3, Attributes.builder().put("key", "value" + i).build(), Context.current());
} }
@ -261,7 +262,7 @@ public class SynchronousMetricStorageTest {
sum.satisfies( sum.satisfies(
sumData -> sumData ->
assertThat(sumData.getPoints()) assertThat(sumData.getPoints())
.hasSize(MetricStorage.MAX_ACCUMULATIONS) .hasSize(MetricStorage.MAX_CARDINALITY)
.allSatisfy( .allSatisfy(
point -> { point -> {
assertThat(point.getStartEpochNanos()).isEqualTo(0); assertThat(point.getStartEpochNanos()).isEqualTo(0);
@ -274,7 +275,7 @@ public class SynchronousMetricStorageTest {
// Record measurement for additional attribute, should not exceed limit due to reset // Record measurement for additional attribute, should not exceed limit due to reset
storage.recordDouble( storage.recordDouble(
3, 3,
Attributes.builder().put("key", "value" + MetricStorage.MAX_ACCUMULATIONS + 1).build(), Attributes.builder().put("key", "value" + MetricStorage.MAX_CARDINALITY + 1).build(),
Context.current()); Context.current());
assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 20)) assertThat(storage.collect(RESOURCE, INSTRUMENTATION_SCOPE_INFO, 0, 20))
.hasDoubleSumSatisfying( .hasDoubleSumSatisfying(
@ -288,13 +289,13 @@ public class SynchronousMetricStorageTest {
.hasValue(3) .hasValue(3)
.hasAttributes( .hasAttributes(
Attributes.builder() Attributes.builder()
.put("key", "value" + MetricStorage.MAX_ACCUMULATIONS + 1) .put("key", "value" + MetricStorage.MAX_CARDINALITY + 1)
.build()))); .build())));
assertThat(logs.getEvents()).isEmpty(); assertThat(logs.getEvents()).isEmpty();
deltaReader.setLastCollectEpochNanos(20); deltaReader.setLastCollectEpochNanos(20);
// Record measurements exceeding max number of attributes. Last measurement should be dropped // Record measurements exceeding max number of attributes. Last measurement should be dropped
for (int i = 0; i < MetricStorage.MAX_ACCUMULATIONS + 1; i++) { for (int i = 0; i < MetricStorage.MAX_CARDINALITY + 1; i++) {
storage.recordDouble( storage.recordDouble(
3, Attributes.builder().put("key", "value" + i).build(), Context.current()); 3, Attributes.builder().put("key", "value" + i).build(), Context.current());
} }
@ -304,7 +305,7 @@ public class SynchronousMetricStorageTest {
sum.satisfies( sum.satisfies(
sumData -> sumData ->
assertThat(sumData.getPoints()) assertThat(sumData.getPoints())
.hasSize(MetricStorage.MAX_ACCUMULATIONS) .hasSize(MetricStorage.MAX_CARDINALITY)
.allSatisfy( .allSatisfy(
point -> { point -> {
assertThat(point.getStartEpochNanos()).isEqualTo(20); assertThat(point.getStartEpochNanos()).isEqualTo(20);
@ -316,7 +317,7 @@ public class SynchronousMetricStorageTest {
point point
.getAttributes() .getAttributes()
.get(AttributeKey.stringKey("key")) .get(AttributeKey.stringKey("key"))
.equals("value" + MetricStorage.MAX_ACCUMULATIONS + 1)))); .equals("value" + MetricStorage.MAX_CARDINALITY + 1))));
logs.assertContains("Instrument name has exceeded the maximum allowed accumulations"); logs.assertContains("Instrument name has exceeded the maximum allowed cardinality");
} }
} }