Add MaxScale config parameter to ExponentialHistogram (#5044)

Authored by jack-berg on 2023-01-06 09:36:48 -06:00; committed by GitHub
parent 49f4cf093d
commit b08edb6504
10 changed files with 62 additions and 32 deletions

View File

@@ -207,7 +207,7 @@ public final class ViewConfig {
throw new ConfigurationException("max_buckets must be an integer", e);
}
if (maxBuckets != null) {
return ExponentialHistogramAggregation.create(maxBuckets);
return ExponentialHistogramAggregation.create(maxBuckets, 20);
}
}
return aggregation;

View File

@@ -113,7 +113,8 @@ public class HistogramCollectBenchmark {
@SuppressWarnings("ImmutableEnumChecker")
public enum AggregationGenerator {
EXPLICIT_BUCKET_HISTOGRAM(Aggregation.explicitBucketHistogram()),
EXPONENTIAL_BUCKET_HISTOGRAM(ExponentialHistogramAggregation.getDefault());
DEFAULT_EXPONENTIAL_BUCKET_HISTOGRAM(ExponentialHistogramAggregation.getDefault()),
ZERO_MAX_SCALE_EXPONENTIAL_BUCKET_HISTOGRAM(ExponentialHistogramAggregation.create(160, 0));
private final Aggregation aggregation;
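The new ZERO_MAX_SCALE_EXPONENTIAL_BUCKET_HISTOGRAM case exercises the cheaper indexing path for non-positive scales that the new create(maxBuckets, maxScale) javadoc mentions below. As a rough illustration of why scale <= 0 is faster (a sketch of the spec's reference mapping for positive, normal doubles; not the SDK's internal indexer API):

// Sketch only: base-2 exponential bucket index for a positive, normal double.
static int bucketIndex(double value, int scale) {
  if (scale <= 0) {
    // Non-positive scales need just the IEEE 754 exponent and a shift; no Math.log per recorded value.
    int exp = Math.getExponent(value);
    if ((Double.doubleToLongBits(value) << 12) == 0) {
      exp--; // exact powers of two sit on a bucket boundary and belong to the lower bucket
    }
    return exp >> -scale;
  }
  // Positive scales compute a logarithm for every recorded value.
  double scaleFactor = Math.scalb(1.0 / Math.log(2), scale); // 2^scale / ln(2)
  return (int) Math.ceil(Math.log(value) * scaleFactor) - 1;
}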

View File

@@ -35,7 +35,7 @@ public final class DoubleExponentialHistogramAggregator
private final Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier;
private final int maxBuckets;
private final int startingScale;
private final int maxScale;
/**
* Constructs an exponential histogram aggregator.
@@ -45,15 +45,15 @@ public final class DoubleExponentialHistogramAggregator
public DoubleExponentialHistogramAggregator(
Supplier<ExemplarReservoir<DoubleExemplarData>> reservoirSupplier,
int maxBuckets,
int startingScale) {
int maxScale) {
this.reservoirSupplier = reservoirSupplier;
this.maxBuckets = maxBuckets;
this.startingScale = startingScale;
this.maxScale = maxScale;
}
@Override
public AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> createHandle() {
return new Handle(reservoirSupplier.get(), maxBuckets, startingScale);
return new Handle(reservoirSupplier.get(), maxBuckets, maxScale);
}
/**
@@ -187,7 +187,7 @@ public final class DoubleExponentialHistogramAggregator
private long count;
private int scale;
Handle(ExemplarReservoir<DoubleExemplarData> reservoir, int maxBuckets, int startingScale) {
Handle(ExemplarReservoir<DoubleExemplarData> reservoir, int maxBuckets, int maxScale) {
super(reservoir);
this.maxBuckets = maxBuckets;
this.sum = 0;
@@ -195,7 +195,7 @@ public final class DoubleExponentialHistogramAggregator
this.min = Double.MAX_VALUE;
this.max = -1;
this.count = 0;
this.scale = startingScale;
this.scale = maxScale;
}
@Override

View File

@@ -25,10 +25,10 @@ final class DoubleExponentialHistogramBuckets implements ExponentialHistogramBuc
private ExponentialHistogramIndexer exponentialHistogramIndexer;
private long totalCount;
DoubleExponentialHistogramBuckets(int startingScale, int maxBuckets) {
DoubleExponentialHistogramBuckets(int scale, int maxBuckets) {
this.counts = new AdaptingCircularBufferCounter(maxBuckets);
this.scale = startingScale;
this.exponentialHistogramIndexer = ExponentialHistogramIndexer.get(scale);
this.scale = scale;
this.exponentialHistogramIndexer = ExponentialHistogramIndexer.get(this.scale);
this.totalCount = 0;
}
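With the aggregator now seeding each handle at maxScale, these buckets only ever scale down: whenever a new measurement's index would not fit inside the maxBuckets window, the scale is reduced until it does. A minimal sketch of that reduction (illustrative only; the SDK keeps its own version of this logic in this class):

// How many one-step downscales are needed so the populated index range fits in maxBuckets.
// Each downscale halves bucket indices (index >> 1), i.e. merges adjacent pairs of buckets.
static int scaleReduction(long lowestIndex, long highestIndex, int maxBuckets) {
  int by = 0;
  while (highestIndex - lowestIndex + 1 > maxBuckets) {
    lowestIndex >>= 1;
    highestIndex >>= 1;
    by++;
  }
  return by;
}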

View File

@@ -11,6 +11,7 @@ import io.opentelemetry.sdk.common.Clock;
import io.opentelemetry.sdk.internal.RandomSupplier;
import io.opentelemetry.sdk.metrics.Aggregation;
import io.opentelemetry.sdk.metrics.data.ExemplarData;
import io.opentelemetry.sdk.metrics.data.MetricDataType;
import io.opentelemetry.sdk.metrics.internal.aggregator.Aggregator;
import io.opentelemetry.sdk.metrics.internal.aggregator.AggregatorFactory;
import io.opentelemetry.sdk.metrics.internal.aggregator.DoubleExponentialHistogramAggregator;
@@ -27,24 +28,38 @@ import io.opentelemetry.sdk.metrics.internal.exemplar.ExemplarReservoir;
public final class ExponentialHistogramAggregation implements Aggregation, AggregatorFactory {
private static final int DEFAULT_MAX_BUCKETS = 160;
private static final int DEFAULT_STARTING_SCALE = 20;
private static final int DEFAULT_MAX_SCALE = 20;
private static final Aggregation DEFAULT =
new ExponentialHistogramAggregation(DEFAULT_MAX_BUCKETS);
new ExponentialHistogramAggregation(DEFAULT_MAX_BUCKETS, DEFAULT_MAX_SCALE);
private final int maxBuckets;
private final int maxScale;
private ExponentialHistogramAggregation(int maxBuckets) {
private ExponentialHistogramAggregation(int maxBuckets, int maxScale) {
this.maxBuckets = maxBuckets;
this.maxScale = maxScale;
}
public static Aggregation getDefault() {
return DEFAULT;
}
public static Aggregation create(int maxBuckets) {
/**
* Aggregates measurements into an {@link MetricDataType#EXPONENTIAL_HISTOGRAM}.
*
* @param maxBuckets the max number of positive buckets and negative buckets (max total buckets is
* 2 * {@code maxBuckets} + 1 zero bucket).
* @param maxScale the maximum and initial scale. If measurements can't fit in a particular scale
* given the {@code maxBuckets}, the scale is reduced until the measurements can be
* accommodated. Setting a lower {@code maxScale} may reduce the number of downscales.
* Additionally, computing the bucket index is faster when the scale is <= 0.
* @return the aggregation
*/
public static Aggregation create(int maxBuckets, int maxScale) {
checkArgument(maxBuckets >= 1, "maxBuckets must be > 0");
return new ExponentialHistogramAggregation(maxBuckets);
checkArgument(maxScale <= 20 && maxScale >= -10, "maxScale must be -10 <= x <= 20");
return new ExponentialHistogramAggregation(maxBuckets, maxScale);
}
@Override
@@ -61,7 +76,7 @@ public final class ExponentialHistogramAggregation implements Aggregation, AggregatorFactory {
Runtime.getRuntime().availableProcessors(),
RandomSupplier.platformDefault())),
maxBuckets,
DEFAULT_STARTING_SCALE);
maxScale);
}
@Override
@@ -77,6 +92,10 @@ public final class ExponentialHistogramAggregation implements Aggregation, AggregatorFactory {
@Override
public String toString() {
return "ExponentialHistogramAggregation{maxBuckets=" + maxBuckets + "}";
return "ExponentialHistogramAggregation{maxBuckets="
+ maxBuckets
+ ",maxScale="
+ maxScale
+ "}";
}
}
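Putting the new parameter to use: a minimal sketch of registering a view that applies the two-argument factory (imports elided; the meter and instrument names and the in-memory reader are placeholders, and at this commit ExponentialHistogramAggregation still lives in the SDK's internal view package, per the TODO in AggregationTest below):

// Record every histogram instrument as a base-2 exponential histogram with at most
// 80 positive and 80 negative buckets, starting at (and never exceeding) scale 10.
SdkMeterProvider meterProvider =
    SdkMeterProvider.builder()
        .registerMetricReader(InMemoryMetricReader.create())
        .registerView(
            InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(),
            View.builder()
                .setAggregation(ExponentialHistogramAggregation.create(80, 10))
                .build())
        .build();
DoubleHistogram latency = meterProvider.get("my-meter").histogramBuilder("latency").build();
latency.record(3.14);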

View File

@@ -31,10 +31,10 @@ class AggregationTest {
// TODO(jack-berg): Use Aggregation.exponentialHistogram() when available
assertThat(ExponentialHistogramAggregation.getDefault())
.asString()
.isEqualTo("ExponentialHistogramAggregation{maxBuckets=160}");
assertThat(ExponentialHistogramAggregation.create(1))
.isEqualTo("ExponentialHistogramAggregation{maxBuckets=160,maxScale=20}");
assertThat(ExponentialHistogramAggregation.create(1, 0))
.asString()
.isEqualTo("ExponentialHistogramAggregation{maxBuckets=1}");
.isEqualTo("ExponentialHistogramAggregation{maxBuckets=1,maxScale=0}");
}
@Test

View File

@@ -208,7 +208,9 @@ class SdkDoubleHistogramTest {
.registerMetricReader(sdkMeterReader)
.registerView(
InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(),
View.builder().setAggregation(ExponentialHistogramAggregation.create(5)).build())
View.builder()
.setAggregation(ExponentialHistogramAggregation.create(5, 20))
.build())
.build();
DoubleHistogram doubleHistogram =
sdkMeterProvider

View File

@@ -208,7 +208,9 @@ class SdkLongHistogramTest {
.registerMetricReader(sdkMeterReader)
.registerView(
InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(),
View.builder().setAggregation(ExponentialHistogramAggregation.create(5)).build())
View.builder()
.setAggregation(ExponentialHistogramAggregation.create(5, 20))
.build())
.build();
LongHistogram longHistogram =
sdkMeterProvider

View File

@@ -49,7 +49,7 @@ class DoubleExponentialHistogramAggregatorTest {
@Mock ExemplarReservoir<DoubleExemplarData> reservoir;
private static final int STARTING_SCALE = 20;
private static final int MAX_SCALE = 20;
private static final DoubleExponentialHistogramAggregator aggregator =
new DoubleExponentialHistogramAggregator(ExemplarReservoir::doubleNoSamples, 160, 20);
private static final Resource RESOURCE = Resource.getDefault();
@@ -62,7 +62,7 @@ class DoubleExponentialHistogramAggregatorTest {
return Stream.of(
aggregator,
new DoubleExponentialHistogramAggregator(
ExemplarReservoir::doubleNoSamples, 160, STARTING_SCALE));
ExemplarReservoir::doubleNoSamples, 160, MAX_SCALE));
}
private static int valueToIndex(int scale, double value) {
@@ -89,10 +89,10 @@ class DoubleExponentialHistogramAggregatorTest {
.doAccumulateThenReset(Collections.emptyList());
assertThat(accumulation.getPositiveBuckets())
.isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class);
assertThat(accumulation.getPositiveBuckets().getScale()).isEqualTo(STARTING_SCALE);
assertThat(accumulation.getPositiveBuckets().getScale()).isEqualTo(MAX_SCALE);
assertThat(accumulation.getNegativeBuckets())
.isInstanceOf(DoubleExponentialHistogramAggregator.EmptyExponentialHistogramBuckets.class);
assertThat(accumulation.getNegativeBuckets().getScale()).isEqualTo(STARTING_SCALE);
assertThat(accumulation.getNegativeBuckets().getScale()).isEqualTo(MAX_SCALE);
}
@Test
@@ -199,7 +199,7 @@ class DoubleExponentialHistogramAggregatorTest {
@Test
void testExemplarsInAccumulation() {
DoubleExponentialHistogramAggregator agg =
new DoubleExponentialHistogramAggregator(() -> reservoir, 160, STARTING_SCALE);
new DoubleExponentialHistogramAggregator(() -> reservoir, 160, MAX_SCALE);
Attributes attributes = Attributes.builder().put("test", "value").build();
DoubleExemplarData exemplar =
@@ -437,7 +437,7 @@ class DoubleExponentialHistogramAggregatorTest {
Mockito.when(reservoirSupplier.get()).thenReturn(reservoir);
DoubleExponentialHistogramAggregator cumulativeAggregator =
new DoubleExponentialHistogramAggregator(reservoirSupplier, 160, STARTING_SCALE);
new DoubleExponentialHistogramAggregator(reservoirSupplier, 160, MAX_SCALE);
AggregatorHandle<ExponentialHistogramAccumulation, DoubleExemplarData> aggregatorHandle =
cumulativeAggregator.createHandle();

View File

@@ -15,13 +15,19 @@ class ExponentialHistogramAggregationTest {
@Test
void goodConfig() {
assertThat(ExponentialHistogramAggregation.getDefault()).isNotNull();
assertThat(ExponentialHistogramAggregation.create(10)).isNotNull();
assertThat(ExponentialHistogramAggregation.create(10, 20)).isNotNull();
}
@Test
void badBuckets_throwArgumentException() {
assertThatThrownBy(() -> ExponentialHistogramAggregation.create(0))
void invalidConfig_Throws() {
assertThatThrownBy(() -> ExponentialHistogramAggregation.create(0, 20))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("maxBuckets must be > 0");
assertThatThrownBy(() -> ExponentialHistogramAggregation.create(1, 21))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("maxScale must be -10 <= x <= 20");
assertThatThrownBy(() -> ExponentialHistogramAggregation.create(1, -11))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("maxScale must be -10 <= x <= 20");
}
}
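For intuition on the -10 to 20 bounds asserted above (a back-of-the-envelope check, not a statement of the SDK's design rationale): the bucket base of a base-2 exponential histogram is 2^(2^-scale), and a value's bucket index is roughly log2(value) * 2^scale.

// base = 2^(2^-scale); index ~ log2(value) * 2^scale
double baseAtScale20 = Math.pow(2, Math.pow(2, -20));    // ~1.00000066: extremely fine buckets
double baseAtScaleMinus10 = Math.pow(2, Math.pow(2, 10)); // 2^1024 overflows a double (Infinity);
                                                          // a single bucket spans every finite double
long maxIndexAtScale20 = (long) Math.ceil(1024 * Math.pow(2, 20)); // ~2^30, still fits in an int;
                                                                   // one more scale step would not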