rename DoubleSum -> Sum (#3583)

* rename DoubleSum -> Sum
* update changelog
* revert pict changes

This commit is contained in:
parent e69cebab8e
commit aa60edff16
@@ -2,6 +2,10 @@
 ## Unreleased

+## 🛑 Breaking changes 🛑
+
+- Rename `pdata.DoubleSum` to `pdata.Sum` (#3583)
+
 ## 🧰 Bug fixes 🧰

 - `scraperhelper`: Include the scraper name in log messages (#3487)
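For anyone migrating against this breaking change, the rename is purely mechanical. Below is a minimal sketch of a consumer call site after this commit; every pdata call appears verbatim elsewhere in the diff, while the helper name and the import path are assumptions and may differ between collector versions.

```go
package main

// Assumed import path for the pdata package at this point in the collector's
// history; verify it against the collector version you build against.
import "go.opentelemetry.io/collector/consumer/pdata"

// buildCounter is a hypothetical helper that shows the renamed API in use.
func buildCounter() pdata.Metric {
	metric := pdata.NewMetric()
	metric.SetName("my.counter")
	metric.SetDataType(pdata.MetricDataTypeSum) // was pdata.MetricDataTypeDoubleSum
	metric.Sum().SetIsMonotonic(true)           // was metric.DoubleSum().SetIsMonotonic(true)
	metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
	dp := metric.Sum().DataPoints().AppendEmpty() // was metric.DoubleSum().DataPoints()
	dp.SetValue(42.42)
	return metric
}
```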
@@ -40,7 +40,7 @@ metrics:
 description: Total CPU seconds broken down by different states.
 unit: s
 data:
-type: double sum
+type: sum
 monotonic: true
 aggregation: cumulative
 labels: [freeFormLabel, freeFormLabelWithValue, enumLabel]

@@ -53,7 +53,7 @@ metrics:
 description: Total CPU seconds broken down by different states.
 unit: s
 data:
-type: double sum
+type: sum
 monotonic: true
 aggregation: cumulative
 labels: [missing]

@@ -97,7 +97,7 @@ func Test_loadMetadata(t *testing.T) {
 "system.cpu.time": {
 Description: "Total CPU seconds broken down by different states.",
 Unit: "s",
-Data: &doubleSum{
+Data: &sum{
 Aggregated: Aggregated{Aggregation: "cumulative"},
 Mono: Mono{Monotonic: true},
 },
@@ -31,7 +31,7 @@ metrics:
 description: Total CPU seconds broken down by different states.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 labels: []
 `

@@ -23,7 +23,7 @@ var (
 _ MetricData = &intSum{}
 _ MetricData = &intHistogram{}
 _ MetricData = &doubleGauge{}
-_ MetricData = &doubleSum{}
+_ MetricData = &sum{}
 _ MetricData = &histogram{}
 )

@@ -52,8 +52,8 @@ func (e *ymlMetricData) UnmarshalYAML(unmarshal func(interface{}) error) error {
 md = &intHistogram{}
 case "double gauge":
 md = &doubleGauge{}
-case "double sum":
-md = &doubleSum{}
+case "sum":
+md = &sum{}
 case "histogram":
 md = &histogram{}
 default:

@@ -148,20 +148,20 @@ func (i intSum) HasAggregated() bool {
 return true
 }

-type doubleSum struct {
+type sum struct {
 Aggregated `yaml:",inline"`
 Mono `yaml:",inline"`
 }

-func (d doubleSum) Type() string {
-return "DoubleSum"
+func (d sum) Type() string {
+return "Sum"
 }

-func (d doubleSum) HasMonotonic() bool {
+func (d sum) HasMonotonic() bool {
 return true
 }

-func (d doubleSum) HasAggregated() bool {
+func (d sum) HasAggregated() bool {
 return true
 }
@@ -31,7 +31,7 @@ func TestMetricData(t *testing.T) {
 {&intSum{}, "IntSum", true, true},
 {&intHistogram{}, "IntHistogram", true, false},
 {&doubleGauge{}, "DoubleGauge", false, false},
-{&doubleSum{}, "DoubleSum", true, true},
+{&sum{}, "Sum", true, true},
 {&histogram{}, "Histogram", true, false},
 } {
 assert.Equal(t, arg.typ, arg.metricData.Type())

@@ -169,8 +169,8 @@ var intSum = &messageValueStruct{
 }

 var doubleSum = &messageValueStruct{
-structName: "DoubleSum",
-description: "// DoubleSum represents the type of a numeric double scalar metric that is calculated as a sum of all reported measurements over a time interval.",
+structName: "Sum",
+description: "// Sum represents the type of a numeric double scalar metric that is calculated as a sum of all reported measurements over a time interval.",
 originFullName: "otlpmetrics.Sum",
 fields: []baseField{
 aggregationTemporalityField,
@@ -23,11 +23,11 @@ metrics:
 unit:
 # Required
 data:
-# Required: one of int gauge, int sum, int histogram, double gauge, double sum, or histogram.
+# Required: one of int gauge, int sum, int histogram, double gauge, sum, or histogram.
 type:
-# Required for int sum and double sum.
+# Required for int sum and sum.
 monotonic: # true | false
-# Required for int sum, int histogram, double sum, and histogram.
+# Required for int sum, int histogram, sum, and histogram.
 aggregation: # delta | cumulative
 # Optional: array of labels that were defined in the labels section that are emitted by this metric.
 labels:
@@ -77,8 +77,8 @@ func addLabelsToMetric(metric *pdata.Metric, labelMap pdata.StringMap) {
 addLabelsToDoubleDataPoints(metric.DoubleGauge().DataPoints(), labelMap)
 case pdata.MetricDataTypeIntSum:
 addLabelsToIntDataPoints(metric.IntSum().DataPoints(), labelMap)
-case pdata.MetricDataTypeDoubleSum:
-addLabelsToDoubleDataPoints(metric.DoubleSum().DataPoints(), labelMap)
+case pdata.MetricDataTypeSum:
+addLabelsToDoubleDataPoints(metric.Sum().DataPoints(), labelMap)
 case pdata.MetricDataTypeIntHistogram:
 addLabelsToIntHistogramDataPoints(metric.IntHistogram().DataPoints(), labelMap)
 case pdata.MetricDataTypeHistogram:

@@ -48,7 +48,7 @@ func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
 assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).DoubleGauge().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).IntGauge().DataPoints().At(0).LabelsMap().Len())
-assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).DoubleSum().DataPoints().At(0).LabelsMap().Len())
+assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())

@@ -59,7 +59,7 @@ func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).Resource().Attributes().Len())
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).DoubleGauge().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).IntGauge().DataPoints().At(0).LabelsMap().Len())
-assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).DoubleSum().DataPoints().At(0).LabelsMap().Len())
+assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())

@@ -67,7 +67,7 @@ func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
 assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).DoubleGauge().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).IntGauge().DataPoints().At(0).LabelsMap().Len())
-assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).DoubleSum().DataPoints().At(0).LabelsMap().Len())
+assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
 assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())
@@ -88,8 +88,8 @@ func (a *lastValueAccumulator) addMetric(metric pdata.Metric, il pdata.Instrumen
 return a.accumulateIntSum(metric, il, now)
 case pdata.MetricDataTypeDoubleGauge:
 return a.accumulateDoubleGauge(metric, il, now)
-case pdata.MetricDataTypeDoubleSum:
-return a.accumulateDoubleSum(metric, il, now)
+case pdata.MetricDataTypeSum:
+return a.accumulateSum(metric, il, now)
 case pdata.MetricDataTypeIntHistogram:
 return a.accumulateIntHistogram(metric, il, now)
 case pdata.MetricDataTypeHistogram:

@@ -232,8 +232,8 @@ func (a *lastValueAccumulator) accumulateIntSum(metric pdata.Metric, il pdata.In
 return
 }

-func (a *lastValueAccumulator) accumulateDoubleSum(metric pdata.Metric, il pdata.InstrumentationLibrary, now time.Time) (n int) {
-doubleSum := metric.DoubleSum()
+func (a *lastValueAccumulator) accumulateSum(metric pdata.Metric, il pdata.InstrumentationLibrary, now time.Time) (n int) {
+doubleSum := metric.Sum()

 // Drop metrics with non-cumulative aggregations
 if doubleSum.AggregationTemporality() != pdata.AggregationTemporalityCumulative {

@@ -249,24 +249,24 @@ func (a *lastValueAccumulator) accumulateDoubleSum(metric pdata.Metric, il pdata
 v, ok := a.registeredMetrics.Load(signature)
 if !ok {
 m := createMetric(metric)
-m.DoubleSum().SetIsMonotonic(metric.DoubleSum().IsMonotonic())
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
-ip.CopyTo(m.DoubleSum().DataPoints().AppendEmpty())
+m.Sum().SetIsMonotonic(metric.Sum().IsMonotonic())
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+ip.CopyTo(m.Sum().DataPoints().AppendEmpty())
 a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
 n++
 continue
 }
 mv := v.(*accumulatedValue)

-if ip.Timestamp().AsTime().Before(mv.value.DoubleSum().DataPoints().At(0).Timestamp().AsTime()) {
+if ip.Timestamp().AsTime().Before(mv.value.Sum().DataPoints().At(0).Timestamp().AsTime()) {
 // only keep datapoint with latest timestamp
 continue
 }

 m := createMetric(metric)
-m.DoubleSum().SetIsMonotonic(metric.DoubleSum().IsMonotonic())
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
-ip.CopyTo(m.DoubleSum().DataPoints().AppendEmpty())
+m.Sum().SetIsMonotonic(metric.Sum().IsMonotonic())
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+ip.CopyTo(m.Sum().DataPoints().AppendEmpty())
 a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
 n++
 }
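The renamed accumulateSum keeps only cumulative Sum metrics and, for an already registered series, only the data point with the newest timestamp. The sketch below shows the two kinds of input it distinguishes; the function names and import path are illustrative, while the pdata calls are the ones used in the hunks above.

```go
package main

import "go.opentelemetry.io/collector/consumer/pdata" // assumed import path

// The accumulator above drops this metric: delta temporality is rejected.
func fillDeltaSum(metric pdata.Metric) {
	metric.SetDataType(pdata.MetricDataTypeSum)
	metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
	metric.Sum().DataPoints().AppendEmpty().SetValue(1)
}

// The accumulator above accepts this metric and stores its latest data point.
func fillCumulativeSum(metric pdata.Metric) {
	metric.SetDataType(pdata.MetricDataTypeSum)
	metric.Sum().SetIsMonotonic(true)
	metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
	metric.Sum().DataPoints().AppendEmpty().SetValue(1)
}
```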
@@ -52,12 +52,12 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
 },
 },
 {
-name: "DoubleSum",
+name: "Sum",
 fillMetric: func(ts time.Time, metric pdata.Metric) {
 metric.SetName("test_metric")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(42.42)
 dp.LabelsMap().Insert("label_1", "1")
 dp.LabelsMap().Insert("label_2", "2")

@@ -168,15 +168,15 @@ func TestAccumulateMetrics(t *testing.T) {
 },
 },
 {
-name: "DoubleSum",
+name: "Sum",
 metric: func(ts time.Time, v float64, metrics pdata.MetricSlice) {
 metric := metrics.AppendEmpty()
 metric.SetName("test_metric")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(false)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(false)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 metric.SetDescription("test description")
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(v)
 dp.LabelsMap().Insert("label_1", "1")
 dp.LabelsMap().Insert("label_2", "2")

@@ -200,15 +200,15 @@ func TestAccumulateMetrics(t *testing.T) {
 },
 },
 {
-name: "MonotonicDoubleSum",
+name: "MonotonicSum",
 metric: func(ts time.Time, v float64, metrics pdata.MetricSlice) {
 metric := metrics.AppendEmpty()
 metric.SetName("test_metric")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 metric.SetDescription("test description")
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(v)
 dp.LabelsMap().Insert("label_1", "1")
 dp.LabelsMap().Insert("label_2", "2")

@@ -345,12 +345,12 @@ func getMerticProperties(metric pdata.Metric) (
 value = metric.DoubleGauge().DataPoints().At(0).Value()
 temporality = pdata.AggregationTemporalityUnspecified
 isMonotonic = false
-case pdata.MetricDataTypeDoubleSum:
-labels = metric.DoubleSum().DataPoints().At(0).LabelsMap()
-ts = metric.DoubleSum().DataPoints().At(0).Timestamp().AsTime()
-value = metric.DoubleSum().DataPoints().At(0).Value()
-temporality = metric.DoubleSum().AggregationTemporality()
-isMonotonic = metric.DoubleSum().IsMonotonic()
+case pdata.MetricDataTypeSum:
+labels = metric.Sum().DataPoints().At(0).LabelsMap()
+ts = metric.Sum().DataPoints().At(0).Timestamp().AsTime()
+value = metric.Sum().DataPoints().At(0).Value()
+temporality = metric.Sum().AggregationTemporality()
+isMonotonic = metric.Sum().IsMonotonic()
 case pdata.MetricDataTypeIntHistogram:
 labels = metric.IntHistogram().DataPoints().At(0).LabelsMap()
 ts = metric.IntHistogram().DataPoints().At(0).Timestamp().AsTime()
@@ -64,8 +64,8 @@ func (c *collector) convertMetric(metric pdata.Metric) (prometheus.Metric, error
 return c.convertIntSum(metric)
 case pdata.MetricDataTypeDoubleGauge:
 return c.convertDoubleGauge(metric)
-case pdata.MetricDataTypeDoubleSum:
-return c.convertDoubleSum(metric)
+case pdata.MetricDataTypeSum:
+return c.convertSum(metric)
 case pdata.MetricDataTypeIntHistogram:
 return c.convertIntHistogram(metric)
 case pdata.MetricDataTypeHistogram:

@@ -152,11 +152,11 @@ func (c *collector) convertIntSum(metric pdata.Metric) (prometheus.Metric, error
 return m, nil
 }

-func (c *collector) convertDoubleSum(metric pdata.Metric) (prometheus.Metric, error) {
-ip := metric.DoubleSum().DataPoints().At(0)
+func (c *collector) convertSum(metric pdata.Metric) (prometheus.Metric, error) {
+ip := metric.Sum().DataPoints().At(0)

 metricType := prometheus.GaugeValue
-if metric.DoubleSum().IsMonotonic() {
+if metric.Sum().IsMonotonic() {
 metricType = prometheus.CounterValue
 }
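As the renamed convertSum shows, a non-monotonic Sum is exported as a Prometheus gauge while a monotonic Sum becomes a counter. The sketch below builds a metric that the code above would export with prometheus.CounterValue; the function name and import path are assumptions, the pdata calls are taken from this commit.

```go
package main

import "go.opentelemetry.io/collector/consumer/pdata" // assumed import path

// monotonicSumExample builds a cumulative, monotonic Sum; convertSum above
// would pick prometheus.CounterValue for it.
func monotonicSumExample() pdata.Metric {
	metric := pdata.NewMetric()
	metric.SetName("requests_total")
	metric.SetDataType(pdata.MetricDataTypeSum)
	metric.Sum().SetIsMonotonic(true)
	metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
	metric.Sum().DataPoints().AppendEmpty().SetValue(42.42)
	return metric
}
```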
@@ -69,7 +69,7 @@ func TestConvertInvalidMetric(t *testing.T) {
 for _, mType := range []pdata.MetricDataType{
 pdata.MetricDataTypeHistogram,
 pdata.MetricDataTypeIntHistogram,
-pdata.MetricDataTypeDoubleSum,
+pdata.MetricDataTypeSum,
 pdata.MetricDataTypeIntSum,
 pdata.MetricDataTypeDoubleGauge,
 pdata.MetricDataTypeIntGauge,

@@ -83,8 +83,8 @@ func TestConvertInvalidMetric(t *testing.T) {
 metric.IntSum().DataPoints().AppendEmpty()
 case pdata.MetricDataTypeDoubleGauge:
 metric.DoubleGauge().DataPoints().AppendEmpty()
-case pdata.MetricDataTypeDoubleSum:
-metric.DoubleSum().DataPoints().AppendEmpty()
+case pdata.MetricDataTypeSum:
+metric.Sum().DataPoints().AppendEmpty()
 case pdata.MetricDataTypeIntHistogram:
 metric.IntHistogram().DataPoints().AppendEmpty()
 case pdata.MetricDataTypeHistogram:

@@ -225,17 +225,17 @@ func TestCollectMetrics(t *testing.T) {
 },
 },
 {
-name: "DoubleSum",
+name: "Sum",
 metricType: prometheus.GaugeValue,
 value: 42.42,
 metric: func(ts time.Time) (metric pdata.Metric) {
 metric = pdata.NewMetric()
 metric.SetName("test_metric")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(false)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(false)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 metric.SetDescription("test description")
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(42.42)
 dp.LabelsMap().Insert("label_1", "1")
 dp.LabelsMap().Insert("label_2", "2")
@@ -265,17 +265,17 @@ func TestCollectMetrics(t *testing.T) {
 },
 },
 {
-name: "MonotonicDoubleSum",
+name: "MonotonicSum",
 metricType: prometheus.CounterValue,
 value: 42.42,
 metric: func(ts time.Time) (metric pdata.Metric) {
 metric = pdata.NewMetric()
 metric.SetName("test_metric")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 metric.SetDescription("test description")
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(42.42)
 dp.LabelsMap().Insert("label_1", "1")
 dp.LabelsMap().Insert("label_2", "2")

@@ -142,8 +142,8 @@ func (prwe *PRWExporter) PushMetrics(ctx context.Context, md pdata.Metrics) erro
 dropped++
 errs = append(errs, err)
 }
-case pdata.MetricDataTypeDoubleSum:
-dataPoints := metric.DoubleSum().DataPoints()
+case pdata.MetricDataTypeSum:
+dataPoints := metric.Sum().DataPoints()
 if err := prwe.addDoubleDataPointSlice(dataPoints, tsMap, resource, metric); err != nil {
 dropped++
 errs = append(errs, err)
@@ -357,7 +357,7 @@ func Test_PushMetrics(t *testing.T) {
 // success cases
 intSumBatch := testdata.GenerateMetricsManyMetricsSameResource(10)

-doubleSumBatch := getMetricsFromMetricList(validMetrics1[validDoubleSum], validMetrics2[validDoubleSum])
+sumBatch := getMetricsFromMetricList(validMetrics1[validSum], validMetrics2[validSum])

 intGaugeBatch := getMetricsFromMetricList(validMetrics1[validIntGauge], validMetrics2[validIntGauge])

@@ -381,7 +381,7 @@ func Test_PushMetrics(t *testing.T) {

 emptyCumulativeIntSumBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeIntSum])

-emptyCumulativeDoubleSumBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeDoubleSum])
+emptyCumulativeSumBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeSum])

 emptyCumulativeIntHistogramBatch := getMetricsFromMetricList(invalidMetrics[emptyCumulativeIntHistogram])

@@ -434,7 +434,7 @@ func Test_PushMetrics(t *testing.T) {
 },
 {
 "doubleSum_case",
-&doubleSumBatch,
+&sumBatch,
 checkFunc,
 2,
 http.StatusAccepted,

@@ -521,8 +521,8 @@ func Test_PushMetrics(t *testing.T) {
 true,
 },
 {
-"emptyCumulativeDoubleSum_case",
-&emptyCumulativeDoubleSumBatch,
+"emptyCumulativeSum_case",
+&emptyCumulativeSumBatch,
 checkFunc,
 0,
 http.StatusAccepted,
@@ -55,8 +55,8 @@ func validateMetrics(metric pdata.Metric) bool {
 return metric.DoubleGauge().DataPoints().Len() != 0
 case pdata.MetricDataTypeIntGauge:
 return metric.IntGauge().DataPoints().Len() != 0
-case pdata.MetricDataTypeDoubleSum:
-return metric.DoubleSum().DataPoints().Len() != 0 && metric.DoubleSum().AggregationTemporality() == pdata.AggregationTemporalityCumulative
+case pdata.MetricDataTypeSum:
+return metric.Sum().DataPoints().Len() != 0 && metric.Sum().AggregationTemporality() == pdata.AggregationTemporalityCumulative
 case pdata.MetricDataTypeIntSum:
 return metric.IntSum().DataPoints().Len() != 0 && metric.IntSum().AggregationTemporality() == pdata.AggregationTemporalityCumulative
 case pdata.MetricDataTypeHistogram:

@@ -89,7 +89,7 @@ var (
 validIntGauge = "valid_IntGauge"
 validDoubleGauge = "valid_DoubleGauge"
 validIntSum = "valid_IntSum"
-validDoubleSum = "valid_DoubleSum"
+validSum = "valid_Sum"
 validIntHistogram = "valid_IntHistogram"
 validHistogram = "valid_Histogram"
 validSummary = "valid_Summary"

@@ -106,7 +106,7 @@ var (
 validDoubleGauge: getDoubleGaugeMetric(validDoubleGauge, lbs1, floatVal1, time1),
 validIntSum: getIntSumMetric(validIntSum, lbs1, intVal1, time1),
 suffixedCounter: getIntSumMetric(suffixedCounter, lbs1, intVal1, time1),
-validDoubleSum: getDoubleSumMetric(validDoubleSum, lbs1, floatVal1, time1),
+validSum: getSumMetric(validSum, lbs1, floatVal1, time1),
 validIntHistogram: getIntHistogramMetric(validIntHistogram, lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets),
 validHistogram: getHistogramMetric(validHistogram, lbs1, time1, floatVal1, uint64(intVal1), bounds, buckets),
 validSummary: getSummaryMetric(validSummary, lbs1, time1, floatVal1, uint64(intVal1), quantiles),

@@ -115,7 +115,7 @@ var (
 validIntGauge: getIntGaugeMetric(validIntGauge, lbs2, intVal2, time2),
 validDoubleGauge: getDoubleGaugeMetric(validDoubleGauge, lbs2, floatVal2, time2),
 validIntSum: getIntSumMetric(validIntSum, lbs2, intVal2, time2),
-validDoubleSum: getDoubleSumMetric(validDoubleSum, lbs2, floatVal2, time2),
+validSum: getSumMetric(validSum, lbs2, floatVal2, time2),
 validIntHistogram: getIntHistogramMetric(validIntHistogram, lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
 validHistogram: getHistogramMetric(validHistogram, lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets),
 validSummary: getSummaryMetric(validSummary, lbs2, time2, floatVal2, uint64(intVal2), quantiles),

@@ -130,14 +130,14 @@ var (
 emptyIntGauge = "emptyIntGauge"
 emptyDoubleGauge = "emptyDoubleGauge"
 emptyIntSum = "emptyIntSum"
-emptyDoubleSum = "emptyDoubleSum"
+emptySum = "emptySum"
 emptyIntHistogram = "emptyIntHistogram"
 emptyHistogram = "emptyHistogram"
 emptySummary = "emptySummary"

 // Category 2: invalid type and temporality combination
 emptyCumulativeIntSum = "emptyCumulativeIntSum"
-emptyCumulativeDoubleSum = "emptyCumulativeDoubleSum"
+emptyCumulativeSum = "emptyCumulativeSum"
 emptyCumulativeIntHistogram = "emptyCumulativeIntHistogram"
 emptyCumulativeHistogram = "emptyCumulativeHistogram"

@@ -147,12 +147,12 @@ var (
 emptyIntGauge: getEmptyIntGaugeMetric(emptyIntGauge),
 emptyDoubleGauge: getEmptyDoubleGaugeMetric(emptyDoubleGauge),
 emptyIntSum: getEmptyIntSumMetric(emptyIntSum),
-emptyDoubleSum: getEmptyDoubleSumMetric(emptyDoubleSum),
+emptySum: getEmptySumMetric(emptySum),
 emptyIntHistogram: getEmptyIntHistogramMetric(emptyIntHistogram),
 emptyHistogram: getEmptyHistogramMetric(emptyHistogram),
 emptySummary: getEmptySummaryMetric(emptySummary),
 emptyCumulativeIntSum: getEmptyCumulativeIntSumMetric(emptyCumulativeIntSum),
-emptyCumulativeDoubleSum: getEmptyCumulativeDoubleSumMetric(emptyCumulativeDoubleSum),
+emptyCumulativeSum: getEmptyCumulativeSumMetric(emptyCumulativeSum),
 emptyCumulativeIntHistogram: getEmptyCumulativeIntHistogramMetric(emptyCumulativeIntHistogram),
 emptyCumulativeHistogram: getEmptyCumulativeHistogramMetric(emptyCumulativeHistogram),
 }
@@ -302,27 +302,27 @@ func getIntSumMetric(name string, labels pdata.StringMap, value int64, ts uint64
 return metric
 }

-func getEmptyDoubleSumMetric(name string) pdata.Metric {
+func getEmptySumMetric(name string) pdata.Metric {
 metric := pdata.NewMetric()
 metric.SetName(name)
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
+metric.SetDataType(pdata.MetricDataTypeSum)
 return metric
 }

-func getEmptyCumulativeDoubleSumMetric(name string) pdata.Metric {
+func getEmptyCumulativeSumMetric(name string) pdata.Metric {
 metric := pdata.NewMetric()
 metric.SetName(name)
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 return metric
 }

-func getDoubleSumMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric {
+func getSumMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric {
 metric := pdata.NewMetric()
 metric.SetName(name)
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
-dp := metric.DoubleSum().DataPoints().AppendEmpty()
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+dp := metric.Sum().DataPoints().AppendEmpty()
 dp.SetValue(value)

 labels.Range(func(k string, v string) bool {
@@ -129,9 +129,9 @@ func (g *metricGenerator) populateMetrics(cfg MetricsCfg, ilm pdata.Instrumentat
 sum.SetIsMonotonic(cfg.IsMonotonicSum)
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 populateIntPoints(cfg, sum.DataPoints())
-case pdata.MetricDataTypeDoubleSum:
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-sum := metric.DoubleSum()
+case pdata.MetricDataTypeSum:
+metric.SetDataType(pdata.MetricDataTypeSum)
+sum := metric.Sum()
 sum.SetIsMonotonic(cfg.IsMonotonicSum)
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 populateDoublePoints(cfg, sum.DataPoints())

@@ -75,10 +75,10 @@ func pictToCfg(inputs PICTMetricInputs) MetricsCfg {
 case MetricTypeDoubleGauge:
 cfg.MetricDescriptorType = pdata.MetricDataTypeDoubleGauge
 case MetricTypeMonotonicDoubleSum:
-cfg.MetricDescriptorType = pdata.MetricDataTypeDoubleSum
+cfg.MetricDescriptorType = pdata.MetricDataTypeSum
 cfg.IsMonotonicSum = true
 case MetricTypeNonMonotonicDoubleSum:
-cfg.MetricDescriptorType = pdata.MetricDataTypeDoubleSum
+cfg.MetricDescriptorType = pdata.MetricDataTypeSum
 cfg.IsMonotonicSum = false
 case MetricTypeIntHistogram:
 cfg.MetricDescriptorType = pdata.MetricDataTypeIntHistogram
@@ -89,8 +89,8 @@ func (b *dataBuffer) logMetricDataPoints(m pdata.Metric) {
 b.logEntry(" -> IsMonotonic: %t", data.IsMonotonic())
 b.logEntry(" -> AggregationTemporality: %s", data.AggregationTemporality().String())
 b.logIntDataPoints(data.DataPoints())
-case pdata.MetricDataTypeDoubleSum:
-data := m.DoubleSum()
+case pdata.MetricDataTypeSum:
+data := m.Sum()
 b.logEntry(" -> IsMonotonic: %t", data.IsMonotonic())
 b.logEntry(" -> AggregationTemporality: %s", data.AggregationTemporality().String())
 b.logDoubleDataPoints(data.DataPoints())

@@ -50,8 +50,8 @@ func (m *Matcher) MatchMetric(metric pdata.Metric) (bool, error) {
 return m.matchDoubleGauge(metricName, metric.DoubleGauge())
 case pdata.MetricDataTypeIntSum:
 return m.matchIntSum(metricName, metric.IntSum())
-case pdata.MetricDataTypeDoubleSum:
-return m.matchDoubleSum(metricName, metric.DoubleSum())
+case pdata.MetricDataTypeSum:
+return m.matchSum(metricName, metric.Sum())
 case pdata.MetricDataTypeIntHistogram:
 return m.matchIntHistogram(metricName, metric.IntHistogram())
 case pdata.MetricDataTypeHistogram:

@@ -89,7 +89,7 @@ func (m *Matcher) matchDoubleGauge(metricName string, gauge pdata.DoubleGauge) (
 return false, nil
 }

-func (m *Matcher) matchDoubleSum(metricName string, sum pdata.DoubleSum) (bool, error) {
+func (m *Matcher) matchSum(metricName string, sum pdata.Sum) (bool, error) {
 pts := sum.DataPoints()
 for i := 0; i < pts.Len(); i++ {
 matched, err := m.matchEnv(metricName, pts.At(i).LabelsMap())
@@ -56,8 +56,8 @@ func TestNilDoubleGauge(t *testing.T) {
 testNilValue(t, dataType)
 }

-func TestNilDoubleSum(t *testing.T) {
-dataType := pdata.MetricDataTypeDoubleSum
+func TestNilSum(t *testing.T) {
+dataType := pdata.MetricDataTypeSum
 testNilValue(t, dataType)
 }

@@ -111,13 +111,13 @@ func TestDoubleGaugeEmptyDataPoint(t *testing.T) {
 assert.True(t, matched)
 }

-func TestDoubleSumEmptyDataPoint(t *testing.T) {
+func TestSumEmptyDataPoint(t *testing.T) {
 matcher, err := NewMatcher(`MetricName == 'my.metric'`)
 require.NoError(t, err)
 m := pdata.NewMetric()
 m.SetName("my.metric")
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-m.DoubleSum().DataPoints().AppendEmpty()
+m.SetDataType(pdata.MetricDataTypeSum)
+m.Sum().DataPoints().AppendEmpty()
 matched, err := matcher.MatchMetric(m)
 assert.NoError(t, err)
 assert.True(t, matched)

@@ -248,21 +248,21 @@ func testMatchDoubleGauge(t *testing.T, metricName string) bool {
 return match
 }

-func TestMatchDoubleSumByMetricName(t *testing.T) {
-assert.True(t, matchDoubleSum(t, "my.metric"))
+func TestMatchSumByMetricName(t *testing.T) {
+assert.True(t, matchSum(t, "my.metric"))
 }

-func TestNonMatchDoubleSumByMetricName(t *testing.T) {
-assert.False(t, matchDoubleSum(t, "foo.metric"))
+func TestNonMatchSumByMetricName(t *testing.T) {
+assert.False(t, matchSum(t, "foo.metric"))
 }

-func matchDoubleSum(t *testing.T, metricName string) bool {
+func matchSum(t *testing.T, metricName string) bool {
 matcher, err := NewMatcher(`MetricName == 'my.metric'`)
 require.NoError(t, err)
 m := pdata.NewMetric()
 m.SetName(metricName)
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-dps := m.DoubleSum().DataPoints()
+m.SetDataType(pdata.MetricDataTypeSum)
+dps := m.Sum().DataPoints()
 dps.AppendEmpty()
 matched, err := matcher.MatchMetric(m)
 assert.NoError(t, err)
@@ -112,7 +112,7 @@ func GenerateMetricsAllTypesNoDataPoints() pdata.Metrics {
 ms := ilm0.Metrics()
 initMetric(ms.AppendEmpty(), TestGaugeDoubleMetricName, pdata.MetricDataTypeDoubleGauge)
 initMetric(ms.AppendEmpty(), TestGaugeIntMetricName, pdata.MetricDataTypeIntGauge)
-initMetric(ms.AppendEmpty(), TestCounterDoubleMetricName, pdata.MetricDataTypeDoubleSum)
+initMetric(ms.AppendEmpty(), TestCounterDoubleMetricName, pdata.MetricDataTypeSum)
 initMetric(ms.AppendEmpty(), TestCounterIntMetricName, pdata.MetricDataTypeIntSum)
 initMetric(ms.AppendEmpty(), TestDoubleHistogramMetricName, pdata.MetricDataTypeHistogram)
 initMetric(ms.AppendEmpty(), TestIntHistogramMetricName, pdata.MetricDataTypeIntHistogram)

@@ -132,8 +132,8 @@ func GenerateMetricsAllTypesEmptyDataPoint() pdata.Metrics {
 initMetric(intGauge, TestGaugeIntMetricName, pdata.MetricDataTypeIntGauge)
 intGauge.IntGauge().DataPoints().AppendEmpty()
 doubleSum := ms.AppendEmpty()
-initMetric(doubleSum, TestCounterDoubleMetricName, pdata.MetricDataTypeDoubleSum)
-doubleSum.DoubleSum().DataPoints().AppendEmpty()
+initMetric(doubleSum, TestCounterDoubleMetricName, pdata.MetricDataTypeSum)
+doubleSum.Sum().DataPoints().AppendEmpty()
 intSum := ms.AppendEmpty()
 initMetric(intSum, TestCounterIntMetricName, pdata.MetricDataTypeIntSum)
 intSum.IntSum().DataPoints().AppendEmpty()

@@ -199,9 +199,9 @@ func initGaugeIntMetricOneDataPoint(im pdata.Metric) {
 }

 func initSumDoubleMetric(dm pdata.Metric) {
-initMetric(dm, TestCounterDoubleMetricName, pdata.MetricDataTypeDoubleSum)
+initMetric(dm, TestCounterDoubleMetricName, pdata.MetricDataTypeSum)

-ddps := dm.DoubleSum().DataPoints()
+ddps := dm.Sum().DataPoints()
 ddp0 := ddps.AppendEmpty()
 initMetricLabels12(ddp0.LabelsMap())
 ddp0.SetStartTimestamp(TestMetricStartTimestamp)

@@ -295,8 +295,8 @@ func initMetric(m pdata.Metric, name string, ty pdata.MetricDataType) {
 sum := m.IntSum()
 sum.SetIsMonotonic(true)
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
-case pdata.MetricDataTypeDoubleSum:
-sum := m.DoubleSum()
+case pdata.MetricDataTypeSum:
+sum := m.Sum()
 sum.SetIsMonotonic(true)
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 case pdata.MetricDataTypeIntHistogram:
@@ -675,55 +675,55 @@ func (ms IntSum) CopyTo(dest IntSum) {
 ms.DataPoints().CopyTo(dest.DataPoints())
 }

-// DoubleSum represents the type of a numeric double scalar metric that is calculated as a sum of all reported measurements over a time interval.
+// Sum represents the type of a numeric double scalar metric that is calculated as a sum of all reported measurements over a time interval.
 //
 // This is a reference type, if passed by value and callee modifies it the
 // caller will see the modification.
 //
-// Must use NewDoubleSum function to create new instances.
+// Must use NewSum function to create new instances.
 // Important: zero-initialized instance is not valid for use.
-type DoubleSum struct {
+type Sum struct {
 orig *otlpmetrics.Sum
 }

-func newDoubleSum(orig *otlpmetrics.Sum) DoubleSum {
-return DoubleSum{orig: orig}
+func newSum(orig *otlpmetrics.Sum) Sum {
+return Sum{orig: orig}
 }

-// NewDoubleSum creates a new empty DoubleSum.
+// NewSum creates a new empty Sum.
 //
 // This must be used only in testing code since no "Set" method available.
-func NewDoubleSum() DoubleSum {
-return newDoubleSum(&otlpmetrics.Sum{})
+func NewSum() Sum {
+return newSum(&otlpmetrics.Sum{})
 }

-// AggregationTemporality returns the aggregationtemporality associated with this DoubleSum.
-func (ms DoubleSum) AggregationTemporality() AggregationTemporality {
+// AggregationTemporality returns the aggregationtemporality associated with this Sum.
+func (ms Sum) AggregationTemporality() AggregationTemporality {
 return AggregationTemporality((*ms.orig).AggregationTemporality)
 }

-// SetAggregationTemporality replaces the aggregationtemporality associated with this DoubleSum.
-func (ms DoubleSum) SetAggregationTemporality(v AggregationTemporality) {
+// SetAggregationTemporality replaces the aggregationtemporality associated with this Sum.
+func (ms Sum) SetAggregationTemporality(v AggregationTemporality) {
 (*ms.orig).AggregationTemporality = otlpmetrics.AggregationTemporality(v)
 }

-// IsMonotonic returns the ismonotonic associated with this DoubleSum.
-func (ms DoubleSum) IsMonotonic() bool {
+// IsMonotonic returns the ismonotonic associated with this Sum.
+func (ms Sum) IsMonotonic() bool {
 return (*ms.orig).IsMonotonic
 }

-// SetIsMonotonic replaces the ismonotonic associated with this DoubleSum.
-func (ms DoubleSum) SetIsMonotonic(v bool) {
+// SetIsMonotonic replaces the ismonotonic associated with this Sum.
+func (ms Sum) SetIsMonotonic(v bool) {
 (*ms.orig).IsMonotonic = v
 }

-// DataPoints returns the DataPoints associated with this DoubleSum.
-func (ms DoubleSum) DataPoints() DoubleDataPointSlice {
+// DataPoints returns the DataPoints associated with this Sum.
+func (ms Sum) DataPoints() DoubleDataPointSlice {
 return newDoubleDataPointSlice(&(*ms.orig).DataPoints)
 }

 // CopyTo copies all properties from the current struct to the dest.
-func (ms DoubleSum) CopyTo(dest DoubleSum) {
+func (ms Sum) CopyTo(dest Sum) {
 dest.SetAggregationTemporality(ms.AggregationTemporality())
 dest.SetIsMonotonic(ms.IsMonotonic())
 ms.DataPoints().CopyTo(dest.DataPoints())
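The renamed pdata.Sum wrapper keeps DoubleSum's surface: aggregation temporality, monotonicity and a DoubleDataPointSlice. A short test-style sketch of the generated API is below; NewSum is documented above as test-only, and the import path is an assumption.

```go
package pdata_test

import "go.opentelemetry.io/collector/consumer/pdata" // assumed import path

// copySumExample shows the generated accessors after the rename.
func copySumExample() pdata.Sum {
	src := pdata.NewSum() // test-only constructor, per the doc comment above
	src.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
	src.SetIsMonotonic(true)
	src.DataPoints().AppendEmpty().SetValue(1.5)

	dest := pdata.NewSum()
	src.CopyTo(dest) // copies temporality, monotonicity and data points
	return dest
}
```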
@@ -504,30 +504,30 @@ func TestIntSum_DataPoints(t *testing.T) {
 assert.EqualValues(t, testValDataPoints, ms.DataPoints())
 }

-func TestDoubleSum_CopyTo(t *testing.T) {
-ms := NewDoubleSum()
-generateTestDoubleSum().CopyTo(ms)
-assert.EqualValues(t, generateTestDoubleSum(), ms)
+func TestSum_CopyTo(t *testing.T) {
+ms := NewSum()
+generateTestSum().CopyTo(ms)
+assert.EqualValues(t, generateTestSum(), ms)
 }

-func TestDoubleSum_AggregationTemporality(t *testing.T) {
-ms := NewDoubleSum()
+func TestSum_AggregationTemporality(t *testing.T) {
+ms := NewSum()
 assert.EqualValues(t, AggregationTemporalityUnspecified, ms.AggregationTemporality())
 testValAggregationTemporality := AggregationTemporalityCumulative
 ms.SetAggregationTemporality(testValAggregationTemporality)
 assert.EqualValues(t, testValAggregationTemporality, ms.AggregationTemporality())
 }

-func TestDoubleSum_IsMonotonic(t *testing.T) {
-ms := NewDoubleSum()
+func TestSum_IsMonotonic(t *testing.T) {
+ms := NewSum()
 assert.EqualValues(t, false, ms.IsMonotonic())
 testValIsMonotonic := true
 ms.SetIsMonotonic(testValIsMonotonic)
 assert.EqualValues(t, testValIsMonotonic, ms.IsMonotonic())
 }

-func TestDoubleSum_DataPoints(t *testing.T) {
-ms := NewDoubleSum()
+func TestSum_DataPoints(t *testing.T) {
+ms := NewSum()
 assert.EqualValues(t, NewDoubleDataPointSlice(), ms.DataPoints())
 fillTestDoubleDataPointSlice(ms.DataPoints())
 testValDataPoints := generateTestDoubleDataPointSlice()

@@ -2003,13 +2003,13 @@ func fillTestIntSum(tv IntSum) {
 fillTestIntDataPointSlice(tv.DataPoints())
 }

-func generateTestDoubleSum() DoubleSum {
-tv := NewDoubleSum()
-fillTestDoubleSum(tv)
+func generateTestSum() Sum {
+tv := NewSum()
+fillTestSum(tv)
 return tv
 }

-func fillTestDoubleSum(tv DoubleSum) {
+func fillTestSum(tv Sum) {
 tv.SetAggregationTemporality(AggregationTemporalityCumulative)
 tv.SetIsMonotonic(true)
 fillTestDoubleDataPointSlice(tv.DataPoints())
@@ -111,8 +111,8 @@ func (md Metrics) DataPointCount() (dataPointCount int) {
 dataPointCount += m.DoubleGauge().DataPoints().Len()
 case MetricDataTypeIntSum:
 dataPointCount += m.IntSum().DataPoints().Len()
-case MetricDataTypeDoubleSum:
-dataPointCount += m.DoubleSum().DataPoints().Len()
+case MetricDataTypeSum:
+dataPointCount += m.Sum().DataPoints().Len()
 case MetricDataTypeIntHistogram:
 dataPointCount += m.IntHistogram().DataPoints().Len()
 case MetricDataTypeHistogram:

@@ -134,7 +134,7 @@ const (
 MetricDataTypeIntGauge
 MetricDataTypeDoubleGauge
 MetricDataTypeIntSum
-MetricDataTypeDoubleSum
+MetricDataTypeSum
 MetricDataTypeIntHistogram
 MetricDataTypeHistogram
 MetricDataTypeSummary

@@ -151,8 +151,8 @@ func (mdt MetricDataType) String() string {
 return "DoubleGauge"
 case MetricDataTypeIntSum:
 return "IntSum"
-case MetricDataTypeDoubleSum:
-return "DoubleSum"
+case MetricDataTypeSum:
+return "Sum"
 case MetricDataTypeIntHistogram:
 return "IntHistogram"
 case MetricDataTypeHistogram:

@@ -174,7 +174,7 @@ func (ms Metric) DataType() MetricDataType {
 case *otlpmetrics.Metric_IntSum:
 return MetricDataTypeIntSum
 case *otlpmetrics.Metric_Sum:
-return MetricDataTypeDoubleSum
+return MetricDataTypeSum
 case *otlpmetrics.Metric_IntHistogram:
 return MetricDataTypeIntHistogram
 case *otlpmetrics.Metric_Histogram:

@@ -195,7 +195,7 @@ func (ms Metric) SetDataType(ty MetricDataType) {
 ms.orig.Data = &otlpmetrics.Metric_Gauge{Gauge: &otlpmetrics.Gauge{}}
 case MetricDataTypeIntSum:
 ms.orig.Data = &otlpmetrics.Metric_IntSum{IntSum: &otlpmetrics.IntSum{}}
-case MetricDataTypeDoubleSum:
+case MetricDataTypeSum:
 ms.orig.Data = &otlpmetrics.Metric_Sum{Sum: &otlpmetrics.Sum{}}
 case MetricDataTypeIntHistogram:
 ms.orig.Data = &otlpmetrics.Metric_IntHistogram{IntHistogram: &otlpmetrics.IntHistogram{}}
@@ -227,11 +227,11 @@ func (ms Metric) IntSum() IntSum {
 return newIntSum(ms.orig.Data.(*otlpmetrics.Metric_IntSum).IntSum)
 }

-// DoubleSum returns the data as DoubleSum.
-// Calling this function when DataType() != MetricDataTypeDoubleSum will cause a panic.
+// Sum returns the data as Sum.
+// Calling this function when DataType() != MetricDataTypeSum will cause a panic.
 // Calling this function on zero-initialized Metric will cause a panic.
-func (ms Metric) DoubleSum() DoubleSum {
-return newDoubleSum(ms.orig.Data.(*otlpmetrics.Metric_Sum).Sum)
+func (ms Metric) Sum() Sum {
+return newSum(ms.orig.Data.(*otlpmetrics.Metric_Sum).Sum)
 }

 // IntHistogram returns the data as IntHistogram.

@@ -271,7 +271,7 @@ func copyData(src, dest *otlpmetrics.Metric) {
 dest.Data = data
 case *otlpmetrics.Metric_Sum:
 data := &otlpmetrics.Metric_Sum{Sum: &otlpmetrics.Sum{}}
-newDoubleSum(srcData.Sum).CopyTo(newDoubleSum(data.Sum))
+newSum(srcData.Sum).CopyTo(newSum(data.Sum))
 dest.Data = data
 case *otlpmetrics.Metric_IntHistogram:
 data := &otlpmetrics.Metric_IntHistogram{IntHistogram: &otlpmetrics.IntHistogram{}}
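As the doc comment above states, Metric.Sum() panics unless DataType() is MetricDataTypeSum, so callers check or switch on the data type first, exactly as the call sites elsewhere in this commit do. A minimal guard sketch (helper name and import path assumed):

```go
package main

import "go.opentelemetry.io/collector/consumer/pdata" // assumed import path

// sumPointCount returns the number of Sum data points, guarding the accessor
// so that Sum() is never called for another data type.
func sumPointCount(m pdata.Metric) int {
	if m.DataType() == pdata.MetricDataTypeSum {
		return m.Sum().DataPoints().Len()
	}
	return 0
}
```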
@@ -65,7 +65,7 @@ func TestCopyData(t *testing.T) {
 },
 },
 {
-name: "DoubleSum",
+name: "Sum",
 src: &otlpmetrics.Metric{
 Data: &otlpmetrics.Metric_Sum{
 Sum: &otlpmetrics.Sum{},

@@ -110,8 +110,8 @@ func TestDataType(t *testing.T) {
 assert.Equal(t, MetricDataTypeDoubleGauge, m.DataType())
 m.SetDataType(MetricDataTypeIntSum)
 assert.Equal(t, MetricDataTypeIntSum, m.DataType())
-m.SetDataType(MetricDataTypeDoubleSum)
-assert.Equal(t, MetricDataTypeDoubleSum, m.DataType())
+m.SetDataType(MetricDataTypeSum)
+assert.Equal(t, MetricDataTypeSum, m.DataType())
 m.SetDataType(MetricDataTypeIntHistogram)
 assert.Equal(t, MetricDataTypeIntHistogram, m.DataType())
 m.SetDataType(MetricDataTypeHistogram)

@@ -260,7 +260,7 @@ func TestDataPointCountWithNilDataPoints(t *testing.T) {
 intSum := ilm.Metrics().AppendEmpty()
 intSum.SetDataType(MetricDataTypeIntSum)
 doubleSum := ilm.Metrics().AppendEmpty()
-doubleSum.SetDataType(MetricDataTypeDoubleSum)
+doubleSum.SetDataType(MetricDataTypeSum)
 assert.EqualValues(t, 0, metrics.DataPointCount())
 }
@@ -272,7 +272,7 @@ func TestOtlpToInternalReadOnly(t *testing.T) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoDoubleSumMetric(), generateTestProtoDoubleHistogramMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoSumMetric(), generateTestProtoDoubleHistogramMetric()},
 },
 },
 },

@@ -312,8 +312,8 @@ func TestOtlpToInternalReadOnly(t *testing.T) {
 assert.EqualValues(t, "my_metric_double", metricDouble.Name())
 assert.EqualValues(t, "My metric", metricDouble.Description())
 assert.EqualValues(t, "ms", metricDouble.Unit())
-assert.EqualValues(t, MetricDataTypeDoubleSum, metricDouble.DataType())
-dsd := metricDouble.DoubleSum()
+assert.EqualValues(t, MetricDataTypeSum, metricDouble.DataType())
+dsd := metricDouble.Sum()
 assert.EqualValues(t, AggregationTemporalityCumulative, dsd.AggregationTemporality())
 doubleDataPoints := dsd.DataPoints()
 assert.EqualValues(t, 2, doubleDataPoints.Len())

@@ -360,7 +360,7 @@ func TestOtlpToFromInternalReadOnly(t *testing.T) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoDoubleSumMetric(), generateTestProtoDoubleHistogramMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoSumMetric(), generateTestProtoDoubleHistogramMetric()},
 },
 },
 },

@@ -374,7 +374,7 @@ func TestOtlpToFromInternalReadOnly(t *testing.T) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoDoubleSumMetric(), generateTestProtoDoubleHistogramMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoSumMetric(), generateTestProtoDoubleHistogramMetric()},
 },
 },
 },

@@ -462,7 +462,7 @@ func TestOtlpToFromInternalIntGaugeMutating(t *testing.T) {
 }, internal.MetricsToOtlp(md.InternalRep()))
 }

-func TestOtlpToFromInternalDoubleSumMutating(t *testing.T) {
+func TestOtlpToFromInternalSumMutating(t *testing.T) {
 newLabels := NewStringMap().InitFromMap(map[string]string{"k": "v"})

 md := MetricsFromInternalRep(internal.MetricsFromOtlp(&otlpcollectormetrics.ExportMetricsServiceRequest{

@@ -472,7 +472,7 @@ func TestOtlpToFromInternalDoubleSumMutating(t *testing.T) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoDoubleSumMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoSumMetric()},
 },
 },
 },

@@ -488,7 +488,7 @@ func TestOtlpToFromInternalDoubleSumMutating(t *testing.T) {
 metric.SetUnit("1")
 assert.EqualValues(t, "1", metric.Unit())
 // Mutate DataPoints
-dsd := metric.DoubleSum()
+dsd := metric.Sum()
 assert.EqualValues(t, 2, dsd.DataPoints().Len())
 dsd.DataPoints().Resize(1)
 assert.EqualValues(t, 1, dsd.DataPoints().Len())
@@ -653,7 +653,7 @@ func BenchmarkOtlpToFromInternal_PassThrough(b *testing.B) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoDoubleSumMetric(), generateTestProtoDoubleHistogramMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoIntGaugeMetric(), generateTestProtoSumMetric(), generateTestProtoDoubleHistogramMetric()},
 },
 },
 },

@@ -696,7 +696,7 @@ func BenchmarkOtlpToFromInternal_IntGauge_MutateOneLabel(b *testing.B) {
 }
 }

-func BenchmarkOtlpToFromInternal_DoubleSum_MutateOneLabel(b *testing.B) {
+func BenchmarkOtlpToFromInternal_Sum_MutateOneLabel(b *testing.B) {
 req := &otlpcollectormetrics.ExportMetricsServiceRequest{
 ResourceMetrics: []*otlpmetrics.ResourceMetrics{
 {

@@ -704,7 +704,7 @@ func BenchmarkOtlpToFromInternal_DoubleSum_MutateOneLabel(b *testing.B) {
 InstrumentationLibraryMetrics: []*otlpmetrics.InstrumentationLibraryMetrics{
 {
 InstrumentationLibrary: generateTestProtoInstrumentationLibrary(),
-Metrics: []*otlpmetrics.Metric{generateTestProtoDoubleSumMetric()},
+Metrics: []*otlpmetrics.Metric{generateTestProtoSumMetric()},
 },
 },
 },

@@ -714,7 +714,7 @@ func BenchmarkOtlpToFromInternal_DoubleSum_MutateOneLabel(b *testing.B) {
 b.ResetTimer()
 for n := 0; n < b.N; n++ {
 md := MetricsFromInternalRep(internal.MetricsFromOtlp(req))
-md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).DoubleSum().DataPoints().At(0).LabelsMap().Upsert("key0", "value2")
+md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Upsert("key0", "value2")
 newReq := internal.MetricsToOtlp(md.InternalRep())
 if len(req.ResourceMetrics) != len(newReq.ResourceMetrics) {
 b.Fail()

@@ -801,7 +801,7 @@ func generateTestProtoIntGaugeMetric() *otlpmetrics.Metric {
 },
 }
 }
-func generateTestProtoDoubleSumMetric() *otlpmetrics.Metric {
+func generateTestProtoSumMetric() *otlpmetrics.Metric {
 return &otlpmetrics.Metric{
 Name: "my_metric_double",
 Description: "My metric",
@@ -88,8 +88,8 @@ func metricDataPointCount(ms pdata.Metric) (dataPointCount int) {
 dataPointCount = ms.DoubleGauge().DataPoints().Len()
 case pdata.MetricDataTypeIntSum:
 dataPointCount = ms.IntSum().DataPoints().Len()
-case pdata.MetricDataTypeDoubleSum:
-dataPointCount = ms.DoubleSum().DataPoints().Len()
+case pdata.MetricDataTypeSum:
+dataPointCount = ms.Sum().DataPoints().Len()
 case pdata.MetricDataTypeIntHistogram:
 dataPointCount = ms.IntHistogram().DataPoints().Len()
 case pdata.MetricDataTypeHistogram:

@@ -126,9 +126,9 @@ func splitMetric(ms, dest pdata.Metric, size int) (int, bool) {
 ms.IntSum().DataPoints().RemoveIf(func(_ pdata.IntDataPoint) bool {
 return filterDataPoints()
 })
-case pdata.MetricDataTypeDoubleSum:
-dest.DoubleSum().DataPoints().Resize(size)
-ms.DoubleSum().DataPoints().RemoveIf(func(_ pdata.DoubleDataPoint) bool {
+case pdata.MetricDataTypeSum:
+dest.Sum().DataPoints().Resize(size)
+ms.Sum().DataPoints().RemoveIf(func(_ pdata.DoubleDataPoint) bool {
 return filterDataPoints()
 })
 case pdata.MetricDataTypeIntHistogram:
@@ -59,7 +59,7 @@ func TestExprProcessor(t *testing.T) {
 testFilter(t, pdata.MetricDataTypeIntGauge)
 testFilter(t, pdata.MetricDataTypeDoubleGauge)
 testFilter(t, pdata.MetricDataTypeIntSum)
-testFilter(t, pdata.MetricDataTypeDoubleSum)
+testFilter(t, pdata.MetricDataTypeSum)
 testFilter(t, pdata.MetricDataTypeIntHistogram)
 testFilter(t, pdata.MetricDataTypeHistogram)
 }

@@ -105,8 +105,8 @@ func testFilter(t *testing.T, mdType pdata.MetricDataType) {
 for l := 0; l < pts.Len(); l++ {
 assertFiltered(t, pts.At(l).LabelsMap())
 }
-case pdata.MetricDataTypeDoubleSum:
-pts := metric.DoubleSum().DataPoints()
+case pdata.MetricDataTypeSum:
+pts := metric.Sum().DataPoints()
 for l := 0; l < pts.Len(); l++ {
 assertFiltered(t, pts.At(l).LabelsMap())
 }
@@ -196,9 +196,9 @@ var Metrics = &metricStruct{
 metric.SetName("process.cpu.time")
 metric.SetDescription("Total CPU seconds broken down by different states.")
 metric.SetUnit("s")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 },
 },
 &metricImpl{
@@ -267,9 +267,9 @@ var Metrics = &metricStruct{
 metric.SetName("system.cpu.time")
 metric.SetDescription("Total CPU seconds broken down by different states.")
 metric.SetUnit("s")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 },
 },
 &metricImpl{
@@ -289,9 +289,9 @@ var Metrics = &metricStruct{
 metric.SetName("system.disk.io_time")
 metric.SetDescription("Time disk spent activated. On Windows, this is calculated as the inverse of disk idle time.")
 metric.SetUnit("s")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 },
 },
 &metricImpl{
@@ -311,9 +311,9 @@ var Metrics = &metricStruct{
 metric.SetName("system.disk.operation_time")
 metric.SetDescription("Time spent in disk operations.")
 metric.SetUnit("s")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 },
 },
 &metricImpl{
@@ -344,9 +344,9 @@ var Metrics = &metricStruct{
 metric.SetName("system.disk.weighted_io_time")
 metric.SetDescription("Time disk spent activated multiplied by the queue length.")
 metric.SetUnit("s")
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-metric.DoubleSum().SetIsMonotonic(true)
-metric.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+metric.SetDataType(pdata.MetricDataTypeSum)
+metric.Sum().SetIsMonotonic(true)
+metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 },
 },
 &metricImpl{
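The initializers above all follow the same three-call shape, so the rename only touches the data-type constant and the accessor. A sketch of building such a metric by hand with the renamed API, using only calls that appear in this diff (the import path is an assumption and has moved between collector versions):

    import "go.opentelemetry.io/collector/consumer/pdata" // assumed path for this collector version

    // newCumulativeSum mirrors the generated initializers above; sketch only.
    func newCumulativeSum(name, description, unit string) pdata.Metric {
    	metric := pdata.NewMetric()
    	metric.SetName(name)
    	metric.SetDescription(description)
    	metric.SetUnit(unit)
    	metric.SetDataType(pdata.MetricDataTypeSum) // previously MetricDataTypeDoubleSum
    	metric.Sum().SetIsMonotonic(true)           // previously metric.DoubleSum()
    	metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
    	return metric
    }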
@@ -71,7 +71,7 @@ func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
 func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, cpuTimes []cpu.TimesStat) {
 metadata.Metrics.SystemCPUTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(len(cpuTimes) * cpuStatesLen)
 for i, cpuTime := range cpuTimes {
 appendCPUTimeStateDataPoints(ddps, i*cpuStatesLen, startTime, now, cpuTime)
@@ -109,19 +109,19 @@ func TestScrape(t *testing.T) {
 func assertCPUMetricValid(t *testing.T, metric pdata.Metric, descriptor pdata.Metric, startTime pdata.Timestamp) {
 internal.AssertDescriptorEqual(t, descriptor, metric)
 if startTime != 0 {
-internal.AssertDoubleSumMetricStartTimeEquals(t, metric, startTime)
+internal.AssertSumMetricStartTimeEquals(t, metric, startTime)
 }
-assert.GreaterOrEqual(t, metric.DoubleSum().DataPoints().Len(), 4*runtime.NumCPU())
-internal.AssertDoubleSumMetricLabelExists(t, metric, 0, metadata.Labels.Cpu)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 0, metadata.Labels.CPUState, metadata.LabelCPUState.User)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 1, metadata.Labels.CPUState, metadata.LabelCPUState.System)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 2, metadata.Labels.CPUState, metadata.LabelCPUState.Idle)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 3, metadata.Labels.CPUState, metadata.LabelCPUState.Interrupt)
+assert.GreaterOrEqual(t, metric.Sum().DataPoints().Len(), 4*runtime.NumCPU())
+internal.AssertSumMetricLabelExists(t, metric, 0, metadata.Labels.Cpu)
+internal.AssertSumMetricLabelHasValue(t, metric, 0, metadata.Labels.CPUState, metadata.LabelCPUState.User)
+internal.AssertSumMetricLabelHasValue(t, metric, 1, metadata.Labels.CPUState, metadata.LabelCPUState.System)
+internal.AssertSumMetricLabelHasValue(t, metric, 2, metadata.Labels.CPUState, metadata.LabelCPUState.Idle)
+internal.AssertSumMetricLabelHasValue(t, metric, 3, metadata.Labels.CPUState, metadata.LabelCPUState.Interrupt)
 }

 func assertCPUMetricHasLinuxSpecificStateLabels(t *testing.T, metric pdata.Metric) {
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 4, metadata.Labels.CPUState, metadata.LabelCPUState.Nice)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 5, metadata.Labels.CPUState, metadata.LabelCPUState.Softirq)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 6, metadata.Labels.CPUState, metadata.LabelCPUState.Steal)
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 7, metadata.Labels.CPUState, metadata.LabelCPUState.Wait)
+internal.AssertSumMetricLabelHasValue(t, metric, 4, metadata.Labels.CPUState, metadata.LabelCPUState.Nice)
+internal.AssertSumMetricLabelHasValue(t, metric, 5, metadata.Labels.CPUState, metadata.LabelCPUState.Softirq)
+internal.AssertSumMetricLabelHasValue(t, metric, 6, metadata.Labels.CPUState, metadata.LabelCPUState.Steal)
+internal.AssertSumMetricLabelHasValue(t, metric, 7, metadata.Labels.CPUState, metadata.LabelCPUState.Wait)
 }
@@ -137,7 +137,7 @@ func initializeDiskOperationsMetric(metric pdata.Metric, startTime, now pdata.Ti
 func initializeDiskIOTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, ioCounters map[string]disk.IOCountersStat) {
 metadata.Metrics.SystemDiskIoTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(len(ioCounters))

 idx := 0
@@ -150,7 +150,7 @@ func initializeDiskIOTimeMetric(metric pdata.Metric, startTime, now pdata.Timest
 func initializeDiskOperationTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, ioCounters map[string]disk.IOCountersStat) {
 metadata.Metrics.SystemDiskOperationTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(2 * len(ioCounters))

 idx := 0
@@ -33,7 +33,7 @@ func appendSystemSpecificMetrics(metrics pdata.MetricSlice, startIdx int, startT
 func initializeDiskWeightedIOTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, ioCounters map[string]disk.IOCountersStat) {
 metadata.Metrics.SystemDiskWeightedIoTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(len(ioCounters))

 idx := 0
@@ -137,19 +137,19 @@ func assertInt64DiskMetricValid(t *testing.T, metric pdata.Metric, expectedDescr
 func assertDoubleDiskMetricValid(t *testing.T, metric pdata.Metric, expectedDescriptor pdata.Metric, expectDirectionLabels bool, startTime pdata.Timestamp) {
 internal.AssertDescriptorEqual(t, expectedDescriptor, metric)
 if startTime != 0 {
-internal.AssertDoubleSumMetricStartTimeEquals(t, metric, startTime)
+internal.AssertSumMetricStartTimeEquals(t, metric, startTime)
 }

 minExpectedPoints := 1
 if expectDirectionLabels {
 minExpectedPoints = 2
 }
-assert.GreaterOrEqual(t, metric.DoubleSum().DataPoints().Len(), minExpectedPoints)
+assert.GreaterOrEqual(t, metric.Sum().DataPoints().Len(), minExpectedPoints)

-internal.AssertDoubleSumMetricLabelExists(t, metric, 0, "device")
+internal.AssertSumMetricLabelExists(t, metric, 0, "device")
 if expectDirectionLabels {
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, 0, "direction", "read")
-internal.AssertDoubleSumMetricLabelHasValue(t, metric, metric.DoubleSum().DataPoints().Len()-1, "direction", "write")
+internal.AssertSumMetricLabelHasValue(t, metric, 0, "direction", "read")
+internal.AssertSumMetricLabelHasValue(t, metric, metric.Sum().DataPoints().Len()-1, "direction", "write")
 }
 }
@@ -155,7 +155,7 @@ func initializeDiskOperationsMetric(metric pdata.Metric, startTime, now pdata.Ti
 func initializeDiskIOTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, logicalDiskCounterValues []*perfcounters.CounterValues) {
 metadata.Metrics.SystemDiskIoTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(len(logicalDiskCounterValues))
 for idx, logicalDiskCounter := range logicalDiskCounterValues {
 // disk active time = system boot time - disk idle time
@@ -166,7 +166,7 @@ func initializeDiskIOTimeMetric(metric pdata.Metric, startTime, now pdata.Timest
 func initializeDiskOperationTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, logicalDiskCounterValues []*perfcounters.CounterValues) {
 metadata.Metrics.SystemDiskOperationTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(2 * len(logicalDiskCounterValues))
 for idx, logicalDiskCounter := range logicalDiskCounterValues {
 initializeDoubleDataPoint(ddps.At(2*idx+0), startTime, now, logicalDiskCounter.InstanceName, metadata.LabelDiskDirection.Read, float64(logicalDiskCounter.Values[avgDiskSecsPerRead])/1e7)
@@ -188,7 +188,7 @@ func scrapeAndAppendCPUTimeMetric(metrics pdata.MetricSlice, startTime, now pdat
 func initializeCPUTimeMetric(metric pdata.Metric, startTime, now pdata.Timestamp, times *cpu.TimesStat) {
 metadata.Metrics.ProcessCPUTime.Init(metric)

-ddps := metric.DoubleSum().DataPoints()
+ddps := metric.Sum().DataPoints()
 ddps.Resize(cpuStatesLen)
 appendCPUTimeStateDataPoints(ddps, startTime, now, times)
 }
@@ -94,12 +94,12 @@ func assertCPUTimeMetricValid(t *testing.T, resourceMetrics pdata.ResourceMetric
 cpuTimeMetric := getMetric(t, metadata.Metrics.ProcessCPUTime.New(), resourceMetrics)
 internal.AssertDescriptorEqual(t, metadata.Metrics.ProcessCPUTime.New(), cpuTimeMetric)
 if startTime != 0 {
-internal.AssertDoubleSumMetricStartTimeEquals(t, cpuTimeMetric, startTime)
+internal.AssertSumMetricStartTimeEquals(t, cpuTimeMetric, startTime)
 }
-internal.AssertDoubleSumMetricLabelHasValue(t, cpuTimeMetric, 0, "state", "user")
-internal.AssertDoubleSumMetricLabelHasValue(t, cpuTimeMetric, 1, "state", "system")
+internal.AssertSumMetricLabelHasValue(t, cpuTimeMetric, 0, "state", "user")
+internal.AssertSumMetricLabelHasValue(t, cpuTimeMetric, 1, "state", "system")
 if runtime.GOOS == "linux" {
-internal.AssertDoubleSumMetricLabelHasValue(t, cpuTimeMetric, 2, "state", "wait")
+internal.AssertSumMetricLabelHasValue(t, cpuTimeMetric, 2, "state", "wait")
 }
 }
@@ -47,8 +47,8 @@ func AssertIntGaugeMetricLabelHasValue(t *testing.T, metric pdata.Metric, index
 assert.Equal(t, expectedVal, val)
 }

-func AssertDoubleSumMetricLabelHasValue(t *testing.T, metric pdata.Metric, index int, labelName string, expectedVal string) {
-val, ok := metric.DoubleSum().DataPoints().At(index).LabelsMap().Get(labelName)
+func AssertSumMetricLabelHasValue(t *testing.T, metric pdata.Metric, index int, labelName string, expectedVal string) {
+val, ok := metric.Sum().DataPoints().At(index).LabelsMap().Get(labelName)
 assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.Name())
 assert.Equal(t, expectedVal, val)
 }
@@ -58,8 +58,8 @@ func AssertIntSumMetricLabelExists(t *testing.T, metric pdata.Metric, index int,
 assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.Name())
 }

-func AssertDoubleSumMetricLabelExists(t *testing.T, metric pdata.Metric, index int, labelName string) {
-_, ok := metric.DoubleSum().DataPoints().At(index).LabelsMap().Get(labelName)
+func AssertSumMetricLabelExists(t *testing.T, metric pdata.Metric, index int, labelName string) {
+_, ok := metric.Sum().DataPoints().At(index).LabelsMap().Get(labelName)
 assert.Truef(t, ok, "Missing label %q in metric %q", labelName, metric.Name())
 }

@@ -70,8 +70,8 @@ func AssertIntSumMetricStartTimeEquals(t *testing.T, metric pdata.Metric, startT
 }
 }

-func AssertDoubleSumMetricStartTimeEquals(t *testing.T, metric pdata.Metric, startTime pdata.Timestamp) {
-ddps := metric.DoubleSum().DataPoints()
+func AssertSumMetricStartTimeEquals(t *testing.T, metric pdata.Metric, startTime pdata.Timestamp) {
+ddps := metric.Sum().DataPoints()
 for i := 0; i < ddps.Len(); i++ {
 require.Equal(t, startTime, ddps.At(i).StartTimestamp())
 }
@@ -97,8 +97,8 @@ func AssertSameTimeStampForMetrics(t *testing.T, metrics pdata.MetricSlice, star
 }
 }

-if dt == pdata.MetricDataTypeDoubleSum {
-ddps := metric.DoubleSum().DataPoints()
+if dt == pdata.MetricDataTypeSum {
+ddps := metric.Sum().DataPoints()
 for j := 0; j < ddps.Len(); j++ {
 if ts == 0 {
 ts = ddps.At(j).Timestamp()
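Callers of these helpers only need to drop the "Double" prefix; the parameters and assertions are unchanged. A short usage sketch, assuming a test that already has a Sum-typed pdata.Metric in scope (the metric value, label names, and the internal import path are illustrative, not part of this patch):

    // assertSumMetricBasics is a sketch of calling the renamed helpers; it is not part of this patch.
    func assertSumMetricBasics(t *testing.T, metric pdata.Metric, startTime pdata.Timestamp) {
    	internal.AssertSumMetricStartTimeEquals(t, metric, startTime)            // was AssertDoubleSumMetricStartTimeEquals
    	internal.AssertSumMetricLabelExists(t, metric, 0, "device")              // was AssertDoubleSumMetricLabelExists
    	internal.AssertSumMetricLabelHasValue(t, metric, 0, "direction", "read") // was AssertDoubleSumMetricLabelHasValue
    }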
@@ -101,7 +101,7 @@ metrics:
 description: Total CPU seconds broken down by different states.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 monotonic: true
 labels: [process.state]
@@ -135,7 +135,7 @@ metrics:
 description: Total CPU seconds broken down by different states.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 monotonic: true
 labels: [cpu.state]
@@ -189,7 +189,7 @@ metrics:
 description: Time disk spent activated. On Windows, this is calculated as the inverse of disk idle time.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 monotonic: true
 labels: [disk.device]
@@ -198,7 +198,7 @@ metrics:
 description: Time spent in disk operations.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 monotonic: true
 labels: [disk.device, disk.direction]
@@ -207,7 +207,7 @@ metrics:
 description: Time disk spent activated multiplied by the queue length.
 unit: s
 data:
-type: double sum
+type: sum
 aggregation: cumulative
 monotonic: true
 labels: [disk.device]
@@ -100,7 +100,7 @@ func (mf *metricFamilyPdata) updateLabelKeys(ls labels.Labels) {
 var _ = (*metricFamilyPdata)(nil).updateLabelKeys

 func (mf *metricFamilyPdata) isCumulativeTypePdata() bool {
-return mf.mtype == pdata.MetricDataTypeDoubleSum ||
+return mf.mtype == pdata.MetricDataTypeSum ||
 mf.mtype == pdata.MetricDataTypeIntSum ||
 mf.mtype == pdata.MetricDataTypeHistogram ||
 mf.mtype == pdata.MetricDataTypeSummary
@@ -60,7 +60,7 @@ func convToPdataMetricType(metricType textparse.MetricType) pdata.MetricDataType
 switch metricType {
 case textparse.MetricTypeCounter:
 // always use float64, as it's the internal data type used in prometheus
-return pdata.MetricDataTypeDoubleSum
+return pdata.MetricDataTypeSum
 // textparse.MetricTypeUnknown is converted to gauge by default to fix Prometheus untyped metrics from being dropped
 case textparse.MetricTypeGauge, textparse.MetricTypeUnknown:
 return pdata.MetricDataTypeDoubleGauge
@@ -194,12 +194,12 @@ func TestConvToPdataMetricType(t *testing.T) {
 {
 name: "textparse.counter",
 mtype: textparse.MetricTypeCounter,
-want: pdata.MetricDataTypeDoubleSum,
+want: pdata.MetricDataTypeSum,
 },
 {
 name: "textparse.gauge",
 mtype: textparse.MetricTypeCounter,
-want: pdata.MetricDataTypeDoubleSum,
+want: pdata.MetricDataTypeSum,
 },
 {
 name: "textparse.unknown",
@@ -250,7 +250,7 @@ func TestIsusefulLabelPdata(t *testing.T) {
 model.MetricNameLabel, model.InstanceLabel, model.SchemeLabel, model.MetricsPathLabel, model.JobLabel,
 },
 mtypes: []pdata.MetricDataType{
-pdata.MetricDataTypeDoubleSum,
+pdata.MetricDataTypeSum,
 pdata.MetricDataTypeDoubleGauge,
 pdata.MetricDataTypeIntHistogram,
 pdata.MetricDataTypeHistogram,
@@ -271,7 +271,7 @@ func TestIsusefulLabelPdata(t *testing.T) {
 {
 name: `bucket label with non "int_histogram", "histogram":: useful`,
 mtypes: []pdata.MetricDataType{
-pdata.MetricDataTypeDoubleSum,
+pdata.MetricDataTypeSum,
 pdata.MetricDataTypeDoubleGauge,
 pdata.MetricDataTypeSummary,
 pdata.MetricDataTypeIntSum,
@@ -294,7 +294,7 @@ func TestIsusefulLabelPdata(t *testing.T) {
 name: `quantile label with non-"summary": useful`,
 labelKeys: []string{model.QuantileLabel},
 mtypes: []pdata.MetricDataType{
-pdata.MetricDataTypeDoubleSum,
+pdata.MetricDataTypeSum,
 pdata.MetricDataTypeDoubleGauge,
 pdata.MetricDataTypeIntHistogram,
 pdata.MetricDataTypeHistogram,
@@ -309,7 +309,7 @@ func TestIsusefulLabelPdata(t *testing.T) {
 name: `any other label with any type:: useful`,
 labelKeys: []string{"any_label", "foo.bar"},
 mtypes: []pdata.MetricDataType{
-pdata.MetricDataTypeDoubleSum,
+pdata.MetricDataTypeSum,
 pdata.MetricDataTypeDoubleGauge,
 pdata.MetricDataTypeIntHistogram,
 pdata.MetricDataTypeHistogram,
@@ -112,10 +112,10 @@ func DiffMetric(diffs []*MetricDiff, expected pdata.Metric, actual pdata.Metric)
 diffs = diff(diffs, expected.IntSum().IsMonotonic(), actual.IntSum().IsMonotonic(), "IntSum IsMonotonic")
 diffs = diff(diffs, expected.IntSum().AggregationTemporality(), actual.IntSum().AggregationTemporality(), "IntSum AggregationTemporality")
 diffs = diffIntPts(diffs, expected.IntSum().DataPoints(), actual.IntSum().DataPoints())
-case pdata.MetricDataTypeDoubleSum:
-diffs = diff(diffs, expected.DoubleSum().IsMonotonic(), actual.DoubleSum().IsMonotonic(), "DoubleSum IsMonotonic")
-diffs = diff(diffs, expected.DoubleSum().AggregationTemporality(), actual.DoubleSum().AggregationTemporality(), "DoubleSum AggregationTemporality")
-diffs = diffDoublePts(diffs, expected.DoubleSum().DataPoints(), actual.DoubleSum().DataPoints())
+case pdata.MetricDataTypeSum:
+diffs = diff(diffs, expected.Sum().IsMonotonic(), actual.Sum().IsMonotonic(), "Sum IsMonotonic")
+diffs = diff(diffs, expected.Sum().AggregationTemporality(), actual.Sum().AggregationTemporality(), "Sum AggregationTemporality")
+diffs = diffDoublePts(diffs, expected.Sum().DataPoints(), actual.Sum().DataPoints())
 case pdata.MetricDataTypeIntHistogram:
 diffs = diff(diffs, expected.IntHistogram().AggregationTemporality(), actual.IntHistogram().AggregationTemporality(), "IntHistogram AggregationTemporality")
 diffs = diffIntHistogramPts(diffs, expected.IntHistogram().DataPoints(), actual.IntHistogram().DataPoints())
@@ -176,9 +176,9 @@ func SortedMetrics(metrics pdata.Metrics) pdata.Metrics {
 for l := 0; l < m.DoubleGauge().DataPoints().Len(); l++ {
 m.DoubleGauge().DataPoints().At(l).LabelsMap().Sort()
 }
-case pdata.MetricDataTypeDoubleSum:
-for l := 0; l < m.DoubleSum().DataPoints().Len(); l++ {
-m.DoubleSum().DataPoints().At(l).LabelsMap().Sort()
+case pdata.MetricDataTypeSum:
+for l := 0; l < m.Sum().DataPoints().Len(); l++ {
+m.Sum().DataPoints().At(l).LabelsMap().Sort()
 }
 case pdata.MetricDataTypeIntHistogram:
 for l := 0; l < m.IntHistogram().DataPoints().Len(); l++ {
@@ -92,8 +92,8 @@ func collectLabelKeys(metric pdata.Metric) *labelKeys {
 collectLabelKeysDoubleDataPoints(metric.DoubleGauge().DataPoints(), keySet)
 case pdata.MetricDataTypeIntSum:
 collectLabelKeysIntDataPoints(metric.IntSum().DataPoints(), keySet)
-case pdata.MetricDataTypeDoubleSum:
-collectLabelKeysDoubleDataPoints(metric.DoubleSum().DataPoints(), keySet)
+case pdata.MetricDataTypeSum:
+collectLabelKeysDoubleDataPoints(metric.Sum().DataPoints(), keySet)
 case pdata.MetricDataTypeIntHistogram:
 collectLabelKeysIntHistogramDataPoints(metric.IntHistogram().DataPoints(), keySet)
 case pdata.MetricDataTypeHistogram:
@@ -193,8 +193,8 @@ func descriptorTypeToOC(metric pdata.Metric) ocmetrics.MetricDescriptor_Type {
 return ocmetrics.MetricDescriptor_CUMULATIVE_INT64
 }
 return ocmetrics.MetricDescriptor_GAUGE_INT64
-case pdata.MetricDataTypeDoubleSum:
-sd := metric.DoubleSum()
+case pdata.MetricDataTypeSum:
+sd := metric.Sum()
 if sd.IsMonotonic() && sd.AggregationTemporality() == pdata.AggregationTemporalityCumulative {
 return ocmetrics.MetricDescriptor_CUMULATIVE_DOUBLE
 }
@@ -225,8 +225,8 @@ func dataPointsToTimeseries(metric pdata.Metric, labelKeys *labelKeys) []*ocmetr
 return doublePointToOC(metric.DoubleGauge().DataPoints(), labelKeys)
 case pdata.MetricDataTypeIntSum:
 return intPointsToOC(metric.IntSum().DataPoints(), labelKeys)
-case pdata.MetricDataTypeDoubleSum:
-return doublePointToOC(metric.DoubleSum().DataPoints(), labelKeys)
+case pdata.MetricDataTypeSum:
+return doublePointToOC(metric.Sum().DataPoints(), labelKeys)
 case pdata.MetricDataTypeIntHistogram:
 return intHistogramPointToOC(metric.IntHistogram().DataPoints(), labelKeys)
 case pdata.MetricDataTypeHistogram:
@@ -236,9 +236,9 @@ func TestMetricsType(t *testing.T) {
 name: "double-non-monotonic-delta-sum",
 internal: func() pdata.Metric {
 m := pdata.NewMetric()
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-m.DoubleSum().SetIsMonotonic(false)
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
+m.SetDataType(pdata.MetricDataTypeSum)
+m.Sum().SetIsMonotonic(false)
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
 return m
 },
 descType: ocmetrics.MetricDescriptor_GAUGE_DOUBLE,
@@ -247,9 +247,9 @@ func TestMetricsType(t *testing.T) {
 name: "double-non-monotonic-cumulative-sum",
 internal: func() pdata.Metric {
 m := pdata.NewMetric()
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-m.DoubleSum().SetIsMonotonic(false)
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+m.SetDataType(pdata.MetricDataTypeSum)
+m.Sum().SetIsMonotonic(false)
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 return m
 },
 descType: ocmetrics.MetricDescriptor_GAUGE_DOUBLE,
@@ -258,9 +258,9 @@ func TestMetricsType(t *testing.T) {
 name: "double-monotonic-delta-sum",
 internal: func() pdata.Metric {
 m := pdata.NewMetric()
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-m.DoubleSum().SetIsMonotonic(true)
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
+m.SetDataType(pdata.MetricDataTypeSum)
+m.Sum().SetIsMonotonic(true)
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
 return m
 },
 descType: ocmetrics.MetricDescriptor_GAUGE_DOUBLE,
@@ -269,9 +269,9 @@ func TestMetricsType(t *testing.T) {
 name: "double-monotonic-cumulative-sum",
 internal: func() pdata.Metric {
 m := pdata.NewMetric()
-m.SetDataType(pdata.MetricDataTypeDoubleSum)
-m.DoubleSum().SetIsMonotonic(true)
-m.DoubleSum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
+m.SetDataType(pdata.MetricDataTypeSum)
+m.Sum().SetIsMonotonic(true)
+m.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 return m
 },
 descType: ocmetrics.MetricDescriptor_CUMULATIVE_DOUBLE,
@@ -182,11 +182,11 @@ func descriptorTypeToMetrics(t ocmetrics.MetricDescriptor_Type, metric pdata.Met
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
 return pdata.MetricDataTypeIntSum
 case ocmetrics.MetricDescriptor_CUMULATIVE_DOUBLE:
-metric.SetDataType(pdata.MetricDataTypeDoubleSum)
-sum := metric.DoubleSum()
+metric.SetDataType(pdata.MetricDataTypeSum)
+sum := metric.Sum()
 sum.SetIsMonotonic(true)
 sum.SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
-return pdata.MetricDataTypeDoubleSum
+return pdata.MetricDataTypeSum
 case ocmetrics.MetricDescriptor_CUMULATIVE_DISTRIBUTION:
 metric.SetDataType(pdata.MetricDataTypeHistogram)
 histo := metric.Histogram()
@@ -209,8 +209,8 @@ func setDataPoints(ocMetric *ocmetrics.Metric, metric pdata.Metric) {
 fillDoubleDataPoint(ocMetric, metric.DoubleGauge().DataPoints())
 case pdata.MetricDataTypeIntSum:
 fillIntDataPoint(ocMetric, metric.IntSum().DataPoints())
-case pdata.MetricDataTypeDoubleSum:
-fillDoubleDataPoint(ocMetric, metric.DoubleSum().DataPoints())
+case pdata.MetricDataTypeSum:
+fillDoubleDataPoint(ocMetric, metric.Sum().DataPoints())
 case pdata.MetricDataTypeHistogram:
 fillDoubleHistogramDataPoint(ocMetric, metric.Histogram().DataPoints())
 case pdata.MetricDataTypeSummary: