Remove unnecessary and unused testdata (#5599)

Signed-off-by: Bogdan Drutu <bogdandrutu@gmail.com>
This commit is contained in:
Bogdan Drutu 2022-06-27 19:43:33 +03:00 committed by GitHub
parent 259f711300
commit 1a4361c49a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 259 additions and 431 deletions

View File

@ -49,7 +49,7 @@ func verifyTracesProcessorDoesntProduceAfterShutdown(t *testing.T, factory compo
// Send some traces to the processor.
const generatedCount = 10
for i := 0; i < generatedCount; i++ {
require.NoError(t, processor.ConsumeTraces(context.Background(), testdata.GenerateTracesOneSpan()))
require.NoError(t, processor.ConsumeTraces(context.Background(), testdata.GenerateTraces(1)))
}
// Now shutdown the processor.

View File

@ -137,45 +137,45 @@ func setupTestPayloads() []testPayload {
logMarshaler := &logMarshaler{plog.NewProtoMarshaler()}
payloads = append(payloads, testPayload{
name: "sm_log_request",
message: testdata.GenerateLogsOneLogRecord(),
message: testdata.GenerateLogs(1),
marshaler: logMarshaler})
payloads = append(payloads, testPayload{
name: "md_log_request",
message: testdata.GenerateLogsTwoLogRecordsSameResourceOneDifferent(),
message: testdata.GenerateLogs(2),
marshaler: logMarshaler})
payloads = append(payloads, testPayload{
name: "lg_log_request",
message: testdata.GenerateLogsManyLogRecordsSameResource(50),
message: testdata.GenerateLogs(50),
marshaler: logMarshaler})
// trace payloads
tracesMarshaler := &traceMarshaler{ptrace.NewProtoMarshaler()}
payloads = append(payloads, testPayload{
name: "sm_trace_request",
message: testdata.GenerateTracesOneSpan(),
message: testdata.GenerateTraces(1),
marshaler: tracesMarshaler})
payloads = append(payloads, testPayload{
name: "md_trace_request",
message: testdata.GenerateTracesTwoSpansSameResourceOneDifferent(),
message: testdata.GenerateTraces(2),
marshaler: tracesMarshaler})
payloads = append(payloads, testPayload{
name: "lg_trace_request",
message: testdata.GenerateTracesManySpansSameResource(50),
message: testdata.GenerateTraces(50),
marshaler: tracesMarshaler})
// metric payloads
metricsMarshaler := &metricsMarshaler{pmetric.NewProtoMarshaler()}
payloads = append(payloads, testPayload{
name: "sm_metric_request",
message: testdata.GenerateMetricsOneMetric(),
message: testdata.GenerateMetrics(1),
marshaler: metricsMarshaler})
payloads = append(payloads, testPayload{
name: "md_metric_request",
message: testdata.GenerateMetricsTwoMetrics(),
message: testdata.GenerateMetrics(2),
marshaler: metricsMarshaler})
payloads = append(payloads, testPayload{
name: "lg_metric_request",
message: testdata.GenerateMetricsManyMetricsSameResource(50),
message: testdata.GenerateMetrics(50),
marshaler: metricsMarshaler})
return payloads

View File

@ -25,7 +25,7 @@ import (
)
func TestTraces(t *testing.T) {
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
err := errors.New("some error")
traceErr := NewTraces(err, td)
assert.Equal(t, err.Error(), traceErr.Error())
@ -37,7 +37,7 @@ func TestTraces(t *testing.T) {
}
func TestTraces_Unwrap(t *testing.T) {
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
var err error = testErrorType{"some error"}
// Wrapping err with error Traces.
traceErr := NewTraces(err, td)
@ -49,7 +49,7 @@ func TestTraces_Unwrap(t *testing.T) {
}
func TestLogs(t *testing.T) {
td := testdata.GenerateLogsOneLogRecord()
td := testdata.GenerateLogs(1)
err := errors.New("some error")
logsErr := NewLogs(err, td)
assert.Equal(t, err.Error(), logsErr.Error())
@ -61,7 +61,7 @@ func TestLogs(t *testing.T) {
}
func TestLogs_Unwrap(t *testing.T) {
td := testdata.GenerateLogsOneLogRecord()
td := testdata.GenerateLogs(1)
var err error = testErrorType{"some error"}
// Wrapping err with error Logs.
logsErr := NewLogs(err, td)
@ -73,7 +73,7 @@ func TestLogs_Unwrap(t *testing.T) {
}
func TestMetrics(t *testing.T) {
td := testdata.GenerateMetricsOneMetric()
td := testdata.GenerateMetrics(1)
err := errors.New("some error")
metricErr := NewMetrics(err, td)
assert.Equal(t, err.Error(), metricErr.Error())
@ -85,7 +85,7 @@ func TestMetrics(t *testing.T) {
}
func TestMetrics_Unwrap(t *testing.T) {
td := testdata.GenerateMetricsOneMetric()
td := testdata.GenerateMetrics(1)
var err error = testErrorType{"some error"}
// Wrapping err with error Metrics.
metricErr := NewMetrics(err, td)

View File

@ -29,7 +29,7 @@ import (
func TestTracesSink(t *testing.T) {
sink := new(TracesSink)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
want := make([]ptrace.Traces, 0, 7)
for i := 0; i < 7; i++ {
require.NoError(t, sink.ConsumeTraces(context.Background(), td))
@ -44,7 +44,7 @@ func TestTracesSink(t *testing.T) {
func TestMetricsSink(t *testing.T) {
sink := new(MetricsSink)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
want := make([]pmetric.Metrics, 0, 7)
for i := 0; i < 7; i++ {
require.NoError(t, sink.ConsumeMetrics(context.Background(), md))
@ -59,7 +59,7 @@ func TestMetricsSink(t *testing.T) {
func TestLogsSink(t *testing.T) {
sink := new(LogsSink)
md := testdata.GenerateLogsOneLogRecord()
md := testdata.GenerateLogs(1)
want := make([]plog.Logs, 0, 7)
for i := 0; i < 7; i++ {
require.NoError(t, sink.ConsumeLogs(context.Background(), md))

View File

@ -48,7 +48,7 @@ var (
)
func TestLogsRequest(t *testing.T) {
lr := newLogsRequest(context.Background(), testdata.GenerateLogsOneLogRecord(), nil)
lr := newLogsRequest(context.Background(), testdata.GenerateLogs(1), nil)
logErr := consumererror.NewLogs(errors.New("some error"), plog.NewLogs())
assert.EqualValues(
@ -137,7 +137,7 @@ func TestLogsExporter_WithRecordEnqueueFailedMetrics(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, te)
md := testdata.GenerateLogsTwoLogRecordsSameResourceOneDifferent()
md := testdata.GenerateLogs(3)
const numBatches = 7
for i := 0; i < numBatches; i++ {
// errors are checked in the checkExporterEnqueueFailedLogsStats function below.
@ -209,7 +209,7 @@ func checkRecordedMetricsForLogsExporter(t *testing.T, le component.LogsExporter
require.NoError(t, err)
t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })
ld := testdata.GenerateLogsTwoLogRecordsSameResource()
ld := testdata.GenerateLogs(2)
const numBatches = 7
for i := 0; i < numBatches; i++ {
require.Equal(t, wantError, le.ConsumeLogs(context.Background(), ld))
@ -224,7 +224,7 @@ func checkRecordedMetricsForLogsExporter(t *testing.T, le component.LogsExporter
}
func generateLogsTraffic(t *testing.T, tracer trace.Tracer, le component.LogsExporter, numRequests int, wantError error) {
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
ctx, span := tracer.Start(context.Background(), fakeLogsParentSpanName)
defer span.End()
for i := 0; i < numRequests; i++ {

View File

@ -47,7 +47,7 @@ var (
)
func TestMetricsRequest(t *testing.T) {
mr := newMetricsRequest(context.Background(), testdata.GenerateMetricsOneMetric(), nil)
mr := newMetricsRequest(context.Background(), testdata.GenerateMetrics(1), nil)
metricsErr := consumererror.NewMetrics(errors.New("some error"), pmetric.NewMetrics())
assert.EqualValues(
@ -136,7 +136,7 @@ func TestMetricsExporter_WithRecordEnqueueFailedMetrics(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, te)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
const numBatches = 7
for i := 0; i < numBatches; i++ {
// errors are checked in the checkExporterEnqueueFailedMetricsStats function below.
@ -210,7 +210,7 @@ func checkRecordedMetricsForMetricsExporter(t *testing.T, me component.MetricsEx
require.NoError(t, err)
t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })
md := testdata.GenerateMetricsTwoMetrics()
md := testdata.GenerateMetrics(2)
const numBatches = 7
for i := 0; i < numBatches; i++ {
require.Equal(t, wantError, me.ConsumeMetrics(context.Background(), md))
@ -226,7 +226,7 @@ func checkRecordedMetricsForMetricsExporter(t *testing.T, me component.MetricsEx
}
func generateMetricsTraffic(t *testing.T, tracer trace.Tracer, me component.MetricsExporter, numRequests int, wantError error) {
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
ctx, span := tracer.Start(context.Background(), fakeMetricsParentSpanName)
defer span.End()
for i := 0; i < numRequests; i++ {

View File

@ -103,7 +103,7 @@ func TestQueuedRetry_OnError(t *testing.T) {
assert.NoError(t, be.Shutdown(context.Background()))
})
traceErr := consumererror.NewTraces(errors.New("some error"), testdata.GenerateTracesOneSpan())
traceErr := consumererror.NewTraces(errors.New("some error"), testdata.GenerateTraces(1))
mockR := newMockRequest(context.Background(), 2, traceErr)
ocs.run(func() {
// This is asynchronous so it should just enqueue, no errors expected.

View File

@ -47,7 +47,7 @@ var (
)
func TestTracesRequest(t *testing.T) {
mr := newTracesRequest(context.Background(), testdata.GenerateTracesOneSpan(), nil)
mr := newTracesRequest(context.Background(), testdata.GenerateTraces(1), nil)
traceErr := consumererror.NewTraces(errors.New("some error"), ptrace.NewTraces())
assert.EqualValues(t, newTracesRequest(context.Background(), ptrace.NewTraces(), nil), mr.onError(traceErr))
@ -134,7 +134,7 @@ func TestTracesExporter_WithRecordEnqueueFailedMetrics(t *testing.T) {
require.NoError(t, err)
require.NotNil(t, te)
td := testdata.GenerateTracesTwoSpansSameResource()
td := testdata.GenerateTraces(2)
const numBatches = 7
for i := 0; i < numBatches; i++ {
// errors are checked in the checkExporterEnqueueFailedTracesStats function below.
@ -210,7 +210,7 @@ func checkRecordedMetricsForTracesExporter(t *testing.T, te component.TracesExpo
require.NoError(t, err)
t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })
td := testdata.GenerateTracesTwoSpansSameResource()
td := testdata.GenerateTraces(2)
const numBatches = 7
for i := 0; i < numBatches; i++ {
require.Equal(t, wantError, te.ConsumeTraces(context.Background(), td))

View File

@ -32,11 +32,9 @@ func TestLogsText(t *testing.T) {
args args
empty bool
}{
{"empty logs", args{plog.NewLogs()}, true},
{"logs data with empty resource log", args{testdata.GenerateLogsOneEmptyResourceLogs()}, false},
{"logs data with no log records", args{testdata.GenerateLogsNoLogRecords()}, false},
{"logs with one empty log", args{testdata.GenerateLogsOneEmptyLogRecord()}, false},
{"logs with one log", args{testdata.GenerateLogsOneLogRecord()}, false},
{"empty logs", args{ld: plog.NewLogs()}, true},
{"logs with one log", args{ld: testdata.GenerateLogs(1)}, false},
{"logs with lots of log records", args{ld: testdata.GenerateLogs(10)}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View File

@ -33,9 +33,10 @@ func TestMetricsText(t *testing.T) {
empty bool
}{
{"empty metrics", args{pmetric.NewMetrics()}, true},
{"metrics with all types and datapoints", args{testdata.GeneratMetricsAllTypesWithSampleDatapoints()}, false},
{"metrics with all types without datapoints", args{testdata.GenerateMetricsAllTypesEmptyDataPoint()}, false},
{"metrics with all types and datapoints", args{testdata.GenerateMetricsAllTypes()}, false},
{"metrics with all types without datapoints", args{testdata.GenerateMetricsAllTypesEmpty()}, false},
{"metrics with invalid metric types", args{testdata.GenerateMetricsMetricTypeInvalid()}, false},
{"metrics with lots of metrics", args{testdata.GenerateMetrics(10)}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View File

@ -33,7 +33,8 @@ func TestTracesText(t *testing.T) {
empty bool
}{
{"empty traces", args{ptrace.NewTraces()}, true},
{"traces with two spans", args{testdata.GenerateTracesTwoSpansSameResource()}, false},
{"traces with one span", args{testdata.GenerateTraces(1)}, false},
{"traces with lots of spans", args{testdata.GenerateTraces(10)}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View File

@ -37,7 +37,7 @@ func TestLoggingTracesExporterNoErrors(t *testing.T) {
assert.NoError(t, err)
assert.NoError(t, lte.ConsumeTraces(context.Background(), ptrace.NewTraces()))
assert.NoError(t, lte.ConsumeTraces(context.Background(), testdata.GenerateTracesTwoSpansSameResourceOneDifferent()))
assert.NoError(t, lte.ConsumeTraces(context.Background(), testdata.GenerateTraces(10)))
assert.NoError(t, lte.Shutdown(context.Background()))
}
@ -48,9 +48,10 @@ func TestLoggingMetricsExporterNoErrors(t *testing.T) {
assert.NoError(t, err)
assert.NoError(t, lme.ConsumeMetrics(context.Background(), pmetric.NewMetrics()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GeneratMetricsAllTypesWithSampleDatapoints()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GenerateMetricsAllTypesEmptyDataPoint()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GenerateMetricsAllTypes()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GenerateMetricsAllTypesEmpty()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GenerateMetricsMetricTypeInvalid()))
assert.NoError(t, lme.ConsumeMetrics(context.Background(), testdata.GenerateMetrics(10)))
assert.NoError(t, lme.Shutdown(context.Background()))
}
@ -61,9 +62,7 @@ func TestLoggingLogsExporterNoErrors(t *testing.T) {
assert.NoError(t, err)
assert.NoError(t, lle.ConsumeLogs(context.Background(), plog.NewLogs()))
assert.NoError(t, lle.ConsumeLogs(context.Background(), testdata.GenerateLogsOneEmptyResourceLogs()))
assert.NoError(t, lle.ConsumeLogs(context.Background(), testdata.GenerateLogsNoLogRecords()))
assert.NoError(t, lle.ConsumeLogs(context.Background(), testdata.GenerateLogsOneEmptyLogRecord()))
assert.NoError(t, lle.ConsumeLogs(context.Background(), testdata.GenerateLogs(10)))
assert.NoError(t, lle.Shutdown(context.Background()))
}

View File

@ -247,7 +247,7 @@ func TestSendTraces(t *testing.T) {
assert.EqualValues(t, 0, rcv.totalItems.Load())
// A trace with 2 spans.
td = testdata.GenerateTracesTwoSpansSameResource()
td = testdata.GenerateTraces(2)
err = exp.ConsumeTraces(context.Background(), td)
assert.NoError(t, err)
@ -393,7 +393,7 @@ func TestSendMetrics(t *testing.T) {
assert.EqualValues(t, 0, rcv.totalItems.Load())
// Send two metrics.
md = testdata.GenerateMetricsTwoMetrics()
md = testdata.GenerateMetrics(2)
err = exp.ConsumeMetrics(context.Background(), md)
assert.NoError(t, err)
@ -449,7 +449,7 @@ func TestSendTraceDataServerDownAndUp(t *testing.T) {
assert.NoError(t, exp.Start(context.Background(), host))
// A trace with 2 spans.
td := testdata.GenerateTracesTwoSpansSameResource()
td := testdata.GenerateTraces(2)
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
assert.Error(t, exp.ConsumeTraces(ctx, td))
assert.EqualValues(t, context.DeadlineExceeded, ctx.Err())
@ -506,7 +506,7 @@ func TestSendTraceDataServerStartWhileRequest(t *testing.T) {
assert.NoError(t, exp.Start(context.Background(), host))
// A trace with 2 spans.
td := testdata.GenerateTracesTwoSpansSameResource()
td := testdata.GenerateTraces(2)
done := make(chan bool, 1)
defer close(done)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
@ -651,7 +651,7 @@ func TestSendLogData(t *testing.T) {
assert.EqualValues(t, 0, rcv.totalItems.Load())
// A request with 2 log entries.
ld = testdata.GenerateLogsTwoLogRecordsSameResource()
ld = testdata.GenerateLogs(2)
err = exp.ConsumeLogs(context.Background(), ld)
assert.NoError(t, err)

View File

@ -72,17 +72,17 @@ func TestInvalidConfig(t *testing.T) {
func TestTraceNoBackend(t *testing.T) {
addr := testutil.GetAvailableLocalAddress(t)
exp := startTracesExporter(t, "", fmt.Sprintf("http://%s/v1/traces", addr))
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
assert.Error(t, exp.ConsumeTraces(context.Background(), td))
}
func TestTraceInvalidUrl(t *testing.T) {
exp := startTracesExporter(t, "http:/\\//this_is_an/*/invalid_url", "")
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
assert.Error(t, exp.ConsumeTraces(context.Background(), td))
exp = startTracesExporter(t, "", "http:/\\//this_is_an/*/invalid_url")
td = testdata.GenerateTracesOneSpan()
td = testdata.GenerateTraces(1)
assert.Error(t, exp.ConsumeTraces(context.Background(), td))
}
@ -92,7 +92,7 @@ func TestTraceError(t *testing.T) {
startTracesReceiver(t, addr, consumertest.NewErr(errors.New("my_error")))
exp := startTracesExporter(t, "", fmt.Sprintf("http://%s/v1/traces", addr))
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
assert.Error(t, exp.ConsumeTraces(context.Background(), td))
}
@ -127,7 +127,7 @@ func TestTraceRoundTrip(t *testing.T) {
startTracesReceiver(t, addr, sink)
exp := startTracesExporter(t, test.baseURL, test.overrideURL)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
assert.NoError(t, exp.ConsumeTraces(context.Background(), td))
require.Eventually(t, func() bool {
return sink.SpanCount() > 0
@ -145,7 +145,7 @@ func TestMetricsError(t *testing.T) {
startMetricsReceiver(t, addr, consumertest.NewErr(errors.New("my_error")))
exp := startMetricsExporter(t, "", fmt.Sprintf("http://%s/v1/metrics", addr))
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
assert.Error(t, exp.ConsumeMetrics(context.Background(), md))
}
@ -180,7 +180,7 @@ func TestMetricsRoundTrip(t *testing.T) {
startMetricsReceiver(t, addr, sink)
exp := startMetricsExporter(t, test.baseURL, test.overrideURL)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
assert.NoError(t, exp.ConsumeMetrics(context.Background(), md))
require.Eventually(t, func() bool {
return sink.DataPointCount() > 0
@ -198,7 +198,7 @@ func TestLogsError(t *testing.T) {
startLogsReceiver(t, addr, consumertest.NewErr(errors.New("my_error")))
exp := startLogsExporter(t, "", fmt.Sprintf("http://%s/v1/logs", addr))
md := testdata.GenerateLogsOneLogRecord()
md := testdata.GenerateLogs(1)
assert.Error(t, exp.ConsumeLogs(context.Background(), md))
}
@ -233,7 +233,7 @@ func TestLogsRoundTrip(t *testing.T) {
startLogsReceiver(t, addr, sink)
exp := startLogsExporter(t, test.baseURL, test.overrideURL)
md := testdata.GenerateLogsOneLogRecord()
md := testdata.GenerateLogs(1)
assert.NoError(t, exp.ConsumeLogs(context.Background(), md))
require.Eventually(t, func() bool {
return sink.LogRecordCount() > 0

View File

@ -18,70 +18,47 @@ import (
"go.opentelemetry.io/collector/pdata/pcommon"
)
var (
resourceAttributes1 = pcommon.NewMapFromRaw(map[string]interface{}{"resource-attr": "resource-attr-val-1"})
resourceAttributes2 = pcommon.NewMapFromRaw(map[string]interface{}{"resource-attr": "resource-attr-val-2"})
spanEventAttributes = pcommon.NewMapFromRaw(map[string]interface{}{"span-event-attr": "span-event-attr-val"})
spanLinkAttributes = pcommon.NewMapFromRaw(map[string]interface{}{"span-link-attr": "span-link-attr-val"})
spanAttributes = pcommon.NewMapFromRaw(map[string]interface{}{"span-attr": "span-attr-val"})
metricAttachment = pcommon.NewMapFromRaw(map[string]interface{}{"exemplar-attachment": "exemplar-attachment-value"})
)
const (
TestLabelKey1 = "label-1"
TestLabelValue1 = "label-value-1"
TestLabelKey2 = "label-2"
TestLabelValue2 = "label-value-2"
TestLabelKey3 = "label-3"
TestLabelValue3 = "label-value-3"
testLabelKey2 = "label-2"
testLabelValue2 = "label-value-2"
)
func initResourceAttributes1(dest pcommon.Map) {
dest.Clear()
resourceAttributes1.CopyTo(dest)
}
func initResourceAttributes2(dest pcommon.Map) {
dest.Clear()
resourceAttributes2.CopyTo(dest)
}
func initSpanAttributes(dest pcommon.Map) {
dest.Clear()
spanAttributes.CopyTo(dest)
dest.InsertString("resource-attr", "resource-attr-val-1")
}
func initSpanEventAttributes(dest pcommon.Map) {
dest.Clear()
spanEventAttributes.CopyTo(dest)
dest.InsertString("span-event-attr", "span-event-attr-val")
}
func initSpanLinkAttributes(dest pcommon.Map) {
dest.Clear()
spanLinkAttributes.CopyTo(dest)
dest.InsertString("span-link-attr", "span-link-attr-val")
}
func initMetricAttachment(dest pcommon.Map) {
func initMetricExemplarAttributes(dest pcommon.Map) {
dest.Clear()
metricAttachment.CopyTo(dest)
dest.InsertString("exemplar-attachment", "exemplar-attachment-value")
}
func initMetricAttributes1(dest pcommon.Map) {
dest.Clear()
dest.InsertString(TestLabelKey1, TestLabelValue1)
dest.InsertString("label-1", "label-value-1")
}
func initMetricAttributes12(dest pcommon.Map) {
initMetricAttributes1(dest)
dest.InsertString(TestLabelKey2, TestLabelValue2)
dest.InsertString(testLabelKey2, testLabelValue2)
}
func initMetricAttributes13(dest pcommon.Map) {
initMetricAttributes1(dest)
dest.InsertString(TestLabelKey3, TestLabelValue3)
dest.InsertString("label-3", "label-value-3")
}
func initMetricAttributes2(dest pcommon.Map) {
dest.Clear()
dest.InsertString(TestLabelKey2, TestLabelValue2)
dest.InsertString(testLabelKey2, testLabelValue2)
}

View File

@ -22,57 +22,27 @@ import (
)
var (
TestLogTime = time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC)
TestLogTimestamp = pcommon.NewTimestampFromTime(TestLogTime)
logTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC))
)
func GenerateLogsOneEmptyResourceLogs() plog.Logs {
func GenerateLogs(count int) plog.Logs {
ld := plog.NewLogs()
ld.ResourceLogs().AppendEmpty()
initResource(ld.ResourceLogs().AppendEmpty().Resource())
logs := ld.ResourceLogs().At(0).ScopeLogs().AppendEmpty().LogRecords()
logs.EnsureCapacity(count)
for i := 0; i < count; i++ {
switch i % 2 {
case 0:
fillLogOne(logs.AppendEmpty())
case 1:
fillLogTwo(logs.AppendEmpty())
}
}
return ld
}
// GenerateLogsNoLogRecords returns a plog.Logs with a single ResourceLogs
// entry whose Resource is populated, but with no ScopeLogs or LogRecords
// attached.
func GenerateLogsNoLogRecords() plog.Logs {
ld := GenerateLogsOneEmptyResourceLogs()
// Fill in the resource of the first (and only) ResourceLogs entry.
// NOTE(review): initResource1 is defined elsewhere in this package —
// presumably it sets the shared "resource-attr" test attribute; confirm.
initResource1(ld.ResourceLogs().At(0).Resource())
return ld
}
// GenerateLogsOneEmptyLogRecord builds on GenerateLogsNoLogRecords by
// appending one ScopeLogs entry that contains a single, zero-value LogRecord.
func GenerateLogsOneEmptyLogRecord() plog.Logs {
ld := GenerateLogsNoLogRecords()
rs0 := ld.ResourceLogs().At(0)
// One empty scope holding one unpopulated record.
rs0.ScopeLogs().AppendEmpty().LogRecords().AppendEmpty()
return ld
}
// GenerateLogsOneLogRecord returns logs with one resource, one scope, and a
// single log record populated by fillLogOne.
func GenerateLogsOneLogRecord() plog.Logs {
ld := GenerateLogsOneEmptyLogRecord()
// Populate the record that GenerateLogsOneEmptyLogRecord pre-created.
fillLogOne(ld.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords().At(0))
return ld
}
// GenerateLogsTwoLogRecordsSameResource returns logs with two populated log
// records (fixtures "one" and "two") sharing a single resource and scope.
func GenerateLogsTwoLogRecordsSameResource() plog.Logs {
ld := GenerateLogsOneEmptyLogRecord()
logs := ld.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
fillLogOne(logs.At(0)) // reuse the pre-created empty record
fillLogTwo(logs.AppendEmpty()) // append the second record
return ld
}
// GenerateLogsTwoLogRecordsSameResourceOneDifferent returns logs spanning two
// resources: the first resource carries records "one" and "two" in a single
// scope, the second resource carries record "three".
func GenerateLogsTwoLogRecordsSameResourceOneDifferent() plog.Logs {
ld := plog.NewLogs()
// First resource: two records in one scope.
rl0 := ld.ResourceLogs().AppendEmpty()
initResource1(rl0.Resource())
logs := rl0.ScopeLogs().AppendEmpty().LogRecords()
fillLogOne(logs.AppendEmpty())
fillLogTwo(logs.AppendEmpty())
// Second, different resource: a single record.
rl1 := ld.ResourceLogs().AppendEmpty()
initResource2(rl1.Resource())
fillLogThree(rl1.ScopeLogs().AppendEmpty().LogRecords().AppendEmpty())
return ld
}
func fillLogOne(log plog.LogRecord) {
log.SetTimestamp(TestLogTimestamp)
log.SetTimestamp(logTimestamp)
log.SetDroppedAttributesCount(1)
log.SetSeverityNumber(plog.SeverityNumberINFO)
log.SetSeverityText("Info")
@ -87,7 +57,7 @@ func fillLogOne(log plog.LogRecord) {
}
func fillLogTwo(log plog.LogRecord) {
log.SetTimestamp(TestLogTimestamp)
log.SetTimestamp(logTimestamp)
log.SetDroppedAttributesCount(1)
log.SetSeverityNumber(plog.SeverityNumberINFO)
log.SetSeverityText("Info")
@ -98,33 +68,3 @@ func fillLogTwo(log plog.LogRecord) {
log.Body().SetStringVal("something happened")
}
// fillLogThree populates log with the third fixture record: WARN severity,
// the shared test timestamp, one dropped attribute, and a fixed body string.
func fillLogThree(log plog.LogRecord) {
log.SetTimestamp(TestLogTimestamp)
log.SetDroppedAttributesCount(1)
log.SetSeverityNumber(plog.SeverityNumberWARN)
log.SetSeverityText("Warning")
log.Body().SetStringVal("something else happened")
}
// GenerateLogsManyLogRecordsSameResource returns logs with count records under
// a single resource/scope, alternating between fixtures: even indexes get
// fillLogOne, odd indexes get fillLogTwo.
// NOTE(review): for count == 0 the one empty record pre-created by
// GenerateLogsOneEmptyLogRecord is left unfilled, so the result is not empty.
func GenerateLogsManyLogRecordsSameResource(count int) plog.Logs {
ld := GenerateLogsOneEmptyLogRecord()
logs := ld.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
logs.EnsureCapacity(count)
for i := 0; i < count; i++ {
var l plog.LogRecord
// Reuse the pre-created record for i == 0; append new records after.
if i < logs.Len() {
l = logs.At(i)
} else {
l = logs.AppendEmpty()
}
if i%2 == 0 {
fillLogOne(l)
} else {
fillLogTwo(l)
}
}
return ld
}

View File

@ -22,14 +22,9 @@ import (
)
var (
TestMetricStartTime = time.Date(2020, 2, 11, 20, 26, 12, 321, time.UTC)
TestMetricStartTimestamp = pcommon.NewTimestampFromTime(TestMetricStartTime)
TestMetricExemplarTime = time.Date(2020, 2, 11, 20, 26, 13, 123, time.UTC)
TestMetricExemplarTimestamp = pcommon.NewTimestampFromTime(TestMetricExemplarTime)
TestMetricTime = time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC)
TestMetricTimestamp = pcommon.NewTimestampFromTime(TestMetricTime)
metricStartTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 12, 321, time.UTC))
metricExemplarTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 13, 123, time.UTC))
metricTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC))
)
const (
@ -42,44 +37,16 @@ const (
TestSummaryMetricName = "summary"
)
func GenerateMetricsOneEmptyResourceMetrics() pmetric.Metrics {
func generateMetricsOneEmptyInstrumentationScope() pmetric.Metrics {
md := pmetric.NewMetrics()
md.ResourceMetrics().AppendEmpty()
return md
}
func GenerateMetricsNoLibraries() pmetric.Metrics {
md := GenerateMetricsOneEmptyResourceMetrics()
ms0 := md.ResourceMetrics().At(0)
initResource1(ms0.Resource())
return md
}
func GenerateMetricsOneEmptyInstrumentationScope() pmetric.Metrics {
md := GenerateMetricsNoLibraries()
initResource(md.ResourceMetrics().AppendEmpty().Resource())
md.ResourceMetrics().At(0).ScopeMetrics().AppendEmpty()
return md
}
func GenerateMetricsOneMetric() pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
rm0ils0 := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
initSumIntMetric(rm0ils0.Metrics().AppendEmpty())
return md
}
func GenerateMetricsTwoMetrics() pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
rm0ils0 := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
initSumIntMetric(rm0ils0.Metrics().AppendEmpty())
initSumIntMetric(rm0ils0.Metrics().AppendEmpty())
return md
}
func GenerateMetricsAllTypesEmptyDataPoint() pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
ilm0 := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
ms := ilm0.Metrics()
func GenerateMetricsAllTypesEmpty() pmetric.Metrics {
md := generateMetricsOneEmptyInstrumentationScope()
ms := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
doubleGauge := ms.AppendEmpty()
initMetric(doubleGauge, TestGaugeDoubleMetricName, pmetric.MetricDataTypeGauge)
@ -103,17 +70,14 @@ func GenerateMetricsAllTypesEmptyDataPoint() pmetric.Metrics {
}
func GenerateMetricsMetricTypeInvalid() pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
ilm0 := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
initMetric(ilm0.Metrics().AppendEmpty(), TestSumIntMetricName, pmetric.MetricDataTypeNone)
md := generateMetricsOneEmptyInstrumentationScope()
initMetric(md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().AppendEmpty(), TestSumIntMetricName, pmetric.MetricDataTypeNone)
return md
}
func GeneratMetricsAllTypesWithSampleDatapoints() pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
ilm := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
ms := ilm.Metrics()
func GenerateMetricsAllTypes() pmetric.Metrics {
md := generateMetricsOneEmptyInstrumentationScope()
ms := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
initGaugeIntMetric(ms.AppendEmpty())
initGaugeDoubleMetric(ms.AppendEmpty())
initSumIntMetric(ms.AppendEmpty())
@ -124,19 +88,44 @@ func GeneratMetricsAllTypesWithSampleDatapoints() pmetric.Metrics {
return md
}
// GenerateMetrics returns a pmetric.Metrics fixture containing count metrics
// under a single resource and instrumentation scope, cycling round-robin
// through the seven fixture shapes: int gauge, double gauge, int sum, double
// sum, histogram, exponential histogram, and summary.
func GenerateMetrics(count int) pmetric.Metrics {
md := generateMetricsOneEmptyInstrumentationScope()
ms := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
// Pre-size the slice so the loop does not repeatedly grow it.
ms.EnsureCapacity(count)
for i := 0; i < count; i++ {
switch i % 7 {
case 0:
initGaugeIntMetric(ms.AppendEmpty())
case 1:
initGaugeDoubleMetric(ms.AppendEmpty())
case 2:
initSumIntMetric(ms.AppendEmpty())
case 3:
initSumDoubleMetric(ms.AppendEmpty())
case 4:
initHistogramMetric(ms.AppendEmpty())
case 5:
initExponentialHistogramMetric(ms.AppendEmpty())
case 6:
initSummaryMetric(ms.AppendEmpty())
}
}
return md
}
func initGaugeIntMetric(im pmetric.Metric) {
initMetric(im, TestGaugeIntMetricName, pmetric.MetricDataTypeGauge)
idps := im.Gauge().DataPoints()
idp0 := idps.AppendEmpty()
initMetricAttributes1(idp0.Attributes())
idp0.SetStartTimestamp(TestMetricStartTimestamp)
idp0.SetTimestamp(TestMetricTimestamp)
idp0.SetStartTimestamp(metricStartTimestamp)
idp0.SetTimestamp(metricTimestamp)
idp0.SetIntVal(123)
idp1 := idps.AppendEmpty()
initMetricAttributes2(idp1.Attributes())
idp1.SetStartTimestamp(TestMetricStartTimestamp)
idp1.SetTimestamp(TestMetricTimestamp)
idp1.SetStartTimestamp(metricStartTimestamp)
idp1.SetTimestamp(metricTimestamp)
idp1.SetIntVal(456)
}
@ -146,13 +135,13 @@ func initGaugeDoubleMetric(im pmetric.Metric) {
idps := im.Gauge().DataPoints()
idp0 := idps.AppendEmpty()
initMetricAttributes12(idp0.Attributes())
idp0.SetStartTimestamp(TestMetricStartTimestamp)
idp0.SetTimestamp(TestMetricTimestamp)
idp0.SetStartTimestamp(metricStartTimestamp)
idp0.SetTimestamp(metricTimestamp)
idp0.SetDoubleVal(1.23)
idp1 := idps.AppendEmpty()
initMetricAttributes13(idp1.Attributes())
idp1.SetStartTimestamp(TestMetricStartTimestamp)
idp1.SetTimestamp(TestMetricTimestamp)
idp1.SetStartTimestamp(metricStartTimestamp)
idp1.SetTimestamp(metricTimestamp)
idp1.SetDoubleVal(4.56)
}
@ -162,13 +151,13 @@ func initSumIntMetric(im pmetric.Metric) {
idps := im.Sum().DataPoints()
idp0 := idps.AppendEmpty()
initMetricAttributes1(idp0.Attributes())
idp0.SetStartTimestamp(TestMetricStartTimestamp)
idp0.SetTimestamp(TestMetricTimestamp)
idp0.SetStartTimestamp(metricStartTimestamp)
idp0.SetTimestamp(metricTimestamp)
idp0.SetIntVal(123)
idp1 := idps.AppendEmpty()
initMetricAttributes2(idp1.Attributes())
idp1.SetStartTimestamp(TestMetricStartTimestamp)
idp1.SetTimestamp(TestMetricTimestamp)
idp1.SetStartTimestamp(metricStartTimestamp)
idp1.SetTimestamp(metricTimestamp)
idp1.SetIntVal(456)
}
@ -178,14 +167,14 @@ func initSumDoubleMetric(dm pmetric.Metric) {
ddps := dm.Sum().DataPoints()
ddp0 := ddps.AppendEmpty()
initMetricAttributes12(ddp0.Attributes())
ddp0.SetStartTimestamp(TestMetricStartTimestamp)
ddp0.SetTimestamp(TestMetricTimestamp)
ddp0.SetStartTimestamp(metricStartTimestamp)
ddp0.SetTimestamp(metricTimestamp)
ddp0.SetDoubleVal(1.23)
ddp1 := ddps.AppendEmpty()
initMetricAttributes13(ddp1.Attributes())
ddp1.SetStartTimestamp(TestMetricStartTimestamp)
ddp1.SetTimestamp(TestMetricTimestamp)
ddp1.SetStartTimestamp(metricStartTimestamp)
ddp1.SetTimestamp(metricTimestamp)
ddp1.SetDoubleVal(4.56)
}
@ -195,24 +184,24 @@ func initHistogramMetric(hm pmetric.Metric) {
hdps := hm.Histogram().DataPoints()
hdp0 := hdps.AppendEmpty()
initMetricAttributes13(hdp0.Attributes())
hdp0.SetStartTimestamp(TestMetricStartTimestamp)
hdp0.SetTimestamp(TestMetricTimestamp)
hdp0.SetStartTimestamp(metricStartTimestamp)
hdp0.SetTimestamp(metricTimestamp)
hdp0.SetCount(1)
hdp0.SetSum(15)
hdp1 := hdps.AppendEmpty()
initMetricAttributes2(hdp1.Attributes())
hdp1.SetStartTimestamp(TestMetricStartTimestamp)
hdp1.SetTimestamp(TestMetricTimestamp)
hdp1.SetStartTimestamp(metricStartTimestamp)
hdp1.SetTimestamp(metricTimestamp)
hdp1.SetCount(1)
hdp1.SetSum(15)
hdp1.SetMin(15)
hdp1.SetMax(15)
hdp1.SetBucketCounts(pcommon.NewImmutableUInt64Slice([]uint64{0, 1}))
exemplar := hdp1.Exemplars().AppendEmpty()
exemplar.SetTimestamp(TestMetricExemplarTimestamp)
exemplar.SetTimestamp(metricExemplarTimestamp)
exemplar.SetDoubleVal(15)
initMetricAttachment(exemplar.FilteredAttributes())
initMetricExemplarAttributes(exemplar.FilteredAttributes())
hdp1.SetExplicitBounds(pcommon.NewImmutableFloat64Slice([]float64{1}))
}
@ -222,8 +211,8 @@ func initExponentialHistogramMetric(hm pmetric.Metric) {
hdps := hm.ExponentialHistogram().DataPoints()
hdp0 := hdps.AppendEmpty()
initMetricAttributes13(hdp0.Attributes())
hdp0.SetStartTimestamp(TestMetricStartTimestamp)
hdp0.SetTimestamp(TestMetricTimestamp)
hdp0.SetStartTimestamp(metricStartTimestamp)
hdp0.SetTimestamp(metricTimestamp)
hdp0.SetCount(5)
hdp0.SetSum(0.15)
hdp0.SetZeroCount(1)
@ -245,8 +234,8 @@ func initExponentialHistogramMetric(hm pmetric.Metric) {
hdp1 := hdps.AppendEmpty()
initMetricAttributes2(hdp1.Attributes())
hdp1.SetStartTimestamp(TestMetricStartTimestamp)
hdp1.SetTimestamp(TestMetricTimestamp)
hdp1.SetStartTimestamp(metricStartTimestamp)
hdp1.SetTimestamp(metricTimestamp)
hdp1.SetCount(3)
hdp1.SetSum(1.25)
hdp1.SetMin(0)
@ -264,9 +253,9 @@ func initExponentialHistogramMetric(hm pmetric.Metric) {
// Bucket [1.000000, 4.000000), Count: 1
exemplar := hdp1.Exemplars().AppendEmpty()
exemplar.SetTimestamp(TestMetricExemplarTimestamp)
exemplar.SetTimestamp(metricExemplarTimestamp)
exemplar.SetDoubleVal(15)
initMetricAttachment(exemplar.FilteredAttributes())
initMetricExemplarAttributes(exemplar.FilteredAttributes())
}
func initSummaryMetric(sm pmetric.Metric) {
@ -275,15 +264,15 @@ func initSummaryMetric(sm pmetric.Metric) {
sdps := sm.Summary().DataPoints()
sdp0 := sdps.AppendEmpty()
initMetricAttributes13(sdp0.Attributes())
sdp0.SetStartTimestamp(TestMetricStartTimestamp)
sdp0.SetTimestamp(TestMetricTimestamp)
sdp0.SetStartTimestamp(metricStartTimestamp)
sdp0.SetTimestamp(metricTimestamp)
sdp0.SetCount(1)
sdp0.SetSum(15)
sdp1 := sdps.AppendEmpty()
initMetricAttributes2(sdp1.Attributes())
sdp1.SetStartTimestamp(TestMetricStartTimestamp)
sdp1.SetTimestamp(TestMetricTimestamp)
sdp1.SetStartTimestamp(metricStartTimestamp)
sdp1.SetTimestamp(metricTimestamp)
sdp1.SetCount(1)
sdp1.SetSum(15)
@ -310,13 +299,3 @@ func initMetric(m pmetric.Metric, name string, ty pmetric.MetricDataType) {
histo.SetAggregationTemporality(pmetric.MetricAggregationTemporalityDelta)
}
}
func GenerateMetricsManyMetricsSameResource(metricsCount int) pmetric.Metrics {
md := GenerateMetricsOneEmptyInstrumentationScope()
rs0ilm0 := md.ResourceMetrics().At(0).ScopeMetrics().At(0)
rs0ilm0.Metrics().EnsureCapacity(metricsCount)
for i := 0; i < metricsCount; i++ {
initSumIntMetric(rs0ilm0.Metrics().AppendEmpty())
}
return md
}

View File

@ -16,10 +16,6 @@ package testdata
import "go.opentelemetry.io/collector/pdata/pcommon"
func initResource1(r pcommon.Resource) {
func initResource(r pcommon.Resource) {
initResourceAttributes1(r.Attributes())
}
func initResource2(r pcommon.Resource) {
initResourceAttributes2(r.Attributes())
}

View File

@ -22,89 +22,42 @@ import (
)
var (
TestSpanStartTime = time.Date(2020, 2, 11, 20, 26, 12, 321, time.UTC)
TestSpanStartTimestamp = pcommon.NewTimestampFromTime(TestSpanStartTime)
TestSpanEventTime = time.Date(2020, 2, 11, 20, 26, 13, 123, time.UTC)
TestSpanEventTimestamp = pcommon.NewTimestampFromTime(TestSpanEventTime)
TestSpanEndTime = time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC)
TestSpanEndTimestamp = pcommon.NewTimestampFromTime(TestSpanEndTime)
spanStartTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 12, 321, time.UTC))
spanEventTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 13, 123, time.UTC))
spanEndTimestamp = pcommon.NewTimestampFromTime(time.Date(2020, 2, 11, 20, 26, 13, 789, time.UTC))
)
func GenerateTracesOneEmptyResourceSpans() ptrace.Traces {
func GenerateTraces(spanCount int) ptrace.Traces {
td := ptrace.NewTraces()
td.ResourceSpans().AppendEmpty()
return td
}
func GenerateTracesNoLibraries() ptrace.Traces {
td := GenerateTracesOneEmptyResourceSpans()
rs0 := td.ResourceSpans().At(0)
initResource1(rs0.Resource())
return td
}
func GenerateTracesOneEmptyInstrumentationScope() ptrace.Traces {
td := GenerateTracesNoLibraries()
td.ResourceSpans().At(0).ScopeSpans().AppendEmpty()
return td
}
func GenerateTracesOneSpan() ptrace.Traces {
td := GenerateTracesOneEmptyInstrumentationScope()
rs0ils0 := td.ResourceSpans().At(0).ScopeSpans().At(0)
fillSpanOne(rs0ils0.Spans().AppendEmpty())
return td
}
func GenerateTracesTwoSpansSameResource() ptrace.Traces {
td := GenerateTracesOneEmptyInstrumentationScope()
rs0ils0 := td.ResourceSpans().At(0).ScopeSpans().At(0)
fillSpanOne(rs0ils0.Spans().AppendEmpty())
fillSpanTwo(rs0ils0.Spans().AppendEmpty())
return td
}
func GenerateTracesTwoSpansSameResourceOneDifferent() ptrace.Traces {
td := ptrace.NewTraces()
rs0 := td.ResourceSpans().AppendEmpty()
initResource1(rs0.Resource())
rs0ils0 := rs0.ScopeSpans().AppendEmpty()
fillSpanOne(rs0ils0.Spans().AppendEmpty())
fillSpanTwo(rs0ils0.Spans().AppendEmpty())
rs1 := td.ResourceSpans().AppendEmpty()
initResource2(rs1.Resource())
rs1ils0 := rs1.ScopeSpans().AppendEmpty()
fillSpanThree(rs1ils0.Spans().AppendEmpty())
return td
}
func GenerateTracesManySpansSameResource(spanCount int) ptrace.Traces {
td := GenerateTracesOneEmptyInstrumentationScope()
rs0ils0 := td.ResourceSpans().At(0).ScopeSpans().At(0)
rs0ils0.Spans().EnsureCapacity(spanCount)
initResource(td.ResourceSpans().AppendEmpty().Resource())
ss := td.ResourceSpans().At(0).ScopeSpans().AppendEmpty().Spans()
ss.EnsureCapacity(spanCount)
for i := 0; i < spanCount; i++ {
fillSpanOne(rs0ils0.Spans().AppendEmpty())
switch i % 2 {
case 0:
fillSpanOne(ss.AppendEmpty())
case 1:
fillSpanTwo(ss.AppendEmpty())
}
}
return td
}
func fillSpanOne(span ptrace.Span) {
span.SetName("operationA")
span.SetStartTimestamp(TestSpanStartTimestamp)
span.SetEndTimestamp(TestSpanEndTimestamp)
span.SetStartTimestamp(spanStartTimestamp)
span.SetEndTimestamp(spanEndTimestamp)
span.SetDroppedAttributesCount(1)
span.SetTraceID(pcommon.NewTraceID([16]byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10}))
span.SetSpanID(pcommon.NewSpanID([8]byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18}))
evs := span.Events()
ev0 := evs.AppendEmpty()
ev0.SetTimestamp(TestSpanEventTimestamp)
ev0.SetTimestamp(spanEventTimestamp)
ev0.SetName("event-with-attr")
initSpanEventAttributes(ev0.Attributes())
ev0.SetDroppedAttributesCount(2)
ev1 := evs.AppendEmpty()
ev1.SetTimestamp(TestSpanEventTimestamp)
ev1.SetTimestamp(spanEventTimestamp)
ev1.SetName("event")
ev1.SetDroppedAttributesCount(2)
span.SetDroppedEventsCount(1)
@ -115,8 +68,8 @@ func fillSpanOne(span ptrace.Span) {
func fillSpanTwo(span ptrace.Span) {
span.SetName("operationB")
span.SetStartTimestamp(TestSpanStartTimestamp)
span.SetEndTimestamp(TestSpanEndTimestamp)
span.SetStartTimestamp(spanStartTimestamp)
span.SetEndTimestamp(spanEndTimestamp)
link0 := span.Links().AppendEmpty()
initSpanLinkAttributes(link0.Attributes())
link0.SetDroppedAttributesCount(4)
@ -124,11 +77,3 @@ func fillSpanTwo(span ptrace.Span) {
link1.SetDroppedAttributesCount(4)
span.SetDroppedLinksCount(3)
}
func fillSpanThree(span ptrace.Span) {
span.SetName("operationC")
span.SetStartTimestamp(TestSpanStartTimestamp)
span.SetEndTimestamp(TestSpanEndTimestamp)
initSpanAttributes(span.Attributes())
span.SetDroppedAttributesCount(5)
}

View File

@ -50,7 +50,7 @@ func TestBatchProcessorSpansDelivered(t *testing.T) {
spansPerRequest := 100
traceDataSlice := make([]ptrace.Traces, 0, requestCount)
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for spanIndex := 0; spanIndex < spansPerRequest; spanIndex++ {
spans.At(spanIndex).SetName(getTestSpanName(requestNum, spanIndex))
@ -91,7 +91,7 @@ func TestBatchProcessorSpansDeliveredEnforceBatchSize(t *testing.T) {
requestCount := 1000
spansPerRequest := 150
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for spanIndex := 0; spanIndex < spansPerRequest; spanIndex++ {
spans.At(spanIndex).SetName(getTestSpanName(requestNum, spanIndex))
@ -143,7 +143,7 @@ func TestBatchProcessorSentBySize(t *testing.T) {
start := time.Now()
sizeSum := 0
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
sizeSum += sizer.TracesSize(td)
assert.NoError(t, batcher.ConsumeTraces(context.Background(), td))
}
@ -207,7 +207,7 @@ func TestBatchProcessorSentBySize_withMaxSize(t *testing.T) {
start := time.Now()
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
assert.NoError(t, batcher.ConsumeTraces(context.Background(), td))
}
@ -250,7 +250,7 @@ func TestBatchProcessorSentByTimeout(t *testing.T) {
require.NoError(t, batcher.Start(context.Background(), componenttest.NewNopHost()))
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
assert.NoError(t, batcher.ConsumeTraces(context.Background(), td))
}
@ -299,7 +299,7 @@ func TestBatchProcessorTraceSendWhenClosing(t *testing.T) {
requestCount := 10
spansPerRequest := 10
for requestNum := 0; requestNum < requestCount; requestNum++ {
td := testdata.GenerateTracesManySpansSameResource(spansPerRequest)
td := testdata.GenerateTraces(spansPerRequest)
assert.NoError(t, batcher.ConsumeTraces(context.Background(), td))
}
@ -330,7 +330,7 @@ func TestBatchMetricProcessor_ReceivingData(t *testing.T) {
metricDataSlice := make([]pmetric.Metrics, 0, requestCount)
for requestNum := 0; requestNum < requestCount; requestNum++ {
md := testdata.GenerateMetricsManyMetricsSameResource(metricsPerRequest)
md := testdata.GenerateMetrics(metricsPerRequest)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
for metricIndex := 0; metricIndex < metricsPerRequest; metricIndex++ {
metrics.At(metricIndex).SetName(getTestMetricName(requestNum, metricIndex))
@ -386,7 +386,7 @@ func TestBatchMetricProcessor_BatchSize(t *testing.T) {
start := time.Now()
size := 0
for requestNum := 0; requestNum < requestCount; requestNum++ {
md := testdata.GenerateMetricsManyMetricsSameResource(metricsPerRequest)
md := testdata.GenerateMetrics(metricsPerRequest)
size += sizer.MetricsSize(md)
assert.NoError(t, batcher.ConsumeMetrics(context.Background(), md))
}
@ -433,7 +433,7 @@ func TestBatchMetrics_UnevenBatchMaxSize(t *testing.T) {
sendBatchMaxSize := 99
batchMetrics := newBatchMetrics(sink)
md := testdata.GenerateMetricsManyMetricsSameResource(metricsCount)
md := testdata.GenerateMetrics(metricsCount)
batchMetrics.add(md)
require.Equal(t, dataPointsPerMetric*metricsCount, batchMetrics.dataPointCount)
@ -461,7 +461,7 @@ func TestBatchMetricsProcessor_Timeout(t *testing.T) {
start := time.Now()
for requestNum := 0; requestNum < requestCount; requestNum++ {
md := testdata.GenerateMetricsManyMetricsSameResource(metricsPerRequest)
md := testdata.GenerateMetrics(metricsPerRequest)
assert.NoError(t, batcher.ConsumeMetrics(context.Background(), md))
}
@ -509,7 +509,7 @@ func TestBatchMetricProcessor_Shutdown(t *testing.T) {
require.NoError(t, batcher.Start(context.Background(), componenttest.NewNopHost()))
for requestNum := 0; requestNum < requestCount; requestNum++ {
md := testdata.GenerateMetricsManyMetricsSameResource(metricsPerRequest)
md := testdata.GenerateMetrics(metricsPerRequest)
assert.NoError(t, batcher.ConsumeMetrics(context.Background(), md))
}
@ -565,14 +565,14 @@ func getTestMetricName(requestNum, index int) string {
func BenchmarkTraceSizeBytes(b *testing.B) {
sizer := ptrace.NewProtoMarshaler().(ptrace.Sizer)
td := testdata.GenerateTracesManySpansSameResource(8192)
td := testdata.GenerateTraces(8192)
for n := 0; n < b.N; n++ {
fmt.Println(sizer.TracesSize(td))
}
}
func BenchmarkTraceSizeSpanCount(b *testing.B) {
td := testdata.GenerateTracesManySpansSameResource(8192)
td := testdata.GenerateTraces(8192)
for n := 0; n < b.N; n++ {
td.SpanCount()
}
@ -597,7 +597,7 @@ func BenchmarkBatchMetricProcessor(b *testing.B) {
mds := make([]pmetric.Metrics, 0, b.N)
for n := 0; n < b.N; n++ {
mds = append(mds,
testdata.GenerateMetricsManyMetricsSameResource(metricsPerRequest),
testdata.GenerateMetrics(metricsPerRequest),
)
}
b.StartTimer()
@ -648,7 +648,7 @@ func TestBatchLogProcessor_ReceivingData(t *testing.T) {
logDataSlice := make([]plog.Logs, 0, requestCount)
for requestNum := 0; requestNum < requestCount; requestNum++ {
ld := testdata.GenerateLogsManyLogRecordsSameResource(logsPerRequest)
ld := testdata.GenerateLogs(logsPerRequest)
logs := ld.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
for logIndex := 0; logIndex < logsPerRequest; logIndex++ {
logs.At(logIndex).SetSeverityText(getTestLogSeverityText(requestNum, logIndex))
@ -702,7 +702,7 @@ func TestBatchLogProcessor_BatchSize(t *testing.T) {
start := time.Now()
size := 0
for requestNum := 0; requestNum < requestCount; requestNum++ {
ld := testdata.GenerateLogsManyLogRecordsSameResource(logsPerRequest)
ld := testdata.GenerateLogs(logsPerRequest)
size += sizer.LogsSize(ld)
assert.NoError(t, batcher.ConsumeLogs(context.Background(), ld))
}
@ -758,7 +758,7 @@ func TestBatchLogsProcessor_Timeout(t *testing.T) {
start := time.Now()
for requestNum := 0; requestNum < requestCount; requestNum++ {
ld := testdata.GenerateLogsManyLogRecordsSameResource(logsPerRequest)
ld := testdata.GenerateLogs(logsPerRequest)
assert.NoError(t, batcher.ConsumeLogs(context.Background(), ld))
}
@ -806,7 +806,7 @@ func TestBatchLogProcessor_Shutdown(t *testing.T) {
require.NoError(t, batcher.Start(context.Background(), componenttest.NewNopHost()))
for requestNum := 0; requestNum < requestCount; requestNum++ {
ld := testdata.GenerateLogsManyLogRecordsSameResource(logsPerRequest)
ld := testdata.GenerateLogs(logsPerRequest)
assert.NoError(t, batcher.ConsumeLogs(context.Background(), ld))
}

View File

@ -24,7 +24,7 @@ import (
)
func TestSplitLogs_noop(t *testing.T) {
td := testdata.GenerateLogsManyLogRecordsSameResource(20)
td := testdata.GenerateLogs(20)
splitSize := 40
split := splitLogs(splitSize, td)
assert.Equal(t, td, split)
@ -38,7 +38,7 @@ func TestSplitLogs_noop(t *testing.T) {
}
func TestSplitLogs(t *testing.T) {
ld := testdata.GenerateLogsManyLogRecordsSameResource(20)
ld := testdata.GenerateLogs(20)
logs := ld.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
logs.At(i).SetSeverityText(getTestLogSeverityText(0, i))
@ -81,13 +81,13 @@ func TestSplitLogs(t *testing.T) {
}
func TestSplitLogsMultipleResourceLogs(t *testing.T) {
td := testdata.GenerateLogsManyLogRecordsSameResource(20)
td := testdata.GenerateLogs(20)
logs := td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
logs.At(i).SetSeverityText(getTestLogSeverityText(0, i))
}
// add second index to resource logs
testdata.GenerateLogsManyLogRecordsSameResource(20).
testdata.GenerateLogs(20).
ResourceLogs().At(0).CopyTo(td.ResourceLogs().AppendEmpty())
logs = td.ResourceLogs().At(1).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
@ -103,13 +103,13 @@ func TestSplitLogsMultipleResourceLogs(t *testing.T) {
}
func TestSplitLogsMultipleResourceLogs_split_size_greater_than_log_size(t *testing.T) {
td := testdata.GenerateLogsManyLogRecordsSameResource(20)
td := testdata.GenerateLogs(20)
logs := td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
logs.At(i).SetSeverityText(getTestLogSeverityText(0, i))
}
// add second index to resource logs
testdata.GenerateLogsManyLogRecordsSameResource(20).
testdata.GenerateLogs(20).
ResourceLogs().At(0).CopyTo(td.ResourceLogs().AppendEmpty())
logs = td.ResourceLogs().At(1).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
@ -128,7 +128,7 @@ func TestSplitLogsMultipleResourceLogs_split_size_greater_than_log_size(t *testi
}
func TestSplitLogsMultipleILL(t *testing.T) {
td := testdata.GenerateLogsManyLogRecordsSameResource(20)
td := testdata.GenerateLogs(20)
logs := td.ResourceLogs().At(0).ScopeLogs().At(0).LogRecords()
for i := 0; i < logs.Len(); i++ {
logs.At(i).SetSeverityText(getTestLogSeverityText(0, i))
@ -161,7 +161,7 @@ func BenchmarkSplitLogs(b *testing.B) {
md := plog.NewLogs()
rms := md.ResourceLogs()
for i := 0; i < 20; i++ {
testdata.GenerateLogsManyLogRecordsSameResource(20).ResourceLogs().MoveAndAppendTo(md.ResourceLogs())
testdata.GenerateLogs(20).ResourceLogs().MoveAndAppendTo(md.ResourceLogs())
ms := rms.At(rms.Len() - 1).ScopeLogs().At(0).LogRecords()
for i := 0; i < ms.Len(); i++ {
ms.At(i).SetSeverityText(getTestLogSeverityText(1, i))

View File

@ -24,7 +24,7 @@ import (
)
func TestSplitMetrics_noop(t *testing.T) {
td := testdata.GenerateMetricsManyMetricsSameResource(20)
td := testdata.GenerateMetrics(20)
splitSize := 40
split := splitMetrics(splitSize, td)
assert.Equal(t, td, split)
@ -38,7 +38,7 @@ func TestSplitMetrics_noop(t *testing.T) {
}
func TestSplitMetrics(t *testing.T) {
md := testdata.GenerateMetricsManyMetricsSameResource(20)
md := testdata.GenerateMetrics(20)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := metricDPC(metrics.At(0))
for i := 0; i < metrics.Len(); i++ {
@ -84,7 +84,7 @@ func TestSplitMetrics(t *testing.T) {
}
func TestSplitMetricsMultipleResourceSpans(t *testing.T) {
md := testdata.GenerateMetricsManyMetricsSameResource(20)
md := testdata.GenerateMetrics(20)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := metricDPC(metrics.At(0))
for i := 0; i < metrics.Len(); i++ {
@ -92,7 +92,7 @@ func TestSplitMetricsMultipleResourceSpans(t *testing.T) {
assert.Equal(t, dataPointCount, metricDPC(metrics.At(i)))
}
// add second index to resource metrics
testdata.GenerateMetricsManyMetricsSameResource(20).
testdata.GenerateMetrics(20).
ResourceMetrics().At(0).CopyTo(md.ResourceMetrics().AppendEmpty())
metrics = md.ResourceMetrics().At(1).ScopeMetrics().At(0).Metrics()
for i := 0; i < metrics.Len(); i++ {
@ -109,7 +109,7 @@ func TestSplitMetricsMultipleResourceSpans(t *testing.T) {
}
func TestSplitMetricsMultipleResourceSpans_SplitSizeGreaterThanMetricSize(t *testing.T) {
td := testdata.GenerateMetricsManyMetricsSameResource(20)
td := testdata.GenerateMetrics(20)
metrics := td.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := metricDPC(metrics.At(0))
for i := 0; i < metrics.Len(); i++ {
@ -117,7 +117,7 @@ func TestSplitMetricsMultipleResourceSpans_SplitSizeGreaterThanMetricSize(t *tes
assert.Equal(t, dataPointCount, metricDPC(metrics.At(i)))
}
// add second index to resource metrics
testdata.GenerateMetricsManyMetricsSameResource(20).
testdata.GenerateMetrics(20).
ResourceMetrics().At(0).CopyTo(td.ResourceMetrics().AppendEmpty())
metrics = td.ResourceMetrics().At(1).ScopeMetrics().At(0).Metrics()
for i := 0; i < metrics.Len(); i++ {
@ -137,7 +137,7 @@ func TestSplitMetricsMultipleResourceSpans_SplitSizeGreaterThanMetricSize(t *tes
}
func TestSplitMetricsUneven(t *testing.T) {
md := testdata.GenerateMetricsManyMetricsSameResource(10)
md := testdata.GenerateMetrics(10)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := 2
for i := 0; i < metrics.Len(); i++ {
@ -164,7 +164,7 @@ func TestSplitMetricsUneven(t *testing.T) {
}
func TestSplitMetricsAllTypes(t *testing.T) {
md := testdata.GeneratMetricsAllTypesWithSampleDatapoints()
md := testdata.GenerateMetricsAllTypes()
dataPointCount := 2
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
for i := 0; i < metrics.Len(); i++ {
@ -263,7 +263,7 @@ func TestSplitMetricsAllTypes(t *testing.T) {
}
func TestSplitMetricsBatchSizeSmallerThanDataPointCount(t *testing.T) {
md := testdata.GenerateMetricsManyMetricsSameResource(2)
md := testdata.GenerateMetrics(2)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := 2
for i := 0; i < metrics.Len(); i++ {
@ -276,37 +276,29 @@ func TestSplitMetricsBatchSizeSmallerThanDataPointCount(t *testing.T) {
splitMetric := split.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0)
assert.Equal(t, 1, split.MetricCount())
assert.Equal(t, 2, md.MetricCount())
assert.Equal(t, pmetric.MetricAggregationTemporalityCumulative, splitMetric.Sum().AggregationTemporality())
assert.Equal(t, true, splitMetric.Sum().IsMonotonic())
assert.Equal(t, "test-metric-int-0-0", splitMetric.Name())
split = splitMetrics(splitSize, md)
splitMetric = split.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0)
assert.Equal(t, 1, split.MetricCount())
assert.Equal(t, 1, md.MetricCount())
assert.Equal(t, pmetric.MetricAggregationTemporalityCumulative, splitMetric.Sum().AggregationTemporality())
assert.Equal(t, true, splitMetric.Sum().IsMonotonic())
assert.Equal(t, "test-metric-int-0-0", splitMetric.Name())
split = splitMetrics(splitSize, md)
splitMetric = split.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0)
assert.Equal(t, 1, split.MetricCount())
assert.Equal(t, 1, md.MetricCount())
assert.Equal(t, pmetric.MetricAggregationTemporalityCumulative, splitMetric.Sum().AggregationTemporality())
assert.Equal(t, true, splitMetric.Sum().IsMonotonic())
assert.Equal(t, "test-metric-int-0-1", splitMetric.Name())
split = splitMetrics(splitSize, md)
splitMetric = split.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics().At(0)
assert.Equal(t, 1, split.MetricCount())
assert.Equal(t, 1, md.MetricCount())
assert.Equal(t, pmetric.MetricAggregationTemporalityCumulative, splitMetric.Sum().AggregationTemporality())
assert.Equal(t, true, splitMetric.Sum().IsMonotonic())
assert.Equal(t, "test-metric-int-0-1", splitMetric.Name())
}
func TestSplitMetricsMultipleILM(t *testing.T) {
md := testdata.GenerateMetricsManyMetricsSameResource(20)
md := testdata.GenerateMetrics(20)
metrics := md.ResourceMetrics().At(0).ScopeMetrics().At(0).Metrics()
dataPointCount := metricDPC(metrics.At(0))
for i := 0; i < metrics.Len(); i++ {
@ -338,7 +330,7 @@ func BenchmarkSplitMetrics(b *testing.B) {
md := pmetric.NewMetrics()
rms := md.ResourceMetrics()
for i := 0; i < 20; i++ {
testdata.GenerateMetricsManyMetricsSameResource(20).ResourceMetrics().MoveAndAppendTo(md.ResourceMetrics())
testdata.GenerateMetrics(20).ResourceMetrics().MoveAndAppendTo(md.ResourceMetrics())
ms := rms.At(rms.Len() - 1).ScopeMetrics().At(0).Metrics()
for i := 0; i < ms.Len(); i++ {
ms.At(i).SetName(getTestMetricName(1, i))

View File

@ -24,7 +24,7 @@ import (
)
func TestSplitTraces_noop(t *testing.T) {
td := testdata.GenerateTracesManySpansSameResource(20)
td := testdata.GenerateTraces(20)
splitSize := 40
split := splitTraces(splitSize, td)
assert.Equal(t, td, split)
@ -38,7 +38,7 @@ func TestSplitTraces_noop(t *testing.T) {
}
func TestSplitTraces(t *testing.T) {
td := testdata.GenerateTracesManySpansSameResource(20)
td := testdata.GenerateTraces(20)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
spans.At(i).SetName(getTestSpanName(0, i))
@ -81,13 +81,13 @@ func TestSplitTraces(t *testing.T) {
}
func TestSplitTracesMultipleResourceSpans(t *testing.T) {
td := testdata.GenerateTracesManySpansSameResource(20)
td := testdata.GenerateTraces(20)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
spans.At(i).SetName(getTestSpanName(0, i))
}
// add second index to resource spans
testdata.GenerateTracesManySpansSameResource(20).
testdata.GenerateTraces(20).
ResourceSpans().At(0).CopyTo(td.ResourceSpans().AppendEmpty())
spans = td.ResourceSpans().At(1).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
@ -103,13 +103,13 @@ func TestSplitTracesMultipleResourceSpans(t *testing.T) {
}
func TestSplitTracesMultipleResourceSpans_SplitSizeGreaterThanSpanSize(t *testing.T) {
td := testdata.GenerateTracesManySpansSameResource(20)
td := testdata.GenerateTraces(20)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
spans.At(i).SetName(getTestSpanName(0, i))
}
// add second index to resource spans
testdata.GenerateTracesManySpansSameResource(20).
testdata.GenerateTraces(20).
ResourceSpans().At(0).CopyTo(td.ResourceSpans().AppendEmpty())
spans = td.ResourceSpans().At(1).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
@ -131,7 +131,7 @@ func BenchmarkCloneSpans(b *testing.B) {
td := ptrace.NewTraces()
rms := td.ResourceSpans()
for i := 0; i < 20; i++ {
testdata.GenerateTracesManySpansSameResource(20).ResourceSpans().MoveAndAppendTo(td.ResourceSpans())
testdata.GenerateTraces(20).ResourceSpans().MoveAndAppendTo(td.ResourceSpans())
ms := rms.At(rms.Len() - 1).ScopeSpans().At(0).Spans()
for i := 0; i < ms.Len(); i++ {
ms.At(i).SetName(getTestMetricName(1, i))
@ -149,7 +149,7 @@ func BenchmarkCloneSpans(b *testing.B) {
}
func TestSplitTracesMultipleILS(t *testing.T) {
td := testdata.GenerateTracesManySpansSameResource(20)
td := testdata.GenerateTraces(20)
spans := td.ResourceSpans().At(0).ScopeSpans().At(0).Spans()
for i := 0; i < spans.Len(); i++ {
spans.At(i).SetName(getTestSpanName(0, i))
@ -182,7 +182,7 @@ func BenchmarkSplitTraces(b *testing.B) {
td := ptrace.NewTraces()
rms := td.ResourceSpans()
for i := 0; i < 20; i++ {
testdata.GenerateTracesManySpansSameResource(20).ResourceSpans().MoveAndAppendTo(td.ResourceSpans())
testdata.GenerateTraces(20).ResourceSpans().MoveAndAppendTo(td.ResourceSpans())
ms := rms.At(rms.Len() - 1).ScopeSpans().At(0).Spans()
for i := 0; i < ms.Len(); i++ {
ms.At(i).SetName(getTestMetricName(1, i))

View File

@ -43,7 +43,7 @@ func TestExport(t *testing.T) {
require.NoError(t, err, "Failed to create the TraceServiceClient: %v", err)
defer traceClientDoneFn()
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
// Keep log data to compare the test result against it
// Clone needed because OTLP proto XXX_ fields are altered in the GRPC downstream
logData := ld.Clone()
@ -81,7 +81,7 @@ func TestExport_ErrorConsumer(t *testing.T) {
require.NoError(t, err, "Failed to create the TraceServiceClient: %v", err)
defer logClientDoneFn()
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
req := plogotlp.NewRequestFromLogs(ld)
resp, err := logClient.Export(context.Background(), req)

View File

@ -43,7 +43,7 @@ func TestExport(t *testing.T) {
require.NoError(t, err, "Failed to create the MetricsServiceClient: %v", err)
defer metricsClientDoneFn()
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
// Keep metric data to compare the test result against it
// Clone needed because OTLP proto XXX_ fields are altered in the GRPC downstream
@ -83,7 +83,7 @@ func TestExport_ErrorConsumer(t *testing.T) {
require.NoError(t, err, "Failed to create the MetricsServiceClient: %v", err)
defer metricsClientDoneFn()
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
req := pmetricotlp.NewRequestFromMetrics(md)
resp, err := metricsClient.Export(context.Background(), req)

View File

@ -43,7 +43,7 @@ func TestExport(t *testing.T) {
require.NoError(t, err, "Failed to create the TraceServiceClient: %v", err)
defer traceClientDoneFn()
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
// Keep trace data to compare the test result against it
// Clone needed because OTLP proto XXX_ fields are altered in the GRPC downstream
@ -81,7 +81,7 @@ func TestExport_ErrorConsumer(t *testing.T) {
require.NoError(t, err, "Failed to create the TraceServiceClient: %v", err)
defer traceClientDoneFn()
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
req := ptraceotlp.NewRequestFromTraces(td)
resp, err := traceClient.Export(context.Background(), req)
assert.EqualError(t, err, "rpc error: code = Unknown desc = my error")

View File

@ -422,7 +422,7 @@ func TestProtoHttp(t *testing.T) {
// Wait for the servers to start
<-time.After(10 * time.Millisecond)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
traceBytes, err := ptrace.NewProtoMarshaler().MarshalTraces(td)
if err != nil {
t.Errorf("Error marshaling protobuf: %v", err)
@ -645,7 +645,7 @@ func TestOTLPReceiverTrace_HandleNextConsumerResponse(t *testing.T) {
}
addr := testutil.GetAvailableLocalAddress(t)
req := testdata.GenerateTracesOneSpan()
req := testdata.GenerateTraces(1)
exporters := []struct {
receiverTag string
@ -745,7 +745,7 @@ func TestGRPCMaxRecvSize(t *testing.T) {
cc, err := grpc.Dial(addr, grpc.WithTransportCredentials(insecure.NewCredentials()), grpc.WithBlock())
require.NoError(t, err)
td := testdata.GenerateTracesManySpansSameResource(50000)
td := testdata.GenerateTraces(50000)
require.Error(t, exportTraces(cc, td))
cc.Close()
require.NoError(t, ocr.Shutdown(context.Background()))
@ -761,7 +761,7 @@ func TestGRPCMaxRecvSize(t *testing.T) {
require.NoError(t, err)
defer cc.Close()
td = testdata.GenerateTracesManySpansSameResource(50000)
td = testdata.GenerateTraces(50000)
require.NoError(t, exportTraces(cc, td))
require.Len(t, sink.AllTraces(), 1)
assert.Equal(t, td, sink.AllTraces()[0])
@ -978,12 +978,12 @@ loop:
break loop
default:
}
senderFn(testdata.GenerateTracesOneSpan())
senderFn(testdata.GenerateTraces(1))
}
// After getting the signal to stop, send one more span and then
// finally stop. We should never receive this last span.
senderFn(testdata.GenerateTracesOneSpan())
senderFn(testdata.GenerateTraces(1))
// Indicate that we are done.
close(doneSignal)

View File

@ -39,7 +39,7 @@ func TestLogsMultiplexingNonMutating(t *testing.T) {
lfc := NewLogs([]consumer.Logs{p1, p2, p3})
assert.False(t, lfc.Capabilities().MutatesData)
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
for i := 0; i < 2; i++ {
err := lfc.ConsumeLogs(context.Background(), ld)
@ -72,7 +72,7 @@ func TestLogsMultiplexingMutating(t *testing.T) {
lfc := NewLogs([]consumer.Logs{p1, p2, p3})
assert.False(t, lfc.Capabilities().MutatesData)
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
for i := 0; i < 2; i++ {
err := lfc.ConsumeLogs(context.Background(), ld)
@ -106,7 +106,7 @@ func TestLogsMultiplexingMixLastMutating(t *testing.T) {
lfc := NewLogs([]consumer.Logs{p1, p2, p3})
assert.False(t, lfc.Capabilities().MutatesData)
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
for i := 0; i < 2; i++ {
err := lfc.ConsumeLogs(context.Background(), ld)
@ -141,7 +141,7 @@ func TestLogsMultiplexingMixLastNonMutating(t *testing.T) {
lfc := NewLogs([]consumer.Logs{p1, p2, p3})
assert.False(t, lfc.Capabilities().MutatesData)
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
for i := 0; i < 2; i++ {
err := lfc.ConsumeLogs(context.Background(), ld)
@ -174,7 +174,7 @@ func TestLogsWhenErrors(t *testing.T) {
p3 := new(consumertest.LogsSink)
lfc := NewLogs([]consumer.Logs{p1, p2, p3})
ld := testdata.GenerateLogsOneLogRecord()
ld := testdata.GenerateLogs(1)
for i := 0; i < 2; i++ {
assert.Error(t, lfc.ConsumeLogs(context.Background(), ld))

View File

@ -39,7 +39,7 @@ func TestMetricsMultiplexingNonMutating(t *testing.T) {
mfc := NewMetrics([]consumer.Metrics{p1, p2, p3})
assert.False(t, mfc.Capabilities().MutatesData)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
for i := 0; i < 2; i++ {
err := mfc.ConsumeMetrics(context.Background(), md)
@ -72,7 +72,7 @@ func TestMetricsMultiplexingMutating(t *testing.T) {
mfc := NewMetrics([]consumer.Metrics{p1, p2, p3})
assert.False(t, mfc.Capabilities().MutatesData)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
for i := 0; i < 2; i++ {
err := mfc.ConsumeMetrics(context.Background(), md)
@ -106,7 +106,7 @@ func TestMetricsMultiplexingMixLastMutating(t *testing.T) {
mfc := NewMetrics([]consumer.Metrics{p1, p2, p3})
assert.False(t, mfc.Capabilities().MutatesData)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
for i := 0; i < 2; i++ {
err := mfc.ConsumeMetrics(context.Background(), md)
@ -141,7 +141,7 @@ func TestMetricsMultiplexingMixLastNonMutating(t *testing.T) {
mfc := NewMetrics([]consumer.Metrics{p1, p2, p3})
assert.False(t, mfc.Capabilities().MutatesData)
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
for i := 0; i < 2; i++ {
err := mfc.ConsumeMetrics(context.Background(), md)
@ -174,7 +174,7 @@ func TestMetricsWhenErrors(t *testing.T) {
p3 := new(consumertest.MetricsSink)
mfc := NewMetrics([]consumer.Metrics{p1, p2, p3})
md := testdata.GenerateMetricsOneMetric()
md := testdata.GenerateMetrics(1)
for i := 0; i < 2; i++ {
assert.Error(t, mfc.ConsumeMetrics(context.Background(), md))

View File

@ -39,7 +39,7 @@ func TestTracesMultiplexingNonMutating(t *testing.T) {
tfc := NewTraces([]consumer.Traces{p1, p2, p3})
assert.False(t, tfc.Capabilities().MutatesData)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
for i := 0; i < 2; i++ {
err := tfc.ConsumeTraces(context.Background(), td)
@ -72,7 +72,7 @@ func TestTracesMultiplexingMutating(t *testing.T) {
tfc := NewTraces([]consumer.Traces{p1, p2, p3})
assert.False(t, tfc.Capabilities().MutatesData)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
for i := 0; i < 2; i++ {
err := tfc.ConsumeTraces(context.Background(), td)
@ -106,7 +106,7 @@ func TestTracesMultiplexingMixLastMutating(t *testing.T) {
tfc := NewTraces([]consumer.Traces{p1, p2, p3})
assert.False(t, tfc.Capabilities().MutatesData)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
for i := 0; i < 2; i++ {
err := tfc.ConsumeTraces(context.Background(), td)
@ -141,7 +141,7 @@ func TestTracesMultiplexingMixLastNonMutating(t *testing.T) {
tfc := NewTraces([]consumer.Traces{p1, p2, p3})
assert.False(t, tfc.Capabilities().MutatesData)
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
for i := 0; i < 2; i++ {
err := tfc.ConsumeTraces(context.Background(), td)
@ -174,7 +174,7 @@ func TestTracesWhenErrors(t *testing.T) {
p3 := new(consumertest.TracesSink)
tfc := NewTraces([]consumer.Traces{p1, p2, p3})
td := testdata.GenerateTracesOneSpan()
td := testdata.GenerateTraces(1)
for i := 0; i < 2; i++ {
assert.Error(t, tfc.ConsumeTraces(context.Background(), td))

View File

@ -33,9 +33,9 @@ func TestWrapLogs(t *testing.T) {
wrap := wrapLogs(sink, consumer.Capabilities{MutatesData: true})
assert.Equal(t, consumer.Capabilities{MutatesData: true}, wrap.Capabilities())
assert.NoError(t, wrap.ConsumeLogs(context.Background(), testdata.GenerateLogsOneLogRecord()))
assert.NoError(t, wrap.ConsumeLogs(context.Background(), testdata.GenerateLogs(1)))
assert.Len(t, sink.AllLogs(), 1)
assert.Equal(t, testdata.GenerateLogsOneLogRecord(), sink.AllLogs()[0])
assert.Equal(t, testdata.GenerateLogs(1), sink.AllLogs()[0])
}
func TestWrapMetrics(t *testing.T) {
@ -45,9 +45,9 @@ func TestWrapMetrics(t *testing.T) {
wrap := wrapMetrics(sink, consumer.Capabilities{MutatesData: true})
assert.Equal(t, consumer.Capabilities{MutatesData: true}, wrap.Capabilities())
assert.NoError(t, wrap.ConsumeMetrics(context.Background(), testdata.GenerateMetricsOneMetric()))
assert.NoError(t, wrap.ConsumeMetrics(context.Background(), testdata.GenerateMetrics(1)))
assert.Len(t, sink.AllMetrics(), 1)
assert.Equal(t, testdata.GenerateMetricsOneMetric(), sink.AllMetrics()[0])
assert.Equal(t, testdata.GenerateMetrics(1), sink.AllMetrics()[0])
}
func TestWrapTraces(t *testing.T) {
@ -57,7 +57,7 @@ func TestWrapTraces(t *testing.T) {
wrap := wrapTraces(sink, consumer.Capabilities{MutatesData: true})
assert.Equal(t, consumer.Capabilities{MutatesData: true}, wrap.Capabilities())
assert.NoError(t, wrap.ConsumeTraces(context.Background(), testdata.GenerateTracesOneSpan()))
assert.NoError(t, wrap.ConsumeTraces(context.Background(), testdata.GenerateTraces(1)))
assert.Len(t, sink.AllTraces(), 1)
assert.Equal(t, testdata.GenerateTracesOneSpan(), sink.AllTraces()[0])
assert.Equal(t, testdata.GenerateTraces(1), sink.AllTraces()[0])
}

View File

@ -135,17 +135,17 @@ func TestBuild(t *testing.T) {
traceReceiver := pipelines.allReceivers[config.TracesDataType][recvID].(*testcomponents.ExampleReceiver)
assert.True(t, traceReceiver.Started)
// Send traces.
assert.NoError(t, traceReceiver.ConsumeTraces(context.Background(), testdata.GenerateTracesOneSpan()))
assert.NoError(t, traceReceiver.ConsumeTraces(context.Background(), testdata.GenerateTraces(1)))
metricsReceiver := pipelines.allReceivers[config.MetricsDataType][recvID].(*testcomponents.ExampleReceiver)
assert.True(t, metricsReceiver.Started)
// Send metrics.
assert.NoError(t, metricsReceiver.ConsumeMetrics(context.Background(), testdata.GenerateMetricsOneMetric()))
assert.NoError(t, metricsReceiver.ConsumeMetrics(context.Background(), testdata.GenerateMetrics(1)))
logsReceiver := pipelines.allReceivers[config.LogsDataType][recvID].(*testcomponents.ExampleReceiver)
assert.True(t, logsReceiver.Started)
// Send logs.
assert.NoError(t, logsReceiver.ConsumeLogs(context.Background(), testdata.GenerateLogsOneLogRecord()))
assert.NoError(t, logsReceiver.ConsumeLogs(context.Background(), testdata.GenerateLogs(1)))
}
assert.NoError(t, pipelines.ShutdownAll(context.Background()))
@ -181,19 +181,19 @@ func TestBuild(t *testing.T) {
// Validate traces.
traceExporter := pipelines.GetExporters()[config.TracesDataType][expID].(*testcomponents.ExampleExporter)
require.Len(t, traceExporter.Traces, test.expectedRequests)
assert.EqualValues(t, testdata.GenerateTracesOneSpan(), traceExporter.Traces[0])
assert.EqualValues(t, testdata.GenerateTraces(1), traceExporter.Traces[0])
assert.True(t, traceExporter.Stopped)
// Validate metrics.
metricsExporter := pipelines.GetExporters()[config.MetricsDataType][expID].(*testcomponents.ExampleExporter)
require.Len(t, metricsExporter.Metrics, test.expectedRequests)
assert.EqualValues(t, testdata.GenerateMetricsOneMetric(), metricsExporter.Metrics[0])
assert.EqualValues(t, testdata.GenerateMetrics(1), metricsExporter.Metrics[0])
assert.True(t, metricsExporter.Stopped)
// Validate logs.
logsExporter := pipelines.GetExporters()[config.LogsDataType][expID].(*testcomponents.ExampleExporter)
require.Len(t, logsExporter.Logs, test.expectedRequests)
assert.EqualValues(t, testdata.GenerateLogsOneLogRecord(), logsExporter.Logs[0])
assert.EqualValues(t, testdata.GenerateLogs(1), logsExporter.Logs[0])
assert.True(t, logsExporter.Stopped)
}
})