Remove usage of custom init/stop in scraper and use start/shutdown from component (#2193)

Signed-off-by: Bogdan Drutu <bogdandrutu@gmail.com>
Bogdan Drutu 2020-11-23 14:00:42 -05:00 committed by GitHub
parent a460a4a6c0
commit 7874cd5faa
32 changed files with 142 additions and 151 deletions
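In short, each host metrics scraper drops its exported Initialize/Scrape/Close methods and instead exposes unexported start, scrape, and shutdown functions whose signatures match componenthelper.Start and componenthelper.Shutdown, and the factories register them through the new scraperhelper.WithStart and scraperhelper.WithShutdown options instead of WithInitialize/WithClose. A minimal sketch of the new wiring follows; the myScraper type and typeStr constant are placeholders, and the scraperhelper import path is assumed from the collector layout at this commit rather than shown in the diff below.

package examplescraper

import (
	"context"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/consumer/pdata"
	"go.opentelemetry.io/collector/receiver/scraperhelper" // import path assumed, not shown in this diff
)

const typeStr = "example" // placeholder for the real TypeStr constant

type myScraper struct{} // placeholder for a real scraper and its injected dependencies

// start replaces the old exported Initialize(ctx) hook and now also receives the host.
func (s *myScraper) start(context.Context, component.Host) error { return nil }

// shutdown replaces the old exported Close(ctx) hook.
func (s *myScraper) shutdown(context.Context) error { return nil }

// scrape replaces the old exported Scrape(ctx).
func (s *myScraper) scrape(context.Context) (pdata.MetricSlice, error) {
	return pdata.NewMetricSlice(), nil
}

func newMetricsScraper(s *myScraper) scraperhelper.MetricsScraper {
	return scraperhelper.NewMetricsScraper(
		typeStr,
		s.scrape,
		scraperhelper.WithStart(s.start),       // was scraperhelper.WithInitialize(s.Initialize)
		scraperhelper.WithShutdown(s.shutdown), // was scraperhelper.WithClose(s.Close)
	)
}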

View File

@ -30,11 +30,11 @@ const totalInstanceName = "_Total"
// PerfCounterScraper scrapes performance counter data.
type PerfCounterScraper interface {
// Initialize initializes the PerfCounterScraper so that subsequent calls
// to Scrape will return performance counter data for the specified set.
// start initializes the PerfCounterScraper so that subsequent calls
// to scrape will return performance counter data for the specified set.
// of objects
Initialize(objects ...string) error
// Scrape returns performance data for the initialized objects.
// scrape returns performance data for the initialized objects.
Scrape() (PerfDataCollection, error)
}
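The Windows scrapers call Initialize from their renamed start hook and Scrape from scrape (see the disk and swap scrapers further down). A rough sketch of that call pattern, written as if it lived inside the perfcounters package; the object and counter names are placeholders, and the error-returning GetObject/GetValues signatures are inferred from the mock below rather than shown in this diff.

// Sketch only: placeholder names; GetObject/GetValues signatures are assumed.
func collectCounters(pcs PerfCounterScraper) ([]*CounterValues, error) {
	// Done once, typically from the scraper's start hook.
	if err := pcs.Initialize("LogicalDisk"); err != nil {
		return nil, err
	}
	// Done on every scrape.
	collection, err := pcs.Scrape()
	if err != nil {
		return nil, err
	}
	object, err := collection.GetObject("LogicalDisk")
	if err != nil {
		return nil, err
	}
	return object.GetValues("Disk Reads/sec", "Disk Writes/sec")
}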

View File

@ -23,7 +23,7 @@ import (
)
// MockPerfCounterScraperError is an implementation of PerfCounterScraper that returns
// the supplied errors when Scrape, GetObject, or GetValues are called.
// the supplied errors when scrape, GetObject, or GetValues are called.
type MockPerfCounterScraperError struct {
scrapeErr error
getObjectErr error
@ -36,12 +36,12 @@ func NewMockPerfCounterScraperError(scrapeErr, getObjectErr, getValuesErr error)
return &MockPerfCounterScraperError{scrapeErr: scrapeErr, getObjectErr: getObjectErr, getValuesErr: getValuesErr}
}
// Initialize is a no-op
// start is a no-op
func (p *MockPerfCounterScraperError) Initialize(objects ...string) error {
return nil
}
// Scrape returns the specified scrapeErr or an object that will return a subsequent error
// scrape returns the specified scrapeErr or an object that will return a subsequent error
// if scrapeErr is nil
func (p *MockPerfCounterScraperError) Scrape() (PerfDataCollection, error) {
if p.scrapeErr != nil {
@ -80,7 +80,7 @@ func (obj mockPerfDataObjectError) GetValues(counterNames ...string) ([]*Counter
}
// MockPerfCounterScraper is an implementation of PerfCounterScraper that returns the supplied
// object / counter values on each successive call to Scrape, in the specified order.
// object / counter values on each successive call to scrape, in the specified order.
//
// Example Usage:
//
@ -91,7 +91,7 @@ func (obj mockPerfDataObjectError) GetValues(counterNames ...string) ([]*Counter
// },
// })
//
// s.Scrape().GetObject("Object1").GetValues("Counter1", "Counter2")
// s.scrape().GetObject("Object1").GetValues("Counter1", "Counter2")
//
// ... 1st call returns []*CounterValues{ { Values: { "Counter1": 1, "Counter2": 4 } } }
// ... 2nd call returns []*CounterValues{ { Values: { "Counter1": 2, "Counter2": 4 } } }
@ -101,17 +101,17 @@ type MockPerfCounterScraper struct {
}
// NewMockPerfCounterScraper returns a MockPerfCounterScraper that will return the supplied
// object / counter values on each successive call to Scrape, in the specified order.
// object / counter values on each successive call to scrape, in the specified order.
func NewMockPerfCounterScraper(objectsAndValuesToReturn map[string]map[string][]int64) *MockPerfCounterScraper {
return &MockPerfCounterScraper{objectsAndValuesToReturn: objectsAndValuesToReturn}
}
// Initialize is a no-op
// start is a no-op
func (p *MockPerfCounterScraper) Initialize(objects ...string) error {
return nil
}
// Scrape returns a perf data collection with the supplied object / counter values,
// scrape returns a perf data collection with the supplied object / counter values,
// according to the supplied order.
func (p *MockPerfCounterScraper) Scrape() (PerfDataCollection, error) {
objectsAndValuesToReturn := make(map[string]map[string]int64, len(p.objectsAndValuesToReturn))

View File

@ -21,6 +21,7 @@ import (
"github.com/shirou/gopsutil/cpu"
"github.com/shirou/gopsutil/host"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -44,8 +45,7 @@ func newCPUScraper(_ context.Context, cfg *Config) *scraper {
return &scraper{config: cfg, bootTime: host.BootTime, times: cpu.Times}
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -55,8 +55,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
now := internal.TimeToUnixNano(time.Now())

View File

@ -24,6 +24,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -71,14 +72,14 @@ func TestScrape(t *testing.T) {
scraper.times = test.timesFunc
}
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
if test.initializationErr != "" {
assert.EqualError(t, err, test.initializationErr)
return
}
require.NoError(t, err, "Failed to initialize cpu scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
if test.expectedErr != "" {
assert.EqualError(t, err, test.expectedErr)

View File

@ -50,8 +50,8 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -24,6 +24,7 @@ import (
"github.com/shirou/gopsutil/disk"
"github.com/shirou/gopsutil/host"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -70,8 +71,7 @@ func newDiskScraper(_ context.Context, cfg *Config) (*scraper, error) {
return scraper, nil
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -81,8 +81,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
now := internal.TimeToUnixNano(time.Now())

View File

@ -25,6 +25,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
)
@ -52,10 +53,10 @@ func TestScrape_Others(t *testing.T) {
scraper.ioCounters = test.ioCountersFunc
}
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize disk scraper: %v", err)
_, err = scraper.Scrape(context.Background())
_, err = scraper.scrape(context.Background())
assert.EqualError(t, err, test.expectedErr)
isPartial := consumererror.IsPartialScrapeError(err)

View File

@ -23,6 +23,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -86,14 +87,14 @@ func TestScrape(t *testing.T) {
scraper.bootTime = test.bootTimeFunc
}
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
if test.initializationErr != "" {
assert.EqualError(t, err, test.initializationErr)
return
}
require.NoError(t, err, "Failed to initialize disk scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
require.NoError(t, err, "Failed to scrape metrics: %v", err)
if !test.expectMetrics {

View File

@ -21,6 +21,7 @@ import (
"github.com/shirou/gopsutil/host"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -83,8 +84,7 @@ func newDiskScraper(_ context.Context, cfg *Config) (*scraper, error) {
return scraper, nil
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -95,8 +95,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return s.perfCounterScraper.Initialize(logicalDisk)
}
// Scrape
func (s *scraper) Scrape(ctx context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(ctx context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
now := internal.TimeToUnixNano(time.Now())

View File

@ -24,6 +24,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/perfcounters"
)
@ -62,10 +63,10 @@ func TestScrape_Error(t *testing.T) {
scraper.perfCounterScraper = perfcounters.NewMockPerfCounterScraperError(test.scrapeErr, test.getObjectErr, test.getValuesErr)
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize disk scraper: %v", err)
_, err = scraper.Scrape(context.Background())
_, err = scraper.scrape(context.Background())
assert.EqualError(t, err, test.expectedErr)
isPartial := consumererror.IsPartialScrapeError(err)

View File

@ -53,8 +53,8 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -50,9 +50,9 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
scraperhelper.WithClose(s.Close),
s.scrape,
scraperhelper.WithStart(s.start),
scraperhelper.WithShutdown(s.shutdown),
)
return ms, nil

View File

@ -21,6 +21,7 @@ import (
"github.com/shirou/gopsutil/load"
"go.uber.org/zap"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -42,18 +43,18 @@ func newLoadScraper(_ context.Context, logger *zap.Logger, cfg *Config) *scraper
return &scraper{logger: logger, config: cfg, load: getSampledLoadAverages}
}
// Initialize
func (s *scraper) Initialize(ctx context.Context) error {
// start
func (s *scraper) start(ctx context.Context, _ component.Host) error {
return startSampling(ctx, s.logger)
}
// Close
func (s *scraper) Close(ctx context.Context) error {
// shutdown
func (s *scraper) shutdown(ctx context.Context) error {
return stopSampling(ctx)
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
// scrape
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
now := internal.TimeToUnixNano(time.Now())

View File

@ -24,6 +24,7 @@ import (
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -54,11 +55,11 @@ func TestScrape(t *testing.T) {
scraper.load = test.loadFunc
}
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize load scraper: %v", err)
defer func() { assert.NoError(t, scraper.Close(context.Background())) }()
defer func() { assert.NoError(t, scraper.shutdown(context.Background())) }()
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
if test.expectedErr != "" {
assert.EqualError(t, err, test.expectedErr)

View File

@ -53,8 +53,8 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -22,6 +22,7 @@ import (
"github.com/shirou/gopsutil/host"
"github.com/shirou/gopsutil/net"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -70,8 +71,7 @@ func newNetworkScraper(_ context.Context, cfg *Config) (*scraper, error) {
return scraper, nil
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -81,8 +81,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
var errors []error

View File

@ -23,6 +23,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -109,14 +110,14 @@ func TestScrape(t *testing.T) {
scraper.connections = test.connectionsFunc
}
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
if test.initializationErr != "" {
assert.EqualError(t, err, test.initializationErr)
return
}
require.NoError(t, err, "Failed to initialize network scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
if test.expectedErr != "" {
assert.EqualError(t, err, test.expectedErr)

View File

@ -50,8 +50,8 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -20,6 +20,7 @@ import (
"github.com/shirou/gopsutil/host"
"github.com/shirou/gopsutil/load"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/pdata"
)
@ -41,8 +42,7 @@ func newProcessesScraper(_ context.Context, cfg *Config) *scraper {
return &scraper{config: cfg, misc: load.Misc}
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := host.BootTime()
if err != nil {
return err
@ -52,8 +52,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
err := appendSystemSpecificProcessesMetrics(metrics, 0, s.misc)
return metrics, err

View File

@ -24,6 +24,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -63,10 +64,10 @@ func TestScrape(t *testing.T) {
scraper.misc = test.miscFunc
}
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize processes scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
if len(expectedMetrics) > 0 && test.expectedErr != "" {
assert.EqualError(t, err, test.expectedErr)

View File

@ -59,8 +59,8 @@ func (f *Factory) CreateResourceMetricsScraper(
ms := scraperhelper.NewResourceMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -23,6 +23,7 @@ import (
"github.com/shirou/gopsutil/host"
"github.com/shirou/gopsutil/process"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -73,8 +74,7 @@ func newProcessScraper(cfg *Config) (*scraper, error) {
return scraper, nil
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -84,8 +84,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.ResourceMetricsSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.ResourceMetricsSlice, error) {
rms := pdata.NewResourceMetricsSlice()
var errs []error

View File

@ -27,6 +27,7 @@ import (
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/internal/processor/filterset"
@ -49,10 +50,10 @@ func TestScrape(t *testing.T) {
scraper, err := newProcessScraper(&Config{})
scraper.bootTime = func() (uint64, error) { return bootTime, nil }
require.NoError(t, err, "Failed to create process scraper: %v", err)
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize process scraper: %v", err)
resourceMetrics, err := scraper.Scrape(context.Background())
resourceMetrics, err := scraper.scrape(context.Background())
// may receive some partial errors as a result of attempting to:
// a) read native system processes on Windows (e.g. Registry process)
@ -167,10 +168,10 @@ func TestScrapeMetrics_GetProcessesError(t *testing.T) {
scraper.getProcessHandles = func() (processHandles, error) { return nil, errors.New("err1") }
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize process scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
assert.EqualError(t, err, "err1")
assert.Equal(t, 0, metrics.Len())
assert.False(t, consumererror.IsPartialScrapeError(err))
@ -311,7 +312,7 @@ func TestScrapeMetrics_Filtered(t *testing.T) {
scraper, err := newProcessScraper(config)
require.NoError(t, err, "Failed to create process scraper: %v", err)
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize process scraper: %v", err)
handles := make([]*processHandleMock, 0, len(test.names))
@ -326,7 +327,7 @@ func TestScrapeMetrics_Filtered(t *testing.T) {
return &processHandlesMock{handles: handles}, nil
}
resourceMetrics, err := scraper.Scrape(context.Background())
resourceMetrics, err := scraper.scrape(context.Background())
require.NoError(t, err)
assert.Equal(t, len(test.expectedNames), resourceMetrics.Len())
@ -415,7 +416,7 @@ func TestScrapeMetrics_ProcessErrors(t *testing.T) {
scraper, err := newProcessScraper(&Config{})
require.NoError(t, err, "Failed to create process scraper: %v", err)
err = scraper.Initialize(context.Background())
err = scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize process scraper: %v", err)
username := "username"
@ -437,7 +438,7 @@ func TestScrapeMetrics_ProcessErrors(t *testing.T) {
return &processHandlesMock{handles: []*processHandleMock{handleMock}}, nil
}
resourceMetrics, err := scraper.Scrape(context.Background())
resourceMetrics, err := scraper.scrape(context.Background())
md := pdata.NewMetrics()
resourceMetrics.MoveAndAppendTo(md.ResourceMetrics())

View File

@ -50,8 +50,8 @@ func (f *Factory) CreateMetricsScraper(
ms := scraperhelper.NewMetricsScraper(
TypeStr,
s.Scrape,
scraperhelper.WithInitialize(s.Initialize),
s.scrape,
scraperhelper.WithStart(s.start),
)
return ms, nil

View File

@ -23,6 +23,7 @@ import (
"github.com/shirou/gopsutil/host"
"github.com/shirou/gopsutil/mem"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -50,8 +51,7 @@ func newSwapScraper(_ context.Context, cfg *Config) *scraper {
return &scraper{config: cfg, bootTime: host.BootTime, virtualMemory: mem.VirtualMemory, swapMemory: mem.SwapMemory}
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -61,8 +61,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return nil
}
// Scrape
func (s *scraper) Scrape(_ context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(_ context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
var errors []error

View File

@ -25,6 +25,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
)
@ -69,10 +70,10 @@ func TestScrape_Errors(t *testing.T) {
scraper.swapMemory = test.swapMemoryFunc
}
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize swap scraper: %v", err)
_, err = scraper.Scrape(context.Background())
_, err = scraper.scrape(context.Background())
assert.EqualError(t, err, test.expectedError)
isPartial := consumererror.IsPartialScrapeError(err)

View File

@ -23,6 +23,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
)
@ -58,14 +59,14 @@ func TestScrape(t *testing.T) {
scraper.bootTime = test.bootTimeFunc
}
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
if test.initializationErr != "" {
assert.EqualError(t, err, test.initializationErr)
return
}
require.NoError(t, err, "Failed to initialize swap scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
require.NoError(t, err)
// expect 3 metrics (windows does not currently support page_faults metric)

View File

@ -23,6 +23,7 @@ import (
"github.com/shirou/gopsutil/host"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal"
@ -66,8 +67,7 @@ func newSwapScraper(_ context.Context, cfg *Config) *scraper {
return &scraper{config: cfg, pageSize: pageSize, perfCounterScraper: &perfcounters.PerfLibScraper{}, bootTime: host.BootTime, pageFileStats: getPageFileStats}
}
// Initialize
func (s *scraper) Initialize(_ context.Context) error {
func (s *scraper) start(context.Context, component.Host) error {
bootTime, err := s.bootTime()
if err != nil {
return err
@ -78,8 +78,7 @@ func (s *scraper) Initialize(_ context.Context) error {
return s.perfCounterScraper.Initialize(memory)
}
// Scrape
func (s *scraper) Scrape(context.Context) (pdata.MetricSlice, error) {
func (s *scraper) scrape(context.Context) (pdata.MetricSlice, error) {
metrics := pdata.NewMetricSlice()
var errors []error

View File

@ -24,6 +24,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/receiver/hostmetricsreceiver/internal/perfcounters"
)
@ -102,10 +103,10 @@ func TestScrape_Errors(t *testing.T) {
}
scraper.perfCounterScraper = perfcounters.NewMockPerfCounterScraperError(test.scrapeErr, test.getObjectErr, test.getValuesErr)
err := scraper.Initialize(context.Background())
err := scraper.start(context.Background(), componenttest.NewNopHost())
require.NoError(t, err, "Failed to initialize swap scraper: %v", err)
metrics, err := scraper.Scrape(context.Background())
metrics, err := scraper.scrape(context.Background())
if test.expectedErr != "" {
assert.EqualError(t, err, test.expectedErr)

View File

@ -18,6 +18,7 @@ import (
"context"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/component/componenthelper"
"go.opentelemetry.io/collector/consumer/pdata"
"go.opentelemetry.io/collector/obsreport"
)
@ -28,16 +29,8 @@ type ScrapeMetrics func(context.Context) (pdata.MetricSlice, error)
// Scrape resource metrics.
type ScrapeResourceMetrics func(context.Context) (pdata.ResourceMetricsSlice, error)
// Initialize performs any timely initialization tasks such as
// setting up performance counters for initial collection.
type Initialize func(ctx context.Context) error
// Close should clean up any unmanaged resources such as
// performance counter handles.
type Close func(ctx context.Context) error
// ScraperOption apply changes to internal options.
type ScraperOption func(*baseScraper)
type ScraperOption func(*componenthelper.ComponentSettings)
type BaseScraper interface {
component.Component
@ -61,40 +54,25 @@ type ResourceMetricsScraper interface {
var _ BaseScraper = (*baseScraper)(nil)
type baseScraper struct {
name string
initialize Initialize
close Close
component.Component
name string
}
func (b baseScraper) Name() string {
return b.name
}
func (b baseScraper) Start(ctx context.Context, _ component.Host) error {
if b.initialize == nil {
return nil
}
return b.initialize(ctx)
}
func (b baseScraper) Shutdown(ctx context.Context) error {
if b.close == nil {
return nil
}
return b.close(ctx)
}
// WithInitialize sets the function that will be called on startup.
func WithInitialize(initialize Initialize) ScraperOption {
return func(o *baseScraper) {
o.initialize = initialize
// WithStart sets the function that will be called on startup.
func WithStart(start componenthelper.Start) ScraperOption {
return func(s *componenthelper.ComponentSettings) {
s.Start = start
}
}
// WithClose sets the function that will be called on shutdown.
func WithClose(close Close) ScraperOption {
return func(o *baseScraper) {
o.close = close
// WithShutdown sets the function that will be called on shutdown.
func WithShutdown(shutdown componenthelper.Shutdown) ScraperOption {
return func(s *componenthelper.ComponentSettings) {
s.Shutdown = shutdown
}
}
@ -113,13 +91,17 @@ func NewMetricsScraper(
scrape ScrapeMetrics,
options ...ScraperOption,
) MetricsScraper {
ms := &metricsScraper{
baseScraper: baseScraper{name: name},
ScrapeMetrics: scrape,
set := componenthelper.DefaultComponentSettings()
for _, op := range options {
op(set)
}
for _, op := range options {
op(&ms.baseScraper)
ms := &metricsScraper{
baseScraper: baseScraper{
Component: componenthelper.NewComponent(set),
name: name,
},
ScrapeMetrics: scrape,
}
return ms
@ -148,13 +130,17 @@ func NewResourceMetricsScraper(
scrape ScrapeResourceMetrics,
options ...ScraperOption,
) ResourceMetricsScraper {
rms := &resourceMetricsScraper{
baseScraper: baseScraper{name: name},
ScrapeResourceMetrics: scrape,
set := componenthelper.DefaultComponentSettings()
for _, op := range options {
op(set)
}
for _, op := range options {
op(&rms.baseScraper)
rms := &resourceMetricsScraper{
baseScraper: baseScraper{
Component: componenthelper.NewComponent(set),
name: name,
},
ScrapeResourceMetrics: scrape,
}
return rms
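Taken together, a scraper built through this helper now gets its Start and Shutdown behaviour from the embedded component built out of componenthelper.DefaultComponentSettings plus the applied options, instead of the hand-rolled nil checks removed above. A minimal sketch of driving the new options directly, written as if it sat next to scraper.go in this package, with trivial stand-in hooks; this mirrors what the scraper controller and the tests below do.

package scraperhelper

import (
	"context"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/consumer/pdata"
)

// exampleLifecycle is a sketch of the new option plumbing: the options mutate
// the component settings, and the resulting component serves Start and Shutdown.
func exampleLifecycle(ctx context.Context, host component.Host) error {
	ms := NewMetricsScraper(
		"example",
		func(context.Context) (pdata.MetricSlice, error) { return pdata.NewMetricSlice(), nil },
		WithStart(func(context.Context, component.Host) error { return nil }), // replaces WithInitialize
		WithShutdown(func(context.Context) error { return nil }),              // replaces WithClose
	)

	if err := ms.Start(ctx, host); err != nil {
		return err
	}
	return ms.Shutdown(ctx)
}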

View File

@ -26,6 +26,7 @@ import (
"go.opencensus.io/trace"
"go.uber.org/zap"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/component/componenterror"
"go.opentelemetry.io/collector/component/componenttest"
"go.opentelemetry.io/collector/consumer"
@ -40,7 +41,7 @@ type testInitialize struct {
err error
}
func (ts *testInitialize) initialize(context.Context) error {
func (ts *testInitialize) start(context.Context, component.Host) error {
ts.ch <- true
return ts.err
}
@ -50,7 +51,7 @@ type testClose struct {
err error
}
func (ts *testClose) close(context.Context) error {
func (ts *testClose) shutdown(context.Context) error {
ts.ch <- true
return ts.err
}
@ -224,7 +225,7 @@ func TestScrapeController(t *testing.T) {
if expectedStartErr != nil {
assert.Equal(t, expectedStartErr, err)
} else if test.initialize {
assertChannelsCalled(t, initializeChs, "initialize was not called")
assertChannelsCalled(t, initializeChs, "start was not called")
}
const iterations = 5
@ -265,7 +266,7 @@ func TestScrapeController(t *testing.T) {
if expectedShutdownErr != nil {
assert.EqualError(t, err, expectedShutdownErr.Error())
} else if test.close {
assertChannelsCalled(t, closeChs, "close was not called")
assertChannelsCalled(t, closeChs, "shutdown was not called")
}
})
}
@ -279,12 +280,12 @@ func configureMetricOptions(test metricsTestCase, initializeChs []chan bool, scr
if test.initialize {
initializeChs[i] = make(chan bool, 1)
ti := &testInitialize{ch: initializeChs[i], err: test.initializeErr}
scraperOptions = append(scraperOptions, WithInitialize(ti.initialize))
scraperOptions = append(scraperOptions, WithStart(ti.start))
}
if test.close {
closeChs[i] = make(chan bool, 1)
tc := &testClose{ch: closeChs[i], err: test.closeErr}
scraperOptions = append(scraperOptions, WithClose(tc.close))
scraperOptions = append(scraperOptions, WithShutdown(tc.shutdown))
}
scrapeMetricsChs[i] = make(chan int)
@ -297,12 +298,12 @@ func configureMetricOptions(test metricsTestCase, initializeChs []chan bool, scr
if test.initialize {
initializeChs[test.scrapers+i] = make(chan bool, 1)
ti := &testInitialize{ch: initializeChs[test.scrapers+i], err: test.initializeErr}
scraperOptions = append(scraperOptions, WithInitialize(ti.initialize))
scraperOptions = append(scraperOptions, WithStart(ti.start))
}
if test.close {
closeChs[test.scrapers+i] = make(chan bool, 1)
tc := &testClose{ch: closeChs[test.scrapers+i], err: test.closeErr}
scraperOptions = append(scraperOptions, WithClose(tc.close))
scraperOptions = append(scraperOptions, WithShutdown(tc.shutdown))
}
testScrapeResourceMetricsChs[i] = make(chan int)

View File

@ -227,7 +227,7 @@ var _escData = map[string]*_escFile{
name: "component_header.html",
local: "templates/component_header.html",
size: 156,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/1SMsQqDMBRFd7/iIq7q5lBiltKt9B8CPklQX6R1e9x/L6ZQ2vXcc65ZE3AZ0V3ztmcV
PW467TnpQVZmzZp0Kfs96VJQizTjw1uyAgAXB+8C4lPmsT4fydqbdY+wCen64F0fB19iWV/yF/54X0en
@ -239,7 +239,7 @@ U3kHAAD//zT+SdCcAAAA
name: "extensions_table.html",
local: "templates/extensions_table.html",
size: 353,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/2SQwU7DMBBE7/2KlemRNJwjxxwQHDnwB248DRbOOnK2tGD531HTQIvqk1fzZjU7Wuw2
gCb5CmjVNiaHVE2j7Tz3DT0osyIiynltqWlp8xSHMTJYntmN0bOUsgDJcg9ap3jw7HC8n7+z5y0epgU7
@ -252,7 +252,7 @@ oxX5HeETfMGv9NPTkv4i2e6jT3HPrqE7AEui8yaECbdWkzPYUXWlaHFkg++5VR1YkJTRlt4Tdq06HVfK
name: "footer.html",
local: "templates/footer.html",
size: 15,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/7LRT8pPqbTjstHPKMnNsQMEAAD//wEFevAPAAAA
`,
@ -262,7 +262,7 @@ H4sIAAAAAAAC/7LRT8pPqbTjstHPKMnNsQMEAAD//wEFevAPAAAA
name: "header.html",
local: "templates/header.html",
size: 467,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/5TRMU8sIRAH8P4+BY/25eC9szGGxUItLIwW11giO7uMB8wG5rxsLvfdDdnTxNhoBeFP
fpnM3/y5fbzZPj/dicAp2pVph4guj52ELK0J4Hq7EkIIk4Cd8MGVCtzJPQ/rS3mOGDmCPR7Vtl1OJ6OX
@ -276,7 +276,7 @@ vuDEoocBiqjF/5RszGuV1uhFsCujl0bMC/Vz62vzZe1hY98DAAD//7qRGmLTAQAA
name: "pipelines_table.html",
local: "templates/pipelines_table.html",
size: 1946,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/7SVwXLTMBCG7zyFxnRyIjVcU1scSpnhAMN0eAFZ2gRNlZVmJbdujd+dsWyrTp0LtL5k
rOjX/tlv/8hFEJUB5sOjgTKrLCmgrXdCajzs2MeMv2OMsSLQ8DAsFJPWeCew/MSE0QcsDewDLyr+tTbm
@ -293,7 +293,7 @@ QeMmXNC4hCvdNKvQgsYtacFoGWFFxSvCNl+lu3HQFXl8JfO/AQAA//9We3KLmgcAAA==
name: "properties_table.html",
local: "templates/properties_table.html",
size: 420,
modtime: 1605208512,
modtime: 1594178791,
compressed: `
H4sIAAAAAAAC/2SRwW7DIBBE7/6KVRr1VMc5u5gfqFT11Ds2U8sqWVuwqRoR/r1yTCpb4YAEO48ZDarV
MR7ezQkp1apqdaHEtA4U5OLQ7NrRW/gyTKYbuK/puNMFEVGMtB/Y4pfqho6UUr71hnvk0Qvt4XACyyw6