Skip to content

Commit

Permalink
[receiver/hostmetrics] register scrapers with scraper names (#35837)
Browse files Browse the repository at this point in the history
#### Description
This PR changes each scraper's factory to create a scraper with the
scraper's name. This is to ensure that logs from the `scrapercontroller`
will contain the name of the scraper.

#### Link to tracking issue
Fixes #35814 

#### Testing
New unit tests

Built the collector with the `cpu` scraper modified to immediately return an
error, and got the following message:
```
2024-10-16T14:58:46.396Z        error   scraperhelper/scrapercontroller.go:205  Error scraping metrics  {"kind": "receiver", "name": "hostmetrics", "data_type": "metrics", "error": "hi i failed", "scraper": "cpu"}
```
  • Loading branch information
braydonk authored Oct 24, 2024
1 parent 8988740 commit cf3f7df
Show file tree
Hide file tree
Showing 19 changed files with 99 additions and 18 deletions.
27 changes: 27 additions & 0 deletions .chloggen/hostmetrics-report-scraper-names-in-errors.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: hostmetrics

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Adjust scraper creation so that the scraper name is reported with hostmetrics scraper errors.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [35814]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package cpuscraper // import "github.com/open-telemetry/opentelemetry-collector-
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/cpuscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "cpu"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct{}

Expand All @@ -41,7 +46,7 @@ func (f *Factory) CreateMetricsScraper(
s := newCPUScraper(ctx, settings, cfg)

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,5 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package diskscraper // import "github.com/open-telemetry/opentelemetry-collector
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/diskscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "disk"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -45,7 +50,7 @@ func (f *Factory) CreateMetricsScraper(
}

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}

func TestCreateMetricsScraper_Error(t *testing.T) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,11 +7,11 @@ import (
"context"
"os"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/filesystemscraper/internal/metadata"
)

Expand All @@ -22,6 +22,11 @@ const (
TypeStr = "filesystem"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand Down Expand Up @@ -70,5 +75,5 @@ func (f *Factory) CreateMetricsScraper(
}

return scraperhelper.NewScraper(
hostmeta.Type, s.scrape, scraperhelper.WithStart(s.start))
scraperType, s.scrape, scraperhelper.WithStart(s.start))
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}

func TestCreateMetricsScraper_Error(t *testing.T) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package loadscraper // import "github.com/open-telemetry/opentelemetry-collector
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/loadscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "load"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -42,7 +47,7 @@ func (f *Factory) CreateMetricsScraper(
s := newLoadScraper(ctx, settings, cfg)

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
scraperhelper.WithShutdown(s.shutdown),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,5 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package memoryscraper // import "github.com/open-telemetry/opentelemetry-collect
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/memoryscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "memory"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -42,5 +47,5 @@ func (f *Factory) CreateMetricsScraper(
s := newMemoryScraper(ctx, settings, cfg)

return scraperhelper.NewScraper(
hostmeta.Type, s.scrape, scraperhelper.WithStart(s.start))
scraperType, s.scrape, scraperhelper.WithStart(s.start))
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,5 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package networkscraper // import "github.com/open-telemetry/opentelemetry-collec
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/networkscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "network"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -45,7 +50,7 @@ func (f *Factory) CreateMetricsScraper(
}

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}

func TestCreateMetricsScraper_Error(t *testing.T) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package pagingscraper // import "github.com/open-telemetry/opentelemetry-collect
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/pagingscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "paging"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -42,7 +47,7 @@ func (f *Factory) CreateMetricsScraper(
s := newPagingScraper(ctx, settings, cfg)

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,4 +24,5 @@ func TestCreateMetricsScraper(t *testing.T) {
scraper, err := factory.CreateMetricsScraper(context.Background(), receivertest.NewNopSettings(), cfg)
assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ package processesscraper // import "github.com/open-telemetry/opentelemetry-coll
import (
"context"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/processesscraper/internal/metadata"
)

Expand All @@ -21,6 +21,11 @@ const (
TypeStr = "processes"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

// Factory is the Factory for scraper.
type Factory struct {
}
Expand All @@ -42,7 +47,7 @@ func (f *Factory) CreateMetricsScraper(
s := newProcessesScraper(ctx, settings, cfg)

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,5 @@ func TestCreateMetricsScraper(t *testing.T) {

assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
}
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@ import (
"errors"
"runtime"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/featuregate"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/scraperhelper"

"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal"
hostmeta "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/metadata"
"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/processscraper/internal/metadata"
)

Expand All @@ -24,6 +24,11 @@ const (
TypeStr = "process"
)

var (
// scraperType is the component type used for the built scraper.
scraperType component.Type = component.MustNewType(TypeStr)
)

var (
bootTimeCacheFeaturegateID = "hostmetrics.process.bootTimeCache"
bootTimeCacheFeaturegate = featuregate.GlobalRegistry().MustRegister(
Expand Down Expand Up @@ -62,7 +67,7 @@ func (f *Factory) CreateMetricsScraper(
}

return scraperhelper.NewScraper(
hostmeta.Type,
scraperType,
s.scrape,
scraperhelper.WithStart(s.start),
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ func TestCreateResourceMetricsScraper(t *testing.T) {
if runtime.GOOS == "linux" || runtime.GOOS == "windows" || runtime.GOOS == "darwin" {
assert.NoError(t, err)
assert.NotNil(t, scraper)
assert.Equal(t, scraperType.String(), scraper.ID().String())
} else {
assert.Error(t, err)
assert.Nil(t, scraper)
Expand Down

0 comments on commit cf3f7df

Please sign in to comment.