Skip to content

Commit

Permalink
PMM-11497 Update dependencies. (#87)
Browse files Browse the repository at this point in the history
  • Loading branch information
BupycHuk authored Feb 8, 2023
1 parent 5ba7c07 commit 25de313
Show file tree
Hide file tree
Showing 15 changed files with 403 additions and 310 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/go.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ jobs:
strategy:
matrix:
go-version:
- 1.17
- 1.19
include:
- go-version: tip
os: ubuntu-latest
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ jobs:
name: Set up Go
uses: percona-platform/setup-go@v2
with:
go-version: 1.17
go-version: 1.19
-
name: Login to GitHub Container Registry
uses: percona-platform/login-action@v1
Expand Down
12 changes: 7 additions & 5 deletions basic/collector.go
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
package basic

import (
"fmt"
"sync"
"time"

"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"

"github.com/percona/rds_exporter/config"
"github.com/percona/rds_exporter/sessions"
Expand Down Expand Up @@ -36,12 +38,12 @@ type Collector struct {
}

// New creates a new instance of a Collector.
func New(config *config.Config, sessions *sessions.Sessions) *Collector {
func New(config *config.Config, sessions *sessions.Sessions, logger log.Logger) *Collector {
return &Collector{
config: config,
sessions: sessions,
metrics: Metrics,
l: log.With("component", "basic"),
l: log.With(logger, "component", "basic"),
}
}

Expand All @@ -63,7 +65,7 @@ func (e *Collector) collect(ch chan<- prometheus.Metric) {

for _, instance := range e.config.Instances {
if instance.DisableBasicMetrics {
e.l.Debugf("Instance %s has disabled basic metrics, skipping.", instance)
level.Debug(e.l).Log("msg", fmt.Sprintf("Instance %s has disabled basic metrics, skipping.", instance))
continue
}
instance := instance
Expand All @@ -73,7 +75,7 @@ func (e *Collector) collect(ch chan<- prometheus.Metric) {

s := NewScraper(&instance, e, ch)
if s == nil {
e.l.Errorf("No scraper for %s, skipping.", instance)
level.Error(e.l).Log("msg", fmt.Sprintf("No scraper for %s, skipping.", instance))
return
}
s.Scrape()
Expand Down
15 changes: 9 additions & 6 deletions basic/collector_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import (
"testing"

"github.com/percona/exporter_shared/helpers"
"github.com/prometheus/common/promlog"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

Expand All @@ -18,11 +19,12 @@ import (
func TestCollector(t *testing.T) {
cfg, err := config.Load("../config.tests.yml")
require.NoError(t, err)
client := client.New()
sess, err := sessions.New(cfg.Instances, client.HTTP(), false)
logger := promlog.New(&promlog.Config{})
client := client.New(logger)
sess, err := sessions.New(cfg.Instances, client.HTTP(), logger, false)
require.NoError(t, err)

c := New(cfg, sess)
c := New(cfg, sess, logger)

actualMetrics := helpers.ReadMetrics(helpers.CollectMetrics(c))
sort.Slice(actualMetrics, func(i, j int) bool { return actualMetrics[i].Less(actualMetrics[j]) })
Expand Down Expand Up @@ -52,7 +54,8 @@ func TestCollector(t *testing.T) {
func TestCollectorDisableBasicMetrics(t *testing.T) {
cfg, err := config.Load("../config.tests.yml")
require.NoError(t, err)
client := client.New()
logger := promlog.New(&promlog.Config{})
client := client.New(logger)
instanceGroups := make(map[bool][]string, 2)
for i := range cfg.Instances {
// Disable basic metrics in even instances.
Expand All @@ -62,10 +65,10 @@ func TestCollectorDisableBasicMetrics(t *testing.T) {
// Groups instance names by disabled or enabled metrics.
instanceGroups[isDisabled] = append(instanceGroups[isDisabled], cfg.Instances[i].Instance)
}
sess, err := sessions.New(cfg.Instances, client.HTTP(), false)
sess, err := sessions.New(cfg.Instances, client.HTTP(), logger, false)
require.NoError(t, err)

c := New(cfg, sess)
c := New(cfg, sess, logger)

actualMetrics := helpers.ReadMetrics(helpers.CollectMetrics(c))
actualLines := helpers.Format(helpers.WriteMetrics(actualMetrics))
Expand Down
3 changes: 2 additions & 1 deletion basic/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (

"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"

"github.com/percona/rds_exporter/config"
Expand Down Expand Up @@ -85,7 +86,7 @@ func (s *Scraper) Scrape() {
defer wg.Done()

if err := s.scrapeMetric(metric); err != nil {
s.collector.l.With("metric", metric.cwName).Error(err)
level.Error(s.collector.l).Log("metric", metric.cwName, "error", err)
}
}()
}
Expand Down
5 changes: 3 additions & 2 deletions client/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"net/http"
"time"

"github.com/go-kit/log"
"github.com/prometheus/client_golang/prometheus"
)

Expand All @@ -14,8 +15,8 @@ type Client struct {
}

// New creates new Client.
func New() *Client {
t := newTransport()
func New(logger log.Logger) *Client {
t := newTransport(logger)
return &Client{
c: &http.Client{
Transport: t,
Expand Down
12 changes: 7 additions & 5 deletions client/transport.go
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
package client

import (
"fmt"
"net/http"
"strconv"
"time"

"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"
)

type transport struct {
Expand All @@ -17,14 +19,14 @@ type transport struct {
mResponses *prometheus.SummaryVec
}

func newTransport() *transport {
func newTransport(logger log.Logger) *transport {
return &transport{
t: &http.Transport{
MaxIdleConnsPerHost: 5,
IdleConnTimeout: 2 * time.Minute,
Proxy: http.ProxyFromEnvironment,
},
l: log.With("component", "transport"),
l: log.With(logger, "component", "transport"),

mRequests: prometheus.NewCounter(prometheus.CounterOpts{
Name: "rds_exporter_requests_total",
Expand All @@ -46,10 +48,10 @@ func (t *transport) RoundTrip(req *http.Request) (*http.Response, error) {
duration := time.Since(start)
if resp != nil {
t.mResponses.WithLabelValues(strconv.Itoa(resp.StatusCode)).Observe(duration.Seconds())
t.l.Debugf("%s %s -> %d (%s)", req.Method, req.URL.String(), resp.StatusCode, duration)
level.Debug(t.l).Log("msg", fmt.Sprintf("%s %s -> %d (%s)", req.Method, req.URL.String(), resp.StatusCode, duration))
} else {
t.mResponses.WithLabelValues("err").Observe(duration.Seconds())
t.l.Errorf("%s %s -> %s (%s)", req.Method, req.URL.String(), err, duration)
level.Error(t.l).Log("msg", fmt.Sprintf("%s %s -> %s (%s)", req.Method, req.URL.String(), err, duration))
}
return resp, err
}
Expand Down
12 changes: 7 additions & 5 deletions enhanced/collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,13 @@ package enhanced

import (
"context"
"fmt"
"sync"
"time"

"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"

"github.com/percona/rds_exporter/sessions"
)
Expand All @@ -27,15 +29,15 @@ const (
)

// NewCollector creates new collector and starts scrapers.
func NewCollector(sessions *sessions.Sessions) *Collector {
func NewCollector(sessions *sessions.Sessions, logger log.Logger) *Collector {
c := &Collector{
sessions: sessions,
logger: log.With("component", "enhanced"),
logger: log.With(logger, "component", "enhanced"),
metrics: make(map[string][]prometheus.Metric),
}

for session, instances := range sessions.AllSessions() {
s := newScraper(session, instances)
s := newScraper(session, instances, logger)

interval := maxInterval
for _, instance := range instances {
Expand All @@ -46,7 +48,7 @@ func NewCollector(sessions *sessions.Sessions) *Collector {
if interval < minInterval {
interval = minInterval
}
s.logger.Infof("Updating enhanced metrics every %s.", interval)
level.Info(s.logger).Log("msg", fmt.Sprintf("Updating enhanced metrics every %s.", interval))

// perform first scrapes synchronously so returned collector has all metric descriptions
m, _ := s.scrape(context.TODO())
Expand Down
29 changes: 16 additions & 13 deletions enhanced/scraper.go
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/log"

"github.com/percona/rds_exporter/sessions"
)
Expand All @@ -25,7 +26,7 @@ type scraper struct {
testDisallowUnknownFields bool // for tests only
}

func newScraper(session *session.Session, instances []sessions.Instance) *scraper {
func newScraper(session *session.Session, instances []sessions.Instance, logger log.Logger) *scraper {
logStreamNames := make([]string, 0, len(instances))
for _, instance := range instances {
logStreamNames = append(logStreamNames, instance.ResourceID)
Expand All @@ -36,7 +37,7 @@ func newScraper(session *session.Session, instances []sessions.Instance) *scrape
logStreamNames: logStreamNames,
svc: cloudwatchlogs.New(session),
nextStartTime: time.Now().Add(-3 * time.Minute).Round(0), // strip monotonic clock reading
logger: log.With("component", "enhanced"),
logger: log.With(logger, "component", "enhanced"),
}
}

Expand Down Expand Up @@ -82,14 +83,16 @@ func (s *scraper) scrape(ctx context.Context) (map[string][]prometheus.Metric, m
StartTime: aws.Int64(aws.TimeUnixMilli(s.nextStartTime)),
}

s.logger.With("next_start", s.nextStartTime.UTC()).With("since_last", time.Since(s.nextStartTime)).Debugf("Requesting metrics")
level.Debug(log.With(s.logger, "next_start", s.nextStartTime.UTC(), "since_last", time.Since(s.nextStartTime))).Log("msg", "Requesting metrics")

// collect all returned events and metrics/messages
collectAllMetrics := func(output *cloudwatchlogs.FilterLogEventsOutput, lastPage bool) bool {
for _, event := range output.Events {
l := s.logger.With("EventId", *event.EventId).With("LogStreamName", *event.LogStreamName)
l = l.With("Timestamp", aws.MillisecondsTimeValue(event.Timestamp).UTC())
l = l.With("IngestionTime", aws.MillisecondsTimeValue(event.IngestionTime).UTC())
l := log.With(s.logger,
"EventId", *event.EventId,
"LogStreamName", *event.LogStreamName,
"Timestamp", aws.MillisecondsTimeValue(event.Timestamp).UTC(),
"IngestionTime", aws.MillisecondsTimeValue(event.IngestionTime).UTC())

var instance *sessions.Instance
for _, i := range s.instances {
Expand All @@ -99,15 +102,15 @@ func (s *scraper) scrape(ctx context.Context) (map[string][]prometheus.Metric, m
}
}
if instance == nil {
l.Errorf("Failed to find instance.")
level.Error(l).Log("msg", "Failed to find instance.")
continue
}

if instance.DisableEnhancedMetrics {
l.Debugf("Enhanced Metrics are disabled for instance %v.", instance)
level.Debug(l).Log("msg", fmt.Sprintf("Enhanced Metrics are disabled for instance %v.", instance))
continue
}
l = l.With("region", instance.Region).With("instance", instance.Instance)
l = log.With(l, "region", instance.Region, "instance", instance.Instance)

// l.Debugf("Message:\n%s", *event.Message)
osMetrics, err := parseOSMetrics([]byte(*event.Message), s.testDisallowUnknownFields)
Expand All @@ -117,13 +120,13 @@ func (s *scraper) scrape(ctx context.Context) (map[string][]prometheus.Metric, m
panic(fmt.Sprintf("New metrics should be added: %s", err))
}

l.Errorf("Failed to parse metrics: %s.", err)
level.Error(l).Log("msg", "Failed to parse metrics.", "error", err)
continue
}
// l.Debugf("OS Metrics:\n%#v", osMetrics)

timestamp := aws.MillisecondsTimeValue(event.Timestamp).UTC()
l.Debugf("Timestamp from message: %s; from event: %s.", osMetrics.Timestamp.UTC(), timestamp)
level.Debug(l).Log("msg", fmt.Sprintf("Timestamp from message: %s; from event: %s.", osMetrics.Timestamp.UTC(), timestamp))

if allMetrics[instance.ResourceID] == nil {
allMetrics[instance.ResourceID] = make(map[time.Time][]prometheus.Metric)
Expand All @@ -139,7 +142,7 @@ func (s *scraper) scrape(ctx context.Context) (map[string][]prometheus.Metric, m
return true // continue pagination
}
if err := s.svc.FilterLogEventsPagesWithContext(ctx, input, collectAllMetrics); err != nil {
s.logger.Errorf("Failed to filter log events: %s.", err)
level.Error(s.logger).Log("msg", "Failed to filter log events.", "error", err)
}
}
// get better times
Expand Down
15 changes: 9 additions & 6 deletions enhanced/scraper_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"time"

"github.com/percona/exporter_shared/helpers"
"github.com/prometheus/common/promlog"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

Expand Down Expand Up @@ -40,15 +41,16 @@ func filterMetrics(metrics []*helpers.Metric) []*helpers.Metric {
func TestScraper(t *testing.T) {
cfg, err := config.Load("../config.tests.yml")
require.NoError(t, err)
client := client.New()
sess, err := sessions.New(cfg.Instances, client.HTTP(), false)
logger := promlog.New(&promlog.Config{})
client := client.New(logger)
sess, err := sessions.New(cfg.Instances, client.HTTP(), logger, false)
require.NoError(t, err)

for session, instances := range sess.AllSessions() {
session, instances := session, instances
t.Run(fmt.Sprint(instances), func(t *testing.T) {
// test that there are no new metrics
s := newScraper(session, instances)
s := newScraper(session, instances, logger)
s.testDisallowUnknownFields = true
metrics, messages := s.scrape(context.Background())
require.Len(t, metrics, len(instances))
Expand Down Expand Up @@ -139,14 +141,15 @@ func TestBetterTimes(t *testing.T) {
func TestScraperDisableEnhancedMetrics(t *testing.T) {
cfg, err := config.Load("../config.tests.yml")
require.NoError(t, err)
client := client.New()
logger := promlog.New(&promlog.Config{})
client := client.New(logger)
for i := range cfg.Instances {
// Disable enhanced metrics in even instances.
// This disable instance: no-such-instance.
isDisabled := i%2 == 0
cfg.Instances[i].DisableEnhancedMetrics = isDisabled
}
sess, err := sessions.New(cfg.Instances, client.HTTP(), false)
sess, err := sessions.New(cfg.Instances, client.HTTP(), logger, false)
require.NoError(t, err)

// Check if all collected metrics do not contain metrics for instance with disabled metrics.
Expand All @@ -162,7 +165,7 @@ func TestScraperDisableEnhancedMetrics(t *testing.T) {
for session, instances := range sess.AllSessions() {
session, instances := session, instances
t.Run(fmt.Sprint(instances), func(t *testing.T) {
s := newScraper(session, instances)
s := newScraper(session, instances, logger)
s.testDisallowUnknownFields = true
metrics, _ := s.scrape(context.Background())

Expand Down
Loading

0 comments on commit 25de313

Please sign in to comment.