From be5d8de5e561e4ba94cbd08dc390811d36fc06ed Mon Sep 17 00:00:00 2001 From: Pablo Baeyens Date: Wed, 10 Nov 2021 12:33:19 +0100 Subject: [PATCH 1/3] [exporter/datadogexporter] Take hostname into account for cache --- .../internal/translator/dimensions.go | 97 ++++++++ .../internal/translator/dimensions_test.go | 96 +++++++ .../internal/translator/metrics_translator.go | 142 +++++------ .../translator/metrics_translator_test.go | 234 +++++++++--------- .../internal/translator/sketches_test.go | 7 +- .../internal/translator/ttlcache.go | 44 +--- .../internal/translator/ttlcache_test.go | 83 ++----- 7 files changed, 406 insertions(+), 297 deletions(-) create mode 100644 exporter/datadogexporter/internal/translator/dimensions.go create mode 100644 exporter/datadogexporter/internal/translator/dimensions_test.go diff --git a/exporter/datadogexporter/internal/translator/dimensions.go b/exporter/datadogexporter/internal/translator/dimensions.go new file mode 100644 index 000000000000..ff7c6a41bc86 --- /dev/null +++ b/exporter/datadogexporter/internal/translator/dimensions.go @@ -0,0 +1,97 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package translator + +import ( + "fmt" + "sort" + "strings" + + "go.opentelemetry.io/collector/model/pdata" + + "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/internal/translator/utils" +) + +const ( + dimensionSeparator = string(byte(0)) +) + +type metricsDimensions struct { + name string + tags []string + host string +} + +// getTags maps an attributeMap into a slice of Datadog tags +func getTags(labels pdata.AttributeMap) []string { + tags := make([]string, 0, labels.Len()) + labels.Range(func(key string, value pdata.AttributeValue) bool { + v := value.AsString() + tags = append(tags, utils.FormatKeyValueTag(key, v)) + return true + }) + return tags +} + +// AddTags to metrics dimensions. The argument may be modified. +func (m *metricsDimensions) AddTags(tags ...string) metricsDimensions { + return metricsDimensions{ + name: m.name, + // append the field to the passed argument, + // so that the slice we modify is the one we get as an argument. + tags: append(tags, m.tags...), + host: m.host, + } +} + +// WithAttributeMap creates a new metricDimensions struct with additional tags from attributes. +func (m *metricsDimensions) WithAttributeMap(labels pdata.AttributeMap) metricsDimensions { + return m.AddTags(getTags(labels)...) +} + +// WithSuffix creates a new dimensions struct with an extra name suffix. +func (m *metricsDimensions) WithSuffix(suffix string) metricsDimensions { + return metricsDimensions{ + name: fmt.Sprintf("%s.%s", m.name, suffix), + host: m.host, + tags: m.tags, + } +} + +// Uses a logic similar to what is done in the span processor to build metric keys: +// https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/b2327211df976e0a57ef0425493448988772a16b/processor/spanmetricsprocessor/processor.go#L353-L387 +// TODO: make this a public util function? 
+func concatDimensionValue(metricKeyBuilder *strings.Builder, value string) { + metricKeyBuilder.WriteString(value) + metricKeyBuilder.WriteString(dimensionSeparator) +} + +// String maps dimensions to a string to use as an identifier. +// The tags order does not matter. +func (m *metricsDimensions) String() string { + var metricKeyBuilder strings.Builder + + dimensions := make([]string, len(m.tags)) + copy(dimensions, m.tags) + + dimensions = append(dimensions, fmt.Sprintf("name:%s", m.name)) + dimensions = append(dimensions, fmt.Sprintf("host:%s", m.host)) + sort.Strings(dimensions) + + for _, dim := range dimensions { + concatDimensionValue(&metricKeyBuilder, dim) + } + return metricKeyBuilder.String() +} diff --git a/exporter/datadogexporter/internal/translator/dimensions_test.go b/exporter/datadogexporter/internal/translator/dimensions_test.go new file mode 100644 index 000000000000..fe1546aaff9d --- /dev/null +++ b/exporter/datadogexporter/internal/translator/dimensions_test.go @@ -0,0 +1,96 @@ +// Copyright The OpenTelemetry Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package translator + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "go.opentelemetry.io/collector/model/pdata" +) + +func TestWithAttributeMap(t *testing.T) { + attributes := pdata.NewAttributeMapFromMap(map[string]pdata.AttributeValue{ + "key1": pdata.NewAttributeValueString("val1"), + "key2": pdata.NewAttributeValueString("val2"), + "key3": pdata.NewAttributeValueString(""), + }) + + dims := metricsDimensions{} + assert.ElementsMatch(t, + dims.WithAttributeMap(attributes).tags, + [...]string{"key1:val1", "key2:val2", "key3:n/a"}, + ) +} + +func TestMetricDimensionsString(t *testing.T) { + getKey := func(name string, tags []string) string { + dims := metricsDimensions{name: name, tags: tags} + return dims.String() + } + metricName := "metric.name" + noTags := getKey(metricName, []string{}) + someTags := getKey(metricName, []string{"key1:val1", "key2:val2"}) + sameTags := getKey(metricName, []string{"key2:val2", "key1:val1"}) + diffTags := getKey(metricName, []string{"key3:val3"}) + + assert.NotEqual(t, noTags, someTags) + assert.NotEqual(t, someTags, diffTags) + assert.Equal(t, someTags, sameTags) +} + +func TestMetricDimensionsStringNoTagsChange(t *testing.T) { + // The original metricDimensionsToMapKey had an issue where: + // - if the capacity of the tags array passed to it was higher than its length + // - and the metric name is earlier (in alphabetical order) than one of the tags + // then the original tag array would be modified (without a reallocation, since there is enough capacity), + // and would contain a tag labeled as the metric name, while the final tag (in alphabetical order) + // would get left out. + // This test checks that this doesn't happen anymore. 
+ + originalTags := make([]string, 2, 3) + originalTags[0] = "key1:val1" + originalTags[1] = "key2:val2" + + dims := metricsDimensions{ + name: "a.metric.name", + tags: originalTags, + } + + _ = dims.String() + assert.Equal(t, []string{"key1:val1", "key2:val2"}, originalTags) + +} + +var testDims metricsDimensions = metricsDimensions{ + name: "test.metric", + tags: []string{"key:val"}, + host: "host", +} + +func TestWithSuffix(t *testing.T) { + dimsSuf1 := testDims.WithSuffix("suffixOne") + dimsSuf2 := testDims.WithSuffix("suffixTwo") + + assert.Equal(t, "test.metric", testDims.name) + assert.Equal(t, "test.metric.suffixOne", dimsSuf1.name) + assert.Equal(t, "test.metric.suffixTwo", dimsSuf2.name) +} + +func TestAddTags(t *testing.T) { + dimsWithTags := testDims.AddTags("key1:val1", "key2:val2") + assert.ElementsMatch(t, []string{"key:val", "key1:val1", "key2:val2"}, dimsWithTags.tags) + assert.ElementsMatch(t, []string{"key:val"}, testDims.tags) +} diff --git a/exporter/datadogexporter/internal/translator/metrics_translator.go b/exporter/datadogexporter/internal/translator/metrics_translator.go index 21a01c403cb3..00d6f846444b 100644 --- a/exporter/datadogexporter/internal/translator/metrics_translator.go +++ b/exporter/datadogexporter/internal/translator/metrics_translator.go @@ -26,7 +26,6 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/internal/attributes" "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/internal/instrumentationlibrary" - "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/internal/translator/utils" ) const metricName string = "metric name" @@ -67,17 +66,6 @@ func New(logger *zap.Logger, options ...Option) (*Translator, error) { return &Translator{cache, logger, cfg}, nil } -// getTags maps an attributeMap into a slice of Datadog tags -func getTags(labels pdata.AttributeMap) []string { - tags := make([]string, 0, labels.Len()) - 
labels.Range(func(key string, value pdata.AttributeValue) bool { - v := value.AsString() - tags = append(tags, utils.FormatKeyValueTag(key, v)) - return true - }) - return tags -} - // isCumulativeMonotonic checks if a metric is a cumulative monotonic metric func isCumulativeMonotonic(md pdata.Metric) bool { switch md.DataType() { @@ -102,17 +90,14 @@ func (t *Translator) isSkippable(name string, v float64) bool { func (t *Translator) mapNumberMetrics( ctx context.Context, consumer TimeSeriesConsumer, - name string, + dims metricsDimensions, dt MetricDataType, slice pdata.NumberDataPointSlice, - additionalTags []string, - host string, ) { for i := 0; i < slice.Len(); i++ { p := slice.At(i) - tags := getTags(p.Attributes()) - tags = append(tags, additionalTags...) + pointDims := dims.WithAttributeMap(p.Attributes()) var val float64 switch p.Type() { case pdata.MetricValueTypeDouble: @@ -121,11 +106,11 @@ func (t *Translator) mapNumberMetrics( val = float64(p.IntVal()) } - if t.isSkippable(name, val) { + if t.isSkippable(pointDims.name, val) { continue } - consumer.ConsumeTimeSeries(ctx, name, dt, uint64(p.Timestamp()), val, tags, host) + consumer.ConsumeTimeSeries(ctx, pointDims.name, dt, uint64(p.Timestamp()), val, pointDims.tags, pointDims.host) } } @@ -133,17 +118,14 @@ func (t *Translator) mapNumberMetrics( func (t *Translator) mapNumberMonotonicMetrics( ctx context.Context, consumer TimeSeriesConsumer, - name string, + dims metricsDimensions, slice pdata.NumberDataPointSlice, - additionalTags []string, - host string, ) { for i := 0; i < slice.Len(); i++ { p := slice.At(i) ts := uint64(p.Timestamp()) startTs := uint64(p.StartTimestamp()) - tags := getTags(p.Attributes()) - tags = append(tags, additionalTags...) 
+ pointDims := dims.WithAttributeMap(p.Attributes()) var val float64 switch p.Type() { @@ -153,12 +135,12 @@ func (t *Translator) mapNumberMonotonicMetrics( val = float64(p.IntVal()) } - if t.isSkippable(name, val) { + if t.isSkippable(pointDims.name, val) { continue } - if dx, ok := t.prevPts.MonotonicDiff(name, tags, startTs, ts, val); ok { - consumer.ConsumeTimeSeries(ctx, name, Count, ts, dx, tags, host) + if dx, ok := t.prevPts.MonotonicDiff(pointDims, startTs, ts, val); ok { + consumer.ConsumeTimeSeries(ctx, pointDims.name, Count, ts, dx, pointDims.tags, pointDims.host) } } } @@ -179,11 +161,9 @@ func getBounds(p pdata.HistogramDataPoint, idx int) (lowerBound float64, upperBo func (t *Translator) getSketchBuckets( ctx context.Context, consumer SketchConsumer, - name string, + pointDims metricsDimensions, p pdata.HistogramDataPoint, delta bool, - tags []string, - host string, ) { startTs := uint64(p.StartTimestamp()) ts := uint64(p.Timestamp()) @@ -195,11 +175,10 @@ func (t *Translator) getSketchBuckets( // The bucketTags are computed from the bounds before the InsertInterpolate fix is done, // otherwise in the case where p.ExplicitBounds() has a size of 1 (eg. [0]), the two buckets // would have the same bucketTags (lower_bound:0 and upper_bound:0), resulting in a buggy behavior. - bucketTags := []string{ + bucketDims := pointDims.AddTags( fmt.Sprintf("lower_bound:%s", formatFloat(lowerBound)), fmt.Sprintf("upper_bound:%s", formatFloat(upperBound)), - } - bucketTags = append(bucketTags, tags...) 
+ ) // InsertInterpolate doesn't work with an infinite bound; insert in to the bucket that contains the non-infinite bound // https://github.com/DataDog/datadog-agent/blob/7.31.0/pkg/aggregator/check_sampler.go#L107-L111 @@ -212,7 +191,7 @@ func (t *Translator) getSketchBuckets( count := p.BucketCounts()[j] if delta { as.InsertInterpolate(lowerBound, upperBound, uint(count)) - } else if dx, ok := t.prevPts.Diff(name, bucketTags, startTs, ts, float64(count)); ok { + } else if dx, ok := t.prevPts.Diff(bucketDims, startTs, ts, float64(count)); ok { as.InsertInterpolate(lowerBound, upperBound, uint(dx)) } @@ -220,37 +199,34 @@ func (t *Translator) getSketchBuckets( sketch := as.Finish() if sketch != nil { - consumer.ConsumeSketch(ctx, name, ts, sketch, tags, host) + consumer.ConsumeSketch(ctx, pointDims.name, ts, sketch, pointDims.tags, pointDims.host) } } func (t *Translator) getLegacyBuckets( ctx context.Context, consumer TimeSeriesConsumer, - name string, + pointDims metricsDimensions, p pdata.HistogramDataPoint, delta bool, - tags []string, - host string, ) { startTs := uint64(p.StartTimestamp()) ts := uint64(p.Timestamp()) // We have a single metric, 'bucket', which is tagged with the bucket bounds. See: // https://github.com/DataDog/integrations-core/blob/7.30.1/datadog_checks_base/datadog_checks/base/checks/openmetrics/v2/transformers/histogram.py - fullName := fmt.Sprintf("%s.bucket", name) + baseBucketDims := pointDims.WithSuffix("bucket") for idx, val := range p.BucketCounts() { lowerBound, upperBound := getBounds(p, idx) - bucketTags := []string{ + bucketDims := baseBucketDims.AddTags( fmt.Sprintf("lower_bound:%s", formatFloat(lowerBound)), fmt.Sprintf("upper_bound:%s", formatFloat(upperBound)), - } - bucketTags = append(bucketTags, tags...) 
+ ) count := float64(val) if delta { - consumer.ConsumeTimeSeries(ctx, fullName, Count, ts, count, bucketTags, host) - } else if dx, ok := t.prevPts.Diff(fullName, bucketTags, startTs, ts, count); ok { - consumer.ConsumeTimeSeries(ctx, fullName, Count, ts, dx, bucketTags, host) + consumer.ConsumeTimeSeries(ctx, bucketDims.name, Count, ts, count, bucketDims.tags, bucketDims.host) + } else if dx, ok := t.prevPts.Diff(bucketDims, startTs, ts, count); ok { + consumer.ConsumeTimeSeries(ctx, bucketDims.name, Count, ts, dx, bucketDims.tags, bucketDims.host) } } } @@ -271,46 +247,43 @@ func (t *Translator) getLegacyBuckets( func (t *Translator) mapHistogramMetrics( ctx context.Context, consumer Consumer, - name string, + dims metricsDimensions, slice pdata.HistogramDataPointSlice, delta bool, - additionalTags []string, - host string, ) { for i := 0; i < slice.Len(); i++ { p := slice.At(i) startTs := uint64(p.StartTimestamp()) ts := uint64(p.Timestamp()) - tags := getTags(p.Attributes()) - tags = append(tags, additionalTags...) 
+ pointDims := dims.WithAttributeMap(p.Attributes()) if t.cfg.SendCountSum { count := float64(p.Count()) - countName := fmt.Sprintf("%s.count", name) + countDims := pointDims.WithSuffix("count") if delta { - consumer.ConsumeTimeSeries(ctx, countName, Count, ts, count, tags, host) - } else if dx, ok := t.prevPts.Diff(countName, tags, startTs, ts, count); ok { - consumer.ConsumeTimeSeries(ctx, countName, Count, ts, dx, tags, host) + consumer.ConsumeTimeSeries(ctx, countDims.name, Count, ts, count, countDims.tags, countDims.host) + } else if dx, ok := t.prevPts.Diff(countDims, startTs, ts, count); ok { + consumer.ConsumeTimeSeries(ctx, countDims.name, Count, ts, dx, countDims.tags, countDims.host) } } if t.cfg.SendCountSum { sum := p.Sum() - sumName := fmt.Sprintf("%s.sum", name) - if !t.isSkippable(sumName, p.Sum()) { + sumDims := pointDims.WithSuffix("sum") + if !t.isSkippable(sumDims.name, p.Sum()) { if delta { - consumer.ConsumeTimeSeries(ctx, sumName, Count, ts, sum, tags, host) - } else if dx, ok := t.prevPts.Diff(sumName, tags, startTs, ts, sum); ok { - consumer.ConsumeTimeSeries(ctx, sumName, Count, ts, dx, tags, host) + consumer.ConsumeTimeSeries(ctx, sumDims.name, Count, ts, sum, sumDims.tags, sumDims.host) + } else if dx, ok := t.prevPts.Diff(sumDims, startTs, ts, sum); ok { + consumer.ConsumeTimeSeries(ctx, sumDims.name, Count, ts, dx, sumDims.tags, sumDims.host) } } } switch t.cfg.HistMode { case HistogramModeCounters: - t.getLegacyBuckets(ctx, consumer, name, p, delta, tags, host) + t.getLegacyBuckets(ctx, consumer, pointDims, p, delta) case HistogramModeDistributions: - t.getSketchBuckets(ctx, consumer, name, p, delta, tags, host) + t.getSketchBuckets(ctx, consumer, pointDims, p, delta) } } } @@ -346,49 +319,45 @@ func getQuantileTag(quantile float64) string { func (t *Translator) mapSummaryMetrics( ctx context.Context, consumer TimeSeriesConsumer, - name string, + dims metricsDimensions, slice pdata.SummaryDataPointSlice, - additionalTags []string, - 
host string, ) { for i := 0; i < slice.Len(); i++ { p := slice.At(i) startTs := uint64(p.StartTimestamp()) ts := uint64(p.Timestamp()) - tags := getTags(p.Attributes()) - tags = append(tags, additionalTags...) + pointDims := dims.WithAttributeMap(p.Attributes()) // count and sum are increasing; we treat them as cumulative monotonic sums. { - countName := fmt.Sprintf("%s.count", name) - if dx, ok := t.prevPts.Diff(countName, tags, startTs, ts, float64(p.Count())); ok && !t.isSkippable(countName, dx) { - consumer.ConsumeTimeSeries(ctx, countName, Count, ts, dx, tags, host) + countDims := pointDims.WithSuffix("count") + if dx, ok := t.prevPts.Diff(countDims, startTs, ts, float64(p.Count())); ok && !t.isSkippable(countDims.name, dx) { + consumer.ConsumeTimeSeries(ctx, countDims.name, Count, ts, dx, countDims.tags, countDims.host) } } { - sumName := fmt.Sprintf("%s.sum", name) - if !t.isSkippable(sumName, p.Sum()) { - if dx, ok := t.prevPts.Diff(sumName, tags, startTs, ts, p.Sum()); ok { - consumer.ConsumeTimeSeries(ctx, sumName, Count, ts, dx, tags, host) + sumDims := pointDims.WithSuffix("sum") + if !t.isSkippable(sumDims.name, p.Sum()) { + if dx, ok := t.prevPts.Diff(sumDims, startTs, ts, p.Sum()); ok { + consumer.ConsumeTimeSeries(ctx, sumDims.name, Count, ts, dx, sumDims.tags, sumDims.host) } } } if t.cfg.Quantiles { - fullName := fmt.Sprintf("%s.quantile", name) + baseQuantileDims := pointDims.WithSuffix("quantile") quantiles := p.QuantileValues() for i := 0; i < quantiles.Len(); i++ { q := quantiles.At(i) - if t.isSkippable(fullName, q.Value()) { + if t.isSkippable(baseQuantileDims.name, q.Value()) { continue } - quantileTags := []string{getQuantileTag(q.Quantile())} - quantileTags = append(quantileTags, tags...) 
- consumer.ConsumeTimeSeries(ctx, fullName, Gauge, ts, q.Value(), quantileTags, host) + quantileDims := baseQuantileDims.AddTags(getQuantileTag(q.Quantile())) + consumer.ConsumeTimeSeries(ctx, quantileDims.name, Gauge, ts, q.Value(), quantileDims.tags, quantileDims.host) } } } @@ -436,19 +405,24 @@ func (t *Translator) MapMetrics(ctx context.Context, md pdata.Metrics, consumer for k := 0; k < metricsArray.Len(); k++ { md := metricsArray.At(k) + baseDims := metricsDimensions{ + name: md.Name(), + tags: additionalTags, + host: host, + } switch md.DataType() { case pdata.MetricDataTypeGauge: - t.mapNumberMetrics(ctx, consumer, md.Name(), Gauge, md.Gauge().DataPoints(), additionalTags, host) + t.mapNumberMetrics(ctx, consumer, baseDims, Gauge, md.Gauge().DataPoints()) case pdata.MetricDataTypeSum: switch md.Sum().AggregationTemporality() { case pdata.MetricAggregationTemporalityCumulative: if t.cfg.SendMonotonic && isCumulativeMonotonic(md) { - t.mapNumberMonotonicMetrics(ctx, consumer, md.Name(), md.Sum().DataPoints(), additionalTags, host) + t.mapNumberMonotonicMetrics(ctx, consumer, baseDims, md.Sum().DataPoints()) } else { - t.mapNumberMetrics(ctx, consumer, md.Name(), Gauge, md.Sum().DataPoints(), additionalTags, host) + t.mapNumberMetrics(ctx, consumer, baseDims, Gauge, md.Sum().DataPoints()) } case pdata.MetricAggregationTemporalityDelta: - t.mapNumberMetrics(ctx, consumer, md.Name(), Count, md.Sum().DataPoints(), additionalTags, host) + t.mapNumberMetrics(ctx, consumer, baseDims, Count, md.Sum().DataPoints()) default: // pdata.AggregationTemporalityUnspecified or any other not supported type t.logger.Debug("Unknown or unsupported aggregation temporality", zap.String(metricName, md.Name()), @@ -460,7 +434,7 @@ func (t *Translator) MapMetrics(ctx context.Context, md pdata.Metrics, consumer switch md.Histogram().AggregationTemporality() { case pdata.MetricAggregationTemporalityCumulative, pdata.MetricAggregationTemporalityDelta: delta := 
md.Histogram().AggregationTemporality() == pdata.MetricAggregationTemporalityDelta - t.mapHistogramMetrics(ctx, consumer, md.Name(), md.Histogram().DataPoints(), delta, additionalTags, host) + t.mapHistogramMetrics(ctx, consumer, baseDims, md.Histogram().DataPoints(), delta) default: // pdata.AggregationTemporalityUnspecified or any other not supported type t.logger.Debug("Unknown or unsupported aggregation temporality", zap.String("metric name", md.Name()), @@ -469,7 +443,7 @@ func (t *Translator) MapMetrics(ctx context.Context, md pdata.Metrics, consumer continue } case pdata.MetricDataTypeSummary: - t.mapSummaryMetrics(ctx, consumer, md.Name(), md.Summary().DataPoints(), additionalTags, host) + t.mapSummaryMetrics(ctx, consumer, baseDims, md.Summary().DataPoints()) default: // pdata.MetricDataTypeNone or any other not supported type t.logger.Debug("Unknown or unsupported metric type", zap.String(metricName, md.Name()), zap.Any("data type", md.DataType())) continue diff --git a/exporter/datadogexporter/internal/translator/metrics_translator_test.go b/exporter/datadogexporter/internal/translator/metrics_translator_test.go index 7b393d10941b..aa7e399fa36b 100644 --- a/exporter/datadogexporter/internal/translator/metrics_translator_test.go +++ b/exporter/datadogexporter/internal/translator/metrics_translator_test.go @@ -35,19 +35,6 @@ import ( "github.com/open-telemetry/opentelemetry-collector-contrib/exporter/datadogexporter/internal/attributes" ) -func TestGetTags(t *testing.T) { - attributes := pdata.NewAttributeMapFromMap(map[string]pdata.AttributeValue{ - "key1": pdata.NewAttributeValueString("val1"), - "key2": pdata.NewAttributeValueString("val2"), - "key3": pdata.NewAttributeValueString(""), - }) - - assert.ElementsMatch(t, - getTags(attributes), - [...]string{"key1:val1", "key2:val2", "key3:n/a"}, - ) -} - func TestIsCumulativeMonotonic(t *testing.T) { // Some of these examples are from the hostmetrics receiver // and reflect the semantic meaning of the 
metrics there. @@ -168,16 +155,20 @@ func (m *mockTimeSeriesConsumer) ConsumeTimeSeries( ) } -func newGauge(name string, ts uint64, val float64, tags []string) metric { - return metric{name: name, typ: Gauge, timestamp: ts, value: val, tags: tags} +func newDims(name string) metricsDimensions { + return metricsDimensions{name: name, tags: []string{}} +} + +func newGauge(dims metricsDimensions, ts uint64, val float64) metric { + return metric{name: dims.name, typ: Gauge, timestamp: ts, value: val, tags: dims.tags} } -func newCount(name string, ts uint64, val float64, tags []string) metric { - return metric{name: name, typ: Count, timestamp: ts, value: val, tags: tags} +func newCount(dims metricsDimensions, ts uint64, val float64) metric { + return metric{name: dims.name, typ: Count, timestamp: ts, value: val, tags: dims.tags} } -func newSketch(name string, ts uint64, s summary.Summary, tags []string) sketch { - return sketch{name: name, basic: s, timestamp: ts, tags: tags} +func newSketch(dims metricsDimensions, ts uint64, s summary.Summary) sketch { + return sketch{name: dims.name, basic: s, timestamp: ts, tags: dims.tags} } func TestMapIntMetrics(t *testing.T) { @@ -190,25 +181,28 @@ func TestMapIntMetrics(t *testing.T) { tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "int64.test", Gauge, slice, []string{}, "") + dims := newDims("int64.test") + tr.mapNumberMetrics(ctx, consumer, dims, Gauge, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newGauge("int64.test", uint64(ts), 17, []string{})}, + []metric{newGauge(dims, uint64(ts), 17)}, ) consumer = &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "int64.delta.test", Count, slice, []string{}, "") + dims = newDims("int64.delta.test") + tr.mapNumberMetrics(ctx, consumer, dims, Count, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newCount("int64.delta.test", uint64(ts), 17, []string{})}, + []metric{newCount(dims, 
uint64(ts), 17)}, ) // With attribute tags consumer = &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "int64.test", Gauge, slice, []string{"attribute_tag:attribute_value"}, "") + dims = metricsDimensions{name: "int64.test", tags: []string{"attribute_tag:attribute_value"}} + tr.mapNumberMetrics(ctx, consumer, dims, Gauge, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newGauge("int64.test", uint64(ts), 17, []string{"attribute_tag:attribute_value"})}, + []metric{newGauge(dims, uint64(ts), 17)}, ) } @@ -222,25 +216,28 @@ func TestMapDoubleMetrics(t *testing.T) { tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "float64.test", Gauge, slice, []string{}, "") + dims := newDims("float64.test") + tr.mapNumberMetrics(ctx, consumer, dims, Gauge, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newGauge("float64.test", uint64(ts), math.Pi, []string{})}, + []metric{newGauge(dims, uint64(ts), math.Pi)}, ) consumer = &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "float64.delta.test", Count, slice, []string{}, "") + dims = newDims("float64.delta.test") + tr.mapNumberMetrics(ctx, consumer, dims, Count, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newCount("float64.delta.test", uint64(ts), math.Pi, []string{})}, + []metric{newCount(dims, uint64(ts), math.Pi)}, ) // With attribute tags consumer = &mockTimeSeriesConsumer{} - tr.mapNumberMetrics(ctx, consumer, "float64.test", Gauge, slice, []string{"attribute_tag:attribute_value"}, "") + dims = metricsDimensions{name: "float64.test", tags: []string{"attribute_tag:attribute_value"}} + tr.mapNumberMetrics(ctx, consumer, dims, Gauge, slice) assert.ElementsMatch(t, consumer.metrics, - []metric{newGauge("float64.test", uint64(ts), math.Pi, []string{"attribute_tag:attribute_value"})}, + []metric{newGauge(dims, uint64(ts), math.Pi)}, ) } @@ -248,6 +245,8 @@ func seconds(i int) pdata.Timestamp { return 
pdata.NewTimestampFromTime(time.Unix(int64(i), 0)) } +var exampleDims metricsDimensions = newDims("metric.example") + func TestMapIntMonotonicMetrics(t *testing.T) { // Create list of values deltas := []int64{1, 2, 200, 3, 7, 0} @@ -267,22 +266,20 @@ func TestMapIntMonotonicMetrics(t *testing.T) { } // Map to Datadog format - metricName := "metric.example" expected := make([]metric, len(deltas)) for i, val := range deltas { - expected[i] = newCount(metricName, uint64(seconds(i+1)), float64(val), []string{}) + expected[i] = newCount(exampleDims, uint64(seconds(i+1)), float64(val)) } ctx := context.Background() consumer := &mockTimeSeriesConsumer{} tr := newTranslator(t, zap.NewNop()) - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, expected, consumer.metrics) } func TestMapIntMonotonicDifferentDimensions(t *testing.T) { - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() // No tags @@ -317,20 +314,19 @@ func TestMapIntMonotonicDifferentDimensions(t *testing.T) { tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(1)), 20, []string{}), - newCount(metricName, uint64(seconds(1)), 30, []string{"key1:valA"}), - newCount(metricName, uint64(seconds(1)), 40, []string{"key1:valB"}), + newCount(exampleDims, uint64(seconds(1)), 20), + newCount(exampleDims.AddTags("key1:valA"), uint64(seconds(1)), 30), + newCount(exampleDims.AddTags("key1:valB"), uint64(seconds(1)), 40), }, ) } func TestMapIntMonotonicWithReboot(t *testing.T) { values := []int64{0, 30, 0, 20} - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() slice.EnsureCapacity(len(values)) @@ -343,12 
+339,12 @@ func TestMapIntMonotonicWithReboot(t *testing.T) { ctx := context.Background() tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(1)), 30, []string{}), - newCount(metricName, uint64(seconds(3)), 20, []string{}), + newCount(exampleDims, uint64(seconds(1)), 30), + newCount(exampleDims, uint64(seconds(3)), 20), }, ) } @@ -357,7 +353,6 @@ func TestMapIntMonotonicOutOfOrder(t *testing.T) { stamps := []int{1, 0, 2, 3} values := []int64{0, 1, 2, 3} - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() slice.EnsureCapacity(len(values)) @@ -370,12 +365,12 @@ func TestMapIntMonotonicOutOfOrder(t *testing.T) { ctx := context.Background() tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(2)), 2, []string{}), - newCount(metricName, uint64(seconds(3)), 1, []string{}), + newCount(exampleDims, uint64(seconds(2)), 2), + newCount(exampleDims, uint64(seconds(3)), 1), }, ) } @@ -398,22 +393,20 @@ func TestMapDoubleMonotonicMetrics(t *testing.T) { } // Map to Datadog format - metricName := "metric.example" expected := make([]metric, len(deltas)) for i, val := range deltas { - expected[i] = newCount(metricName, uint64(seconds(i+1)), val, []string{}) + expected[i] = newCount(exampleDims, uint64(seconds(i+1)), val) } ctx := context.Background() consumer := &mockTimeSeriesConsumer{} tr := newTranslator(t, zap.NewNop()) - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, 
exampleDims, slice) assert.ElementsMatch(t, expected, consumer.metrics) } func TestMapDoubleMonotonicDifferentDimensions(t *testing.T) { - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() // No tags @@ -448,20 +441,19 @@ func TestMapDoubleMonotonicDifferentDimensions(t *testing.T) { tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(1)), 20, []string{}), - newCount(metricName, uint64(seconds(1)), 30, []string{"key1:valA"}), - newCount(metricName, uint64(seconds(1)), 40, []string{"key1:valB"}), + newCount(exampleDims, uint64(seconds(1)), 20), + newCount(exampleDims.AddTags("key1:valA"), uint64(seconds(1)), 30), + newCount(exampleDims.AddTags("key1:valB"), uint64(seconds(1)), 40), }, ) } func TestMapDoubleMonotonicWithReboot(t *testing.T) { values := []float64{0, 30, 0, 20} - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() slice.EnsureCapacity(len(values)) @@ -474,12 +466,12 @@ func TestMapDoubleMonotonicWithReboot(t *testing.T) { ctx := context.Background() tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(2)), 30, []string{}), - newCount(metricName, uint64(seconds(6)), 20, []string{}), + newCount(exampleDims, uint64(seconds(2)), 30), + newCount(exampleDims, uint64(seconds(6)), 20), }, ) } @@ -488,7 +480,6 @@ func TestMapDoubleMonotonicOutOfOrder(t *testing.T) { stamps := []int{1, 0, 2, 3} values := []float64{0, 1, 2, 3} - metricName := "metric.example" slice := pdata.NewNumberDataPointSlice() 
slice.EnsureCapacity(len(values)) @@ -501,12 +492,12 @@ func TestMapDoubleMonotonicOutOfOrder(t *testing.T) { ctx := context.Background() tr := newTranslator(t, zap.NewNop()) consumer := &mockTimeSeriesConsumer{} - tr.mapNumberMonotonicMetrics(ctx, consumer, metricName, slice, []string{}, "") + tr.mapNumberMonotonicMetrics(ctx, consumer, exampleDims, slice) assert.ElementsMatch(t, consumer.metrics, []metric{ - newCount(metricName, uint64(seconds(2)), 2, []string{}), - newCount(metricName, uint64(seconds(3)), 1, []string{}), + newCount(exampleDims, uint64(seconds(2)), 2), + newCount(exampleDims, uint64(seconds(3)), 1), }, ) } @@ -528,6 +519,14 @@ func (c *mockFullConsumer) ConsumeSketch(_ context.Context, name string, ts uint ) } +func dimsWithBucket(dims metricsDimensions, lowerBound string, upperBound string) metricsDimensions { + dims = dims.WithSuffix("bucket") + return dims.AddTags( + fmt.Sprintf("lower_bound:%s", lowerBound), + fmt.Sprintf("upper_bound:%s", upperBound), + ) +} + func TestMapDeltaHistogramMetrics(t *testing.T) { ts := pdata.NewTimestampFromTime(time.Now()) slice := pdata.NewHistogramDataPointSlice() @@ -538,48 +537,46 @@ func TestMapDeltaHistogramMetrics(t *testing.T) { point.SetExplicitBounds([]float64{0}) point.SetTimestamp(ts) + dims := newDims("doubleHist.test") + dimsTags := dims.AddTags("attribute_tag:attribute_value") counts := []metric{ - newCount("doubleHist.test.count", uint64(ts), 20, []string{}), - newCount("doubleHist.test.sum", uint64(ts), math.Pi, []string{}), + newCount(dims.WithSuffix("count"), uint64(ts), 20), + newCount(dims.WithSuffix("sum"), uint64(ts), math.Pi), } countsAttributeTags := []metric{ - newCount("doubleHist.test.count", uint64(ts), 20, []string{"attribute_tag:attribute_value"}), - newCount("doubleHist.test.sum", uint64(ts), math.Pi, []string{"attribute_tag:attribute_value"}), + newCount(dimsTags.WithSuffix("count"), uint64(ts), 20), + newCount(dimsTags.WithSuffix("sum"), uint64(ts), math.Pi), } bucketsCounts := 
[]metric{ - newCount("doubleHist.test.bucket", uint64(ts), 2, []string{"lower_bound:-inf", "upper_bound:0"}), - newCount("doubleHist.test.bucket", uint64(ts), 18, []string{"lower_bound:0", "upper_bound:inf"}), + newCount(dimsWithBucket(dims, "-inf", "0"), uint64(ts), 2), + newCount(dimsWithBucket(dims, "0", "inf"), uint64(ts), 18), } bucketsCountsAttributeTags := []metric{ - newCount("doubleHist.test.bucket", uint64(ts), 2, []string{"lower_bound:-inf", "upper_bound:0", "attribute_tag:attribute_value"}), - newCount("doubleHist.test.bucket", uint64(ts), 18, []string{"lower_bound:0", "upper_bound:inf", "attribute_tag:attribute_value"}), + newCount(dimsWithBucket(dimsTags, "-inf", "0"), uint64(ts), 2), + newCount(dimsWithBucket(dimsTags, "0", "inf"), uint64(ts), 18), } sketches := []sketch{ - newSketch("doubleHist.test", uint64(ts), summary.Summary{ + newSketch(dims, uint64(ts), summary.Summary{ Min: 0, Max: 0, Sum: 0, Avg: 0, Cnt: 20, - }, - []string{}, - ), + }), } sketchesAttributeTags := []sketch{ - newSketch("doubleHist.test", uint64(ts), summary.Summary{ + newSketch(dimsTags, uint64(ts), summary.Summary{ Min: 0, Max: 0, Sum: 0, Avg: 0, Cnt: 20, - }, - []string{"attribute_tag:attribute_value"}, - ), + }), } ctx := context.Background() @@ -597,7 +594,6 @@ func TestMapDeltaHistogramMetrics(t *testing.T) { name: "No buckets: send count & sum metrics, no attribute tags", histogramMode: HistogramModeNoBuckets, sendCountSum: true, - tags: []string{}, expectedMetrics: counts, expectedSketches: []sketch{}, }, @@ -681,8 +677,8 @@ func TestMapDeltaHistogramMetrics(t *testing.T) { tr.cfg.HistMode = testInstance.histogramMode tr.cfg.SendCountSum = testInstance.sendCountSum consumer := &mockFullConsumer{} - - tr.mapHistogramMetrics(ctx, consumer, "doubleHist.test", slice, delta, testInstance.tags, "") + dims := metricsDimensions{name: "doubleHist.test", tags: testInstance.tags} + tr.mapHistogramMetrics(ctx, consumer, dims, slice, delta) assert.ElementsMatch(t, 
consumer.metrics, testInstance.expectedMetrics) assert.ElementsMatch(t, consumer.sketches, testInstance.expectedSketches) }) @@ -705,26 +701,25 @@ func TestMapCumulativeHistogramMetrics(t *testing.T) { point.SetExplicitBounds([]float64{0}) point.SetTimestamp(seconds(2)) + dims := newDims("doubleHist.test") counts := []metric{ - newCount("doubleHist.test.count", uint64(seconds(2)), 30, []string{}), - newCount("doubleHist.test.sum", uint64(seconds(2)), 20, []string{}), + newCount(dims.WithSuffix("count"), uint64(seconds(2)), 30), + newCount(dims.WithSuffix("sum"), uint64(seconds(2)), 20), } bucketsCounts := []metric{ - newCount("doubleHist.test.bucket", uint64(seconds(2)), 11, []string{"lower_bound:-inf", "upper_bound:0"}), - newCount("doubleHist.test.bucket", uint64(seconds(2)), 19, []string{"lower_bound:0", "upper_bound:inf"}), + newCount(dimsWithBucket(dims, "-inf", "0"), uint64(seconds(2)), 11), + newCount(dimsWithBucket(dims, "0", "inf"), uint64(seconds(2)), 19), } sketches := []sketch{ - newSketch("doubleHist.test", uint64(seconds(2)), summary.Summary{ + newSketch(dims, uint64(seconds(2)), summary.Summary{ Min: 0, Max: 0, Sum: 0, Avg: 0, Cnt: 30, - }, - []string{}, - ), + }), } ctx := context.Background() @@ -780,8 +775,8 @@ func TestMapCumulativeHistogramMetrics(t *testing.T) { tr.cfg.HistMode = testInstance.histogramMode tr.cfg.SendCountSum = testInstance.sendCountSum consumer := &mockFullConsumer{} - - tr.mapHistogramMetrics(ctx, consumer, "doubleHist.test", slice, delta, []string{}, "") + dims := newDims("doubleHist.test") + tr.mapHistogramMetrics(ctx, consumer, dims, slice, delta) assert.ElementsMatch(t, consumer.metrics, testInstance.expectedMetrics) assert.ElementsMatch(t, consumer.sketches, testInstance.expectedSketches) }) @@ -800,7 +795,8 @@ func TestLegacyBucketsTags(t *testing.T) { pointOne.SetExplicitBounds([]float64{0}) pointOne.SetTimestamp(seconds(0)) consumer := &mockTimeSeriesConsumer{} - tr.getLegacyBuckets(ctx, consumer, 
"test.histogram.one", pointOne, true, tags, "") + dims := metricsDimensions{name: "test.histogram.one", tags: tags} + tr.getLegacyBuckets(ctx, consumer, dims, pointOne, true) seriesOne := consumer.metrics pointTwo := pdata.NewHistogramDataPoint() @@ -808,7 +804,8 @@ func TestLegacyBucketsTags(t *testing.T) { pointTwo.SetExplicitBounds([]float64{1}) pointTwo.SetTimestamp(seconds(0)) consumer = &mockTimeSeriesConsumer{} - tr.getLegacyBuckets(ctx, consumer, "test.histogram.two", pointTwo, true, tags, "") + dims = metricsDimensions{name: "test.histogram.two", tags: tags} + tr.getLegacyBuckets(ctx, consumer, dims, pointTwo, true) seriesTwo := consumer.metrics assert.ElementsMatch(t, seriesOne[0].tags, []string{"lower_bound:-inf", "upper_bound:0"}) @@ -871,8 +868,8 @@ func TestMapSummaryMetrics(t *testing.T) { newTranslator := func(tags []string, quantiles bool) *Translator { c := newTestCache() - c.cache.Set(c.metricDimensionsToMapKey("summary.example.count", tags), numberCounter{0, 0, 1}, gocache.NoExpiration) - c.cache.Set(c.metricDimensionsToMapKey("summary.example.sum", tags), numberCounter{0, 0, 1}, gocache.NoExpiration) + c.cache.Set((&metricsDimensions{name: "summary.example.count", tags: tags}).String(), numberCounter{0, 0, 1}, gocache.NoExpiration) + c.cache.Set((&metricsDimensions{name: "summary.example.sum", tags: tags}).String(), numberCounter{0, 0, 1}, gocache.NoExpiration) options := []Option{WithFallbackHostnameProvider(testProvider("fallbackHostname"))} if quantiles { options = append(options, WithQuantiles()) @@ -883,53 +880,57 @@ func TestMapSummaryMetrics(t *testing.T) { return tr } + dims := newDims("summary.example") noQuantiles := []metric{ - newCount("summary.example.count", uint64(ts), 100, []string{}), - newCount("summary.example.sum", uint64(ts), 10_000, []string{}), + newCount(dims.WithSuffix("count"), uint64(ts), 100), + newCount(dims.WithSuffix("sum"), uint64(ts), 10_000), } + qBaseDims := dims.WithSuffix("quantile") quantiles := []metric{ - 
newGauge("summary.example.quantile", uint64(ts), 0, []string{"quantile:0"}), - newGauge("summary.example.quantile", uint64(ts), 100, []string{"quantile:0.5"}), - newGauge("summary.example.quantile", uint64(ts), 500, []string{"quantile:0.999"}), - newGauge("summary.example.quantile", uint64(ts), 600, []string{"quantile:1.0"}), + newGauge(qBaseDims.AddTags("quantile:0"), uint64(ts), 0), + newGauge(qBaseDims.AddTags("quantile:0.5"), uint64(ts), 100), + newGauge(qBaseDims.AddTags("quantile:0.999"), uint64(ts), 500), + newGauge(qBaseDims.AddTags("quantile:1.0"), uint64(ts), 600), } ctx := context.Background() tr := newTranslator([]string{}, false) consumer := &mockTimeSeriesConsumer{} - tr.mapSummaryMetrics(ctx, consumer, "summary.example", slice, []string{}, "") + tr.mapSummaryMetrics(ctx, consumer, dims, slice) assert.ElementsMatch(t, consumer.metrics, noQuantiles, ) tr = newTranslator([]string{}, true) consumer = &mockTimeSeriesConsumer{} - tr.mapSummaryMetrics(ctx, consumer, "summary.example", slice, []string{}, "") + tr.mapSummaryMetrics(ctx, consumer, dims, slice) assert.ElementsMatch(t, consumer.metrics, append(noQuantiles, quantiles...), ) + dimsTags := dims.AddTags("attribute_tag:attribute_value") noQuantilesAttr := []metric{ - newCount("summary.example.count", uint64(ts), 100, []string{"attribute_tag:attribute_value"}), - newCount("summary.example.sum", uint64(ts), 10_000, []string{"attribute_tag:attribute_value"}), + newCount(dimsTags.WithSuffix("count"), uint64(ts), 100), + newCount(dimsTags.WithSuffix("sum"), uint64(ts), 10_000), } + qBaseDimsTags := dimsTags.WithSuffix("quantile") quantilesAttr := []metric{ - newGauge("summary.example.quantile", uint64(ts), 0, []string{"quantile:0", "attribute_tag:attribute_value"}), - newGauge("summary.example.quantile", uint64(ts), 100, []string{"quantile:0.5", "attribute_tag:attribute_value"}), - newGauge("summary.example.quantile", uint64(ts), 500, []string{"quantile:0.999", "attribute_tag:attribute_value"}), - 
newGauge("summary.example.quantile", uint64(ts), 600, []string{"quantile:1.0", "attribute_tag:attribute_value"}), + newGauge(qBaseDimsTags.AddTags("quantile:0"), uint64(ts), 0), + newGauge(qBaseDimsTags.AddTags("quantile:0.5"), uint64(ts), 100), + newGauge(qBaseDimsTags.AddTags("quantile:0.999"), uint64(ts), 500), + newGauge(qBaseDimsTags.AddTags("quantile:1.0"), uint64(ts), 600), } tr = newTranslator([]string{"attribute_tag:attribute_value"}, false) consumer = &mockTimeSeriesConsumer{} - tr.mapSummaryMetrics(ctx, consumer, "summary.example", slice, []string{"attribute_tag:attribute_value"}, "") + tr.mapSummaryMetrics(ctx, consumer, dimsTags, slice) assert.ElementsMatch(t, consumer.metrics, noQuantilesAttr, ) tr = newTranslator([]string{"attribute_tag:attribute_value"}, true) consumer = &mockTimeSeriesConsumer{} - tr.mapSummaryMetrics(ctx, consumer, "summary.example", slice, []string{"attribute_tag:attribute_value"}, "") + tr.mapSummaryMetrics(ctx, consumer, dimsTags, slice) assert.ElementsMatch(t, consumer.metrics, append(noQuantilesAttr, quantilesAttr...), @@ -1109,19 +1110,22 @@ func createTestMetrics(additionalAttributes map[string]string, name, version str } func newGaugeWithHostname(name string, val float64, tags []string) metric { - m := newGauge(name, 0, val, tags) + dims := newDims(name) + m := newGauge(dims.AddTags(tags...), 0, val) m.host = testHostname return m } func newCountWithHostname(name string, val float64, seconds uint64, tags []string) metric { - m := newCount(name, seconds*1e9, val, tags) + dims := newDims(name) + m := newCount(dims.AddTags(tags...), seconds*1e9, val) m.host = testHostname return m } func newSketchWithHostname(name string, summary summary.Summary, tags []string) sketch { - s := newSketch(name, 0, summary, tags) + dims := newDims(name) + s := newSketch(dims.AddTags(tags...), 0, summary) s.host = testHostname return s } diff --git a/exporter/datadogexporter/internal/translator/sketches_test.go 
b/exporter/datadogexporter/internal/translator/sketches_test.go index 3919562e7bda..f0482c71160d 100644 --- a/exporter/datadogexporter/internal/translator/sketches_test.go +++ b/exporter/datadogexporter/internal/translator/sketches_test.go @@ -106,12 +106,12 @@ func TestHistogramSketches(t *testing.T) { cfg := quantile.Default() ctx := context.Background() tr := newTranslator(t, zap.NewNop()) - + dims := metricsDimensions{name: "test"} for _, test := range tests { t.Run(test.name, func(t *testing.T) { p := fromCDF(test.cdf) consumer := &sketchConsumer{} - tr.getSketchBuckets(ctx, consumer, "test", p, true, []string{}, "") + tr.getSketchBuckets(ctx, consumer, dims, p, true) sk := consumer.sk // Check the minimum is 0.0 @@ -199,11 +199,12 @@ func TestInfiniteBounds(t *testing.T) { ctx := context.Background() tr := newTranslator(t, zap.NewNop()) + dims := metricsDimensions{name: "test"} for _, testInstance := range tests { t.Run(testInstance.name, func(t *testing.T) { p := testInstance.getHist() consumer := &sketchConsumer{} - tr.getSketchBuckets(ctx, consumer, "test", p, true, []string{}, "") + tr.getSketchBuckets(ctx, consumer, dims, p, true) sk := consumer.sk assert.InDelta(t, sk.Basic.Sum, p.Sum(), 1) assert.Equal(t, uint64(sk.Basic.Cnt), p.Count()) diff --git a/exporter/datadogexporter/internal/translator/ttlcache.go b/exporter/datadogexporter/internal/translator/ttlcache.go index 7db77f966e0b..2e7c54068901 100644 --- a/exporter/datadogexporter/internal/translator/ttlcache.go +++ b/exporter/datadogexporter/internal/translator/ttlcache.go @@ -15,17 +15,11 @@ package translator import ( - "sort" - "strings" "time" gocache "github.com/patrickmn/go-cache" ) -const ( - metricKeySeparator = string(byte(0)) -) - type ttlCache struct { cache *gocache.Cache } @@ -43,53 +37,27 @@ func newTTLCache(sweepInterval int64, deltaTTL int64) *ttlCache { return &ttlCache{cache} } -// Uses a logic similar to what is done in the span processor to build metric keys: -// 
https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/b2327211df976e0a57ef0425493448988772a16b/processor/spanmetricsprocessor/processor.go#L353-L387 -// TODO: make this a public util function? -func concatDimensionValue(metricKeyBuilder *strings.Builder, value string) { - metricKeyBuilder.WriteString(value) - metricKeyBuilder.WriteString(metricKeySeparator) -} - -// metricDimensionsToMapKey maps name and tags to a string to use as an identifier -// The tags order does not matter -func (*ttlCache) metricDimensionsToMapKey(name string, tags []string) string { - var metricKeyBuilder strings.Builder - - dimensions := make([]string, len(tags)) - copy(dimensions, tags) - - dimensions = append(dimensions, name) - sort.Strings(dimensions) - - for _, dim := range dimensions { - concatDimensionValue(&metricKeyBuilder, dim) - } - return metricKeyBuilder.String() -} - // Diff submits a new value for a given non-monotonic metric and returns the difference with the // last submitted value (ordered by timestamp). The diff value is only valid if `ok` is true. -func (t *ttlCache) Diff(name string, tags []string, startTs, ts uint64, val float64) (float64, bool) { - return t.putAndGetDiff(name, tags, false, startTs, ts, val) +func (t *ttlCache) Diff(dimensions metricsDimensions, startTs, ts uint64, val float64) (float64, bool) { + return t.putAndGetDiff(dimensions, false, startTs, ts, val) } // MonotonicDiff submits a new value for a given monotonic metric and returns the difference with the // last submitted value (ordered by timestamp). The diff value is only valid if `ok` is true. 
-func (t *ttlCache) MonotonicDiff(name string, tags []string, startTs, ts uint64, val float64) (float64, bool) { - return t.putAndGetDiff(name, tags, true, startTs, ts, val) +func (t *ttlCache) MonotonicDiff(dimensions metricsDimensions, startTs, ts uint64, val float64) (float64, bool) { + return t.putAndGetDiff(dimensions, true, startTs, ts, val) } // putAndGetDiff submits a new value for a given metric and returns the difference with the // last submitted value (ordered by timestamp). The diff value is only valid if `ok` is true. func (t *ttlCache) putAndGetDiff( - name string, - tags []string, + dimensions metricsDimensions, monotonic bool, startTs, ts uint64, val float64, ) (dx float64, ok bool) { - key := t.metricDimensionsToMapKey(name, tags) + key := dimensions.String() if c, found := t.cache.Get(key); found { cnt := c.(numberCounter) if cnt.ts > ts { diff --git a/exporter/datadogexporter/internal/translator/ttlcache_test.go b/exporter/datadogexporter/internal/translator/ttlcache_test.go index 25116097a4e4..fdfc513a9265 100644 --- a/exporter/datadogexporter/internal/translator/ttlcache_test.go +++ b/exporter/datadogexporter/internal/translator/ttlcache_test.go @@ -25,16 +25,18 @@ func newTestCache() *ttlCache { return cache } +var dims metricsDimensions = metricsDimensions{name: "test"} + func TestMonotonicDiffUnknownStart(t *testing.T) { startTs := uint64(0) // equivalent to start being unset prevPts := newTestCache() - _, ok := prevPts.MonotonicDiff("test", []string{}, startTs, 1, 5) + _, ok := prevPts.MonotonicDiff(dims, startTs, 1, 5) assert.False(t, ok, "expected no diff: first point") - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 0, 0) + _, ok = prevPts.MonotonicDiff(dims, startTs, 0, 0) assert.False(t, ok, "expected no diff: old point") - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 2, 2) + _, ok = prevPts.MonotonicDiff(dims, startTs, 2, 2) assert.False(t, ok, "expected no diff: new < old") - dx, ok := 
prevPts.MonotonicDiff("test", []string{}, startTs, 3, 4) + dx, ok := prevPts.MonotonicDiff(dims, startTs, 3, 4) assert.True(t, ok, "expected diff: no startTs, old >= new") assert.Equal(t, 2.0, dx, "expected diff 2.0 with (0,2,2) value") } @@ -42,14 +44,14 @@ func TestMonotonicDiffUnknownStart(t *testing.T) { func TestDiffUnknownStart(t *testing.T) { startTs := uint64(0) // equivalent to start being unset prevPts := newTestCache() - _, ok := prevPts.Diff("test", []string{}, startTs, 1, 5) + _, ok := prevPts.Diff(dims, startTs, 1, 5) assert.False(t, ok, "expected no diff: first point") - _, ok = prevPts.Diff("test", []string{}, startTs, 0, 0) + _, ok = prevPts.Diff(dims, startTs, 0, 0) assert.False(t, ok, "expected no diff: old point") - dx, ok := prevPts.Diff("test", []string{}, startTs, 2, 2) + dx, ok := prevPts.Diff(dims, startTs, 2, 2) assert.True(t, ok, "expected diff: no startTs, not monotonic") assert.Equal(t, -3.0, dx, "expected diff -3.0 with (0,1,5) value") - dx, ok = prevPts.Diff("test", []string{}, startTs, 3, 4) + dx, ok = prevPts.Diff(dims, startTs, 3, 4) assert.True(t, ok, "expected diff: no startTs, old >= new") assert.Equal(t, 2.0, dx, "expected diff 2.0 with (0,2,2) value") } @@ -57,27 +59,27 @@ func TestDiffUnknownStart(t *testing.T) { func TestMonotonicDiffKnownStart(t *testing.T) { startTs := uint64(1) prevPts := newTestCache() - _, ok := prevPts.MonotonicDiff("test", []string{}, startTs, 1, 5) + _, ok := prevPts.MonotonicDiff(dims, startTs, 1, 5) assert.False(t, ok, "expected no diff: first point") - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 0, 0) + _, ok = prevPts.MonotonicDiff(dims, startTs, 0, 0) assert.False(t, ok, "expected no diff: old point") - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 2, 2) + _, ok = prevPts.MonotonicDiff(dims, startTs, 2, 2) assert.False(t, ok, "expected no diff: new < old") - dx, ok := prevPts.MonotonicDiff("test", []string{}, startTs, 3, 4) + dx, ok := prevPts.MonotonicDiff(dims, 
startTs, 3, 4) assert.True(t, ok, "expected diff: same startTs, old >= new") assert.Equal(t, 2.0, dx, "expected diff 2.0 with (0,2,2) value") startTs = uint64(4) // simulate reset with startTs = ts - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, startTs, 8) + _, ok = prevPts.MonotonicDiff(dims, startTs, startTs, 8) assert.False(t, ok, "expected no diff: reset with unknown start") - dx, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 5, 9) + dx, ok = prevPts.MonotonicDiff(dims, startTs, 5, 9) assert.True(t, ok, "expected diff: same startTs, old >= new") assert.Equal(t, 1.0, dx, "expected diff 1.0 with (4,4,8) value") startTs = uint64(6) - _, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 7, 1) + _, ok = prevPts.MonotonicDiff(dims, startTs, 7, 1) assert.False(t, ok, "expected no diff: reset with known start") - dx, ok = prevPts.MonotonicDiff("test", []string{}, startTs, 8, 10) + dx, ok = prevPts.MonotonicDiff(dims, startTs, 8, 10) assert.True(t, ok, "expected diff: same startTs, old >= new") assert.Equal(t, 9.0, dx, "expected diff 9.0 with (6,7,1) value") } @@ -85,61 +87,28 @@ func TestMonotonicDiffKnownStart(t *testing.T) { func TestDiffKnownStart(t *testing.T) { startTs := uint64(1) prevPts := newTestCache() - _, ok := prevPts.Diff("test", []string{}, startTs, 1, 5) + _, ok := prevPts.Diff(dims, startTs, 1, 5) assert.False(t, ok, "expected no diff: first point") - _, ok = prevPts.Diff("test", []string{}, startTs, 0, 0) + _, ok = prevPts.Diff(dims, startTs, 0, 0) assert.False(t, ok, "expected no diff: old point") - dx, ok := prevPts.Diff("test", []string{}, startTs, 2, 2) + dx, ok := prevPts.Diff(dims, startTs, 2, 2) assert.True(t, ok, "expected diff: same startTs, not monotonic") assert.Equal(t, -3.0, dx, "expected diff -3.0 with (1,1,5) point") - dx, ok = prevPts.Diff("test", []string{}, startTs, 3, 4) + dx, ok = prevPts.Diff(dims, startTs, 3, 4) assert.True(t, ok, "expected diff: same startTs, not monotonic") assert.Equal(t, 2.0, dx, 
"expected diff 2.0 with (0,2,2) value") startTs = uint64(4) // simulate reset with startTs = ts - _, ok = prevPts.Diff("test", []string{}, startTs, startTs, 8) + _, ok = prevPts.Diff(dims, startTs, startTs, 8) assert.False(t, ok, "expected no diff: reset with unknown start") - dx, ok = prevPts.Diff("test", []string{}, startTs, 5, 9) + dx, ok = prevPts.Diff(dims, startTs, 5, 9) assert.True(t, ok, "expected diff: same startTs, not monotonic") assert.Equal(t, 1.0, dx, "expected diff 1.0 with (4,4,8) value") startTs = uint64(6) - _, ok = prevPts.Diff("test", []string{}, startTs, 7, 1) + _, ok = prevPts.Diff(dims, startTs, 7, 1) assert.False(t, ok, "expected no diff: reset with known start") - dx, ok = prevPts.Diff("test", []string{}, startTs, 8, 10) + dx, ok = prevPts.Diff(dims, startTs, 8, 10) assert.True(t, ok, "expected diff: same startTs, not monotonic") assert.Equal(t, 9.0, dx, "expected diff 9.0 with (6,7,1) value") } - -func TestMetricDimensionsToMapKey(t *testing.T) { - metricName := "metric.name" - c := newTestCache() - noTags := c.metricDimensionsToMapKey(metricName, []string{}) - someTags := c.metricDimensionsToMapKey(metricName, []string{"key1:val1", "key2:val2"}) - sameTags := c.metricDimensionsToMapKey(metricName, []string{"key2:val2", "key1:val1"}) - diffTags := c.metricDimensionsToMapKey(metricName, []string{"key3:val3"}) - - assert.NotEqual(t, noTags, someTags) - assert.NotEqual(t, someTags, diffTags) - assert.Equal(t, someTags, sameTags) -} - -func TestMetricDimensionsToMapKeyNoTagsChange(t *testing.T) { - // The original metricDimensionsToMapKey had an issue where: - // - if the capacity of the tags array passed to it was higher than its length - // - and the metric name is earlier (in alphabetical order) than one of the tags - // then the original tag array would be modified (without a reallocation, since there is enough capacity), - // and would contain a tag labeled as the metric name, while the final tag (in alphabetical order) - // would get 
left out. - // This test checks that this doesn't happen anymore. - - metricName := "a.metric.name" - c := newTestCache() - - originalTags := make([]string, 2, 3) - originalTags[0] = "key1:val1" - originalTags[1] = "key2:val2" - c.metricDimensionsToMapKey(metricName, originalTags) - assert.Equal(t, []string{"key1:val1", "key2:val2"}, originalTags) - -} From e96a65bb7fae87506fa1b22a5b0472e0c0d6df3a Mon Sep 17 00:00:00 2001 From: Pablo Baeyens Date: Tue, 23 Nov 2021 12:18:41 +0100 Subject: [PATCH 2/3] Defensively copy tags on AddTags --- .../datadogexporter/internal/translator/dimensions.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/exporter/datadogexporter/internal/translator/dimensions.go b/exporter/datadogexporter/internal/translator/dimensions.go index ff7c6a41bc86..713849cd7aac 100644 --- a/exporter/datadogexporter/internal/translator/dimensions.go +++ b/exporter/datadogexporter/internal/translator/dimensions.go @@ -45,13 +45,15 @@ func getTags(labels pdata.AttributeMap) []string { return tags } -// AddTags to metrics dimensions. The argument may be modified. +// AddTags to metrics dimensions. func (m *metricsDimensions) AddTags(tags ...string) metricsDimensions { + // defensively copy the tags + newTags := make([]string, 0, len(tags)+len(m.tags)) + newTags = append(newTags, tags...) + newTags = append(newTags, m.tags...) return metricsDimensions{ name: m.name, - // append the field to the passed argument, - // so that the slice we modify is the one we get as an argument. 
- tags: append(tags, m.tags...), + tags: newTags, host: m.host, } } From 7c3482bb3caea3adc3ba437bca68d086688758f8 Mon Sep 17 00:00:00 2001 From: Pablo Baeyens Date: Tue, 23 Nov 2021 12:26:56 +0100 Subject: [PATCH 3/3] [exporter/datadogexporter] Fix m.host and add test for this --- .../internal/translator/dimensions.go | 2 +- .../internal/translator/dimensions_test.go | 16 ++++++++++------ 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/exporter/datadogexporter/internal/translator/dimensions.go b/exporter/datadogexporter/internal/translator/dimensions.go index 713849cd7aac..2e553a889f98 100644 --- a/exporter/datadogexporter/internal/translator/dimensions.go +++ b/exporter/datadogexporter/internal/translator/dimensions.go @@ -89,7 +89,7 @@ func (m *metricsDimensions) String() string { copy(dimensions, m.tags) dimensions = append(dimensions, fmt.Sprintf("name:%s", m.name)) - dimensions = append(dimensions, fmt.Sprintf("host:%s", m.name)) + dimensions = append(dimensions, fmt.Sprintf("host:%s", m.host)) sort.Strings(dimensions) for _, dim := range dimensions { diff --git a/exporter/datadogexporter/internal/translator/dimensions_test.go b/exporter/datadogexporter/internal/translator/dimensions_test.go index fe1546aaff9d..98d9f07d5467 100644 --- a/exporter/datadogexporter/internal/translator/dimensions_test.go +++ b/exporter/datadogexporter/internal/translator/dimensions_test.go @@ -36,19 +36,23 @@ func TestWithAttributeMap(t *testing.T) { } func TestMetricDimensionsString(t *testing.T) { - getKey := func(name string, tags []string) string { - dims := metricsDimensions{name: name, tags: tags} + getKey := func(name string, tags []string, host string) string { + dims := metricsDimensions{name: name, tags: tags, host: host} return dims.String() } metricName := "metric.name" - noTags := getKey(metricName, []string{}) - someTags := getKey(metricName, []string{"key1:val1", "key2:val2"}) - sameTags := getKey(metricName, []string{"key2:val2", "key1:val1"}) - 
diffTags := getKey(metricName, []string{"key3:val3"}) + hostOne := "host-one" + hostTwo := "host-two" + noTags := getKey(metricName, []string{}, hostOne) + someTags := getKey(metricName, []string{"key1:val1", "key2:val2"}, hostOne) + sameTags := getKey(metricName, []string{"key2:val2", "key1:val1"}, hostOne) + diffTags := getKey(metricName, []string{"key3:val3"}, hostOne) + diffHost := getKey(metricName, []string{"key1:val1", "key2:val2"}, hostTwo) assert.NotEqual(t, noTags, someTags) assert.NotEqual(t, someTags, diffTags) assert.Equal(t, someTags, sameTags) + assert.NotEqual(t, someTags, diffHost) } func TestMetricDimensionsStringNoTagsChange(t *testing.T) {