Skip to content

Commit

Permalink
[processor/groupbyattrsprocessor]: fix dropping metadata when processin…
Browse files Browse the repository at this point in the history
…g metrics (#33781)

**Description:**
Fixes the metadata dropping when processing metrics

**Link to tracking Issue:** #33419 

**Testing:**
- unit tests

---------

Signed-off-by: odubajDT <ondrej.dubaj@dynatrace.com>
  • Loading branch information
odubajDT committed Jul 15, 2024
1 parent 62fa62f commit a078f7a
Show file tree
Hide file tree
Showing 3 changed files with 81 additions and 0 deletions.
27 changes: 27 additions & 0 deletions .chloggen/fix-metrics-metadata.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: bug_fix

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: processor/groupbyattrsprocessor

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: "Fix dropping of metadata fields when processing metrics."

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [33419]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
1 change: 1 addition & 0 deletions processor/groupbyattrsprocessor/processor.go
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,7 @@ func getMetricInInstrumentationLibrary(ilm pmetric.ScopeMetrics, searchedMetric
metric.SetDescription(searchedMetric.Description())
metric.SetName(searchedMetric.Name())
metric.SetUnit(searchedMetric.Unit())
searchedMetric.Metadata().CopyTo(metric.Metadata())

// Move other special type specific values
//exhaustive:enforce
Expand Down
53 changes: 53 additions & 0 deletions processor/groupbyattrsprocessor/processor_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -869,6 +869,59 @@ func TestCompacting(t *testing.T) {
}
}

// Test_GetMetricInInstrumentationLibrary verifies that
// getMetricInInstrumentationLibrary preserves a metric's metadata
// (regression test for #33419), both when the metric already exists in
// the scope and when a fresh one must be created.
func Test_GetMetricInInstrumentationLibrary(t *testing.T) {
	// Input metric with a data point and metadata.
	m := pmetric.NewMetric()
	m.SetName("metric")
	m.SetDescription("description")
	m.SetUnit("unit")
	d := m.SetEmptyGauge().DataPoints().AppendEmpty()
	d.SetDoubleValue(1.0)

	// Expected metric without the data point: the processor moves
	// data points between metrics, so they are not copied into the
	// resulting metric.
	m2 := pmetric.NewMetric()
	m2.SetName("metric")
	m2.SetDescription("description")
	m2.SetUnit("unit")
	m2.SetEmptyGauge()

	// Both input and expected metrics carry the same metadata; it must
	// survive the copy performed by the processor.
	metadata := pcommon.NewMap()
	metadata.PutStr("key", "val")
	metadata.CopyTo(m.Metadata())
	metadata.CopyTo(m2.Metadata())

	sm := pmetric.NewScopeMetrics()
	m.CopyTo(sm.Metrics().AppendEmpty())

	tests := []struct {
		name     string
		ilm      pmetric.ScopeMetrics
		searched pmetric.Metric
		want     pmetric.Metric
	}{
		{
			name:     "existing metric",
			ilm:      sm,
			searched: m,
			want:     m,
		},
		{
			name:     "non-existing metric - datapoints will be removed",
			ilm:      pmetric.NewScopeMetrics(),
			searched: m,
			want:     m2,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// require.Equal takes (t, expected, actual); the original call
			// had the arguments swapped, which yields misleading
			// "expected/actual" labels in failure diffs.
			require.Equal(t, tt.want, getMetricInInstrumentationLibrary(tt.ilm, tt.searched))
		})
	}
}

func BenchmarkCompacting(bb *testing.B) {
runs := []struct {
ilCount int
Expand Down

0 comments on commit a078f7a

Please sign in to comment.