[receiver/prometheusreceiver] Use _created metric, if present, to set StartTimeUnixNano on Sum, Histogram, Summaries (#17498)

Use _created metric, if present, to set StartTimeUnixNano on Sum, Histogram, Summary metrics.
Resolves #12428
kovrus authored Jan 12, 2023
1 parent 98368b6 commit 6367a68
Showing 10 changed files with 284 additions and 38 deletions.
22 changes: 22 additions & 0 deletions .chloggen/prw-receiver-use-created-metric.yaml
@@ -0,0 +1,22 @@
# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: bug_fix

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: receiver/prometheusreceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Use `_created` metrics, if present, to set `StartTimeUnixNano` for Sum, Histogram and Summary metrics.

# One or more tracking issues related to the change
issues: [12428]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: |
  If the `_created` metric is present in a Histogram, Summary or Counter
  metric family, only then is its value used to set `StartTimeUnixNano` on the
  relevant OTLP metric, and the `_created` series is dropped afterwards.
  Otherwise, it is converted to a monotonic OTLP Sum metric.
  This behaviour is disabled by default. Use the `receiver.prometheusreceiver.UseCreatedMetric`
  feature gate to enable it.
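As a concrete illustration of the note above, here is a hedged, standalone sketch of the OTLP Sum data point that results when a counter family also exposes a `_created` series. The metric name `http_requests_total` and all sample values are invented for this example, and `pdata` is used directly only to show the expected fields; the receiver builds the point through its own internal path.

```go
package main

import (
	"fmt"
	"time"

	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
)

func main() {
	// Hypothetical scrape (values invented):
	//   http_requests_total    33.7  at t = 13ms
	//   http_requests_created  150   (Unix seconds)
	m := pmetric.NewMetric()
	m.SetName("http_requests_total")
	sum := m.SetEmptySum()
	sum.SetIsMonotonic(true)

	dp := sum.DataPoints().AppendEmpty()
	dp.SetDoubleValue(33.7)
	dp.SetTimestamp(pcommon.Timestamp(13 * time.Millisecond))
	// With the feature gate enabled, the _created value (Unix seconds) becomes
	// StartTimeUnixNano and no separate _created metric is emitted.
	dp.SetStartTimestamp(pcommon.Timestamp(150 * time.Second))

	fmt.Println(dp.StartTimestamp(), dp.Timestamp())
}
```

The unit tests added in `metricfamily_test.go` later in this diff check this kind of expectation for counters, histograms, and summaries.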
10 changes: 10 additions & 0 deletions receiver/prometheusreceiver/README.md
@@ -50,6 +50,16 @@ you must escape them using `$$`.
prometheus --config.file=prom.yaml
```

**Feature gates**:

- `receiver.prometheusreceiver.UseCreatedMetric`: Start time for Summary, Histogram
and Sum metrics can be retrieved from `_created` metrics. Currently, this behaviour
is disabled by default. To enable it, use the following feature gate option:

```shell
"--feature-gates=receiver.prometheusreceiver.UseCreatedMetric"
```

You can copy and paste that same configuration under:

```yaml
14 changes: 12 additions & 2 deletions receiver/prometheusreceiver/factory.go
@@ -28,10 +28,20 @@ import (
// This file implements config for Prometheus receiver.

const (
typeStr = "prometheus"
stability = component.StabilityLevelBeta
typeStr = "prometheus"
stability = component.StabilityLevelBeta
useCreatedMetricGateID = "receiver.prometheusreceiver.UseCreatedMetric"
)

func init() {
featuregate.GetRegistry().MustRegisterID(
useCreatedMetricGateID,
featuregate.StageAlpha,
featuregate.WithRegisterDescription("When enabled, the Prometheus receiver will"+
" retrieve the start time for Summary, Histogram and Sum metrics from _created metric"),
)
}

var errRenamingDisallowed = errors.New("metric renaming using metric_relabel_configs is disallowed")

// NewFactory creates a new Prometheus receiver factory.
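As a side note on how a gate like this is typically consumed: the `appendable.go` change below threads a `useCreatedMetric` flag into the metrics adjuster, and such a flag can be derived from the feature-gate registry. The sketch below assumes the same featuregate API version the diff uses (`GetRegistry`, `MustRegisterID`, `StageAlpha`) and that the registry exposes an `IsEnabled` lookup; it is illustrative, not the receiver's actual wiring.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/featuregate"
)

const useCreatedMetricGateID = "receiver.prometheusreceiver.UseCreatedMetric"

func main() {
	// Mirrors the registration performed by the receiver's init() above.
	featuregate.GetRegistry().MustRegisterID(useCreatedMetricGateID, featuregate.StageAlpha)

	// Reports false unless the collector is started with
	// --feature-gates=receiver.prometheusreceiver.UseCreatedMetric
	useCreatedMetric := featuregate.GetRegistry().IsEnabled(useCreatedMetricGateID)
	fmt.Println("UseCreatedMetric enabled:", useCreatedMetric)
}
```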
3 changes: 2 additions & 1 deletion receiver/prometheusreceiver/internal/appendable.go
@@ -47,11 +47,12 @@ func NewAppendable(
gcInterval time.Duration,
useStartTimeMetric bool,
startTimeMetricRegex *regexp.Regexp,
useCreatedMetric bool,
externalLabels labels.Labels,
registry *featuregate.Registry) (storage.Appendable, error) {
var metricAdjuster MetricsAdjuster
if !useStartTimeMetric {
metricAdjuster = NewInitialPointAdjuster(set.Logger, gcInterval)
metricAdjuster = NewInitialPointAdjuster(set.Logger, gcInterval, useCreatedMetric)
} else {
metricAdjuster = NewStartTimeMetricAdjuster(set.Logger, startTimeMetricRegex)
}
30 changes: 27 additions & 3 deletions receiver/prometheusreceiver/internal/metricfamily.go
@@ -57,6 +57,7 @@ type metricGroup struct {
hasCount bool
sum float64
hasSum bool
created float64
value float64
complexValue []*dataPoint
exemplars pmetric.ExemplarSlice
@@ -141,7 +142,12 @@ func (mg *metricGroup) toDistributionPoint(dest pmetric.HistogramDataPointSlice)

// The timestamp MUST be retrieved in milliseconds and converted to nanoseconds.
tsNanos := timestampFromMs(mg.ts)
point.SetStartTimestamp(tsNanos) // metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
if mg.created != 0 {
point.SetStartTimestamp(timestampFromFloat64(mg.created))
} else {
// metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
point.SetStartTimestamp(tsNanos)
}
point.SetTimestamp(tsNanos)
populateAttributes(pmetric.MetricTypeHistogram, mg.ls, point.Attributes())
mg.setExemplars(point.Exemplars())
@@ -196,7 +202,12 @@ func (mg *metricGroup) toSummaryPoint(dest pmetric.SummaryDataPointSlice) {
// The timestamp MUST be retrieved in milliseconds and converted to nanoseconds.
tsNanos := timestampFromMs(mg.ts)
point.SetTimestamp(tsNanos)
point.SetStartTimestamp(tsNanos) // metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
if mg.created != 0 {
point.SetStartTimestamp(timestampFromFloat64(mg.created))
} else {
// metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
point.SetStartTimestamp(tsNanos)
}
populateAttributes(pmetric.MetricTypeSummary, mg.ls, point.Attributes())
}

@@ -205,7 +216,12 @@ func (mg *metricGroup) toNumberDataPoint(dest pmetric.NumberDataPointSlice) {
point := dest.AppendEmpty()
// gauge/undefined types have no start time.
if mg.mtype == pmetric.MetricTypeSum {
point.SetStartTimestamp(tsNanos) // metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
if mg.created != 0 {
point.SetStartTimestamp(timestampFromFloat64(mg.created))
} else {
// metrics_adjuster adjusts the startTimestamp to the initial scrape timestamp
point.SetStartTimestamp(tsNanos)
}
}
point.SetTimestamp(tsNanos)
if value.IsStaleNaN(mg.value) {
@@ -268,13 +284,21 @@ func (mf *metricFamily) addSeries(seriesRef uint64, metricName string, ls labels
mg.ts = t
mg.count = v
mg.hasCount = true
case strings.HasSuffix(metricName, metricSuffixCreated):
mg.created = v
default:
boundary, err := getBoundary(mf.mtype, ls)
if err != nil {
return err
}
mg.complexValue = append(mg.complexValue, &dataPoint{value: v, boundary: boundary})
}
case pmetric.MetricTypeSum:
if strings.HasSuffix(metricName, metricSuffixCreated) {
mg.created = v
} else {
mg.value = v
}
default:
mg.value = v
}
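Outside the diff context, the suffix dispatch that `addSeries` gains above can be summarized with the following simplified, self-contained sketch. The constant and the `metricGroup` shape are paraphrased stand-ins for the receiver's internals (which also handle buckets, quantiles, stale markers, and per-type cases), not imports from it, and the per-type handling in the diff is collapsed into a single switch.

```go
package main

import (
	"fmt"
	"strings"
)

const metricSuffixCreated = "_created"

// metricGroup is a pared-down stand-in for the receiver's internal struct.
type metricGroup struct {
	count, sum, created, value float64
}

// route shows how a scraped sample is assigned: *_created series populate
// created (later used as StartTimeUnixNano and then dropped) instead of
// being treated as an ordinary sample value.
func route(mg *metricGroup, metricName string, v float64) {
	switch {
	case strings.HasSuffix(metricName, "_count"):
		mg.count = v
	case strings.HasSuffix(metricName, "_sum"):
		mg.sum = v
	case strings.HasSuffix(metricName, metricSuffixCreated):
		mg.created = v
	default:
		mg.value = v
	}
}

func main() {
	mg := &metricGroup{}
	route(mg, "http_requests_created", 600.78)
	route(mg, "http_requests_total", 66)
	fmt.Printf("%+v\n", *mg)
}
```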
151 changes: 151 additions & 0 deletions receiver/prometheusreceiver/internal/metricfamily_test.go
@@ -72,6 +72,12 @@ var mc = testMetadataStore{
Help: "This is some help for a histogram",
Unit: "ms",
},
"histogram_with_created": scrape.MetricMetadata{
Metric: "hg",
Type: textparse.MetricTypeHistogram,
Help: "This is some help for a histogram",
Unit: "ms",
},
"histogram_stale": scrape.MetricMetadata{
Metric: "hg_stale",
Type: textparse.MetricTypeHistogram,
@@ -84,6 +90,12 @@
Help: "This is some help for a summary",
Unit: "ms",
},
"summary_with_created": scrape.MetricMetadata{
Metric: "s",
Type: textparse.MetricTypeSummary,
Help: "This is some help for a summary",
Unit: "ms",
},
"summary_stale": scrape.MetricMetadata{
Metric: "s_stale",
Type: textparse.MetricTypeSummary,
@@ -140,6 +152,49 @@ func TestMetricGroupData_toDistributionUnitTest(t *testing.T) {
return point
},
},
{
name: "histogram with startTimestamp from _created",
metricName: "histogram_with_created",
intervalStartTimeMs: 11,
labels: labels.FromMap(map[string]string{"a": "A"}),
scrapes: []*scrape{
{at: 11, value: 66, metric: "histogram_with_created_count"},
{at: 11, value: 1004.78, metric: "histogram_with_created_sum"},
{at: 11, value: 600.78, metric: "histogram_with_created_created"},
{
at: 11,
value: 33,
metric: "histogram_with_created_bucket",
extraLabel: labels.Label{Name: "le", Value: "0.75"},
},
{
at: 11,
value: 55,
metric: "histogram_with_created_bucket",
extraLabel: labels.Label{Name: "le", Value: "2.75"},
},
{
at: 11,
value: 66,
metric: "histogram_with_created_bucket",
extraLabel: labels.Label{Name: "le", Value: "+Inf"}},
},
want: func() pmetric.HistogramDataPoint {
point := pmetric.NewHistogramDataPoint()
point.SetCount(66)
point.SetSum(1004.78)

// the time in milliseconds -> nanoseconds.
point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond))
point.SetStartTimestamp(timestampFromFloat64(600.78))

point.ExplicitBounds().FromRaw([]float64{0.75, 2.75})
point.BucketCounts().FromRaw([]uint64{33, 22, 11})
attributes := point.Attributes()
attributes.PutStr("a", "A")
return point
},
},
{
name: "histogram that is stale",
metricName: "histogram_stale",
@@ -310,6 +365,79 @@ func TestMetricGroupData_toSummaryUnitTest(t *testing.T) {
return point
},
},
{
name: "summary_with_created",
labelsScrapes: []*labelsScrapes{
{
labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 10, metric: "summary_with_created_count"},
{at: 14, value: 15, metric: "summary_with_created_sum"},
{at: 14, value: 150, metric: "summary_with_created_created"},
},
},
{
labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 8, metric: "value"},
},
},
{
labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 33.7, metric: "value"},
},
},
{
labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 27, metric: "value"},
},
},
{
labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 56, metric: "value"},
},
},
{
labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),
scrapes: []*scrape{
{at: 14, value: 82, metric: "value"},
},
},
},
want: func() pmetric.SummaryDataPoint {
point := pmetric.NewSummaryDataPoint()
point.SetCount(10)
point.SetSum(15)
qtL := point.QuantileValues()
qn0 := qtL.AppendEmpty()
qn0.SetQuantile(0)
qn0.SetValue(8)
qn50 := qtL.AppendEmpty()
qn50.SetQuantile(.5)
qn50.SetValue(27)
qn75 := qtL.AppendEmpty()
qn75.SetQuantile(.75)
qn75.SetValue(33.7)
qn90 := qtL.AppendEmpty()
qn90.SetQuantile(.9)
qn90.SetValue(56)
qn99 := qtL.AppendEmpty()
qn99.SetQuantile(.99)
qn99.SetValue(82)

// the time in milliseconds -> nanoseconds.
point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond))
point.SetStartTimestamp(timestampFromFloat64(150))

attributes := point.Attributes()
attributes.PutStr("a", "A")
attributes.PutStr("b", "B")
return point
},
},
{
name: "summary_stale",
labelsScrapes: []*labelsScrapes{
@@ -453,6 +581,29 @@ func TestMetricGroupData_toNumberDataUnitTest(t *testing.T) {
intervalStartTimestampMs int64
want func() pmetric.NumberDataPoint
}{
{
metricKind: "counter",
name: "counter:: startTimestampMs from _created",
intervalStartTimestampMs: 11,
labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),
scrapes: []*scrape{
{at: 13, value: 33.7, metric: "value"},
{at: 13, value: 150, metric: "value_created"},
},
want: func() pmetric.NumberDataPoint {
point := pmetric.NewNumberDataPoint()
point.SetDoubleValue(33.7)

// the time in milliseconds -> nanoseconds.
point.SetTimestamp(pcommon.Timestamp(13 * time.Millisecond))
point.SetStartTimestamp(timestampFromFloat64(150))

attributes := point.Attributes()
attributes.PutStr("a", "A")
attributes.PutStr("b", "B")
return point
},
},
{
metricKind: "counter",
name: "counter:: startTimestampMs of 11",