
PromQL engine: Delay deletion of __name__ label to the end of the query evaluation (#14477)

  - This change allows optionally preserving the `__name__` label via the `label_replace` and `label_join` functions, and helps prevent the dreaded "vector cannot contain metrics with the same labelset" error.
  - The implementation extends the `Series` and `Sample` structs with a boolean flag indicating whether the `__name__` label should be deleted at the end of the query evaluation.
  - The `label_replace` and `label_join` functions can still access the value of the `__name__` label, even if it has previously been marked for deletion. If `__name__` is used as the target label, it won't be dropped at the end of the query evaluation.
  - Fixes https://github.com/prometheus/prometheus/issues/11397
  - See https://github.com/jcreixell/prometheus/pull/2 for previous discussion, including the decision to create this PR and benchmark it before considering other alternatives (like refactoring `labels.Labels`).
  - See https://github.com/jcreixell/prometheus/pull/1 for an alternative implementation using a special label instead of boolean flags.
  - Note: because this changes the behavior of some "weird" queries (see https://github.com/prometheus/prometheus/issues/11397#issuecomment-1451998792), the new behavior is gated behind a feature flag, `promql-delayed-name-removal`; enabling it is shown below.
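
To opt in, start Prometheus with the new (experimental) flag enabled; a minimal invocation:

```
prometheus --enable-feature=promql-delayed-name-removal
```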

Example (this always fails, because `count_over_time` drops `__name__` from every series, so series that differ only in their metric name end up with identical labelsets):

```
count_over_time({__name__!=""}[1m])

=> Error executing query: vector cannot contain metrics with the same labelset
```

Before:

```
label_replace(count_over_time({__name__!=""}[1m]), "__name__", "count_$1", "__name__", "(.+)")

=> Error executing query: vector cannot contain metrics with the same labelset
```

After:

```
label_replace(count_over_time({__name__!=""}[1m]), "__name__", "count_$1", "__name__", "(.+)")

=>
count_go_gc_cycles_automatic_gc_cycles_total{instance="localhost:9090", job="prometheus"} 1
count_go_gc_cycles_forced_gc_cycles_total{instance="localhost:9090", job="prometheus"} 1
...
```
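
`label_join` gets the same treatment when `__name__` is the destination label. A sketch, reusing the `metric`/`another_metric` series loaded by the new `name_label_dropping.test` file in this commit:

```
label_join(rate({env="1"}[10m]), "__name__", "_", "__name__", "env")

=>
metric_1{env="1"} 0.2
another_metric_1{env="1"} 0.2
```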

Signed-off-by: Jorge Creixell <jcreixell@gmail.com>

---------

Signed-off-by: Jorge Creixell <jcreixell@gmail.com>
Signed-off-by: Björn Rabenstein <github@rabenste.in>
Jorge Creixell committed via GitHub 3 months ago (commit e9e3d64b7c, branch pull/14765/head).
Changed files (lines changed):

  1. cmd/prometheus/main.go (14)
  2. docs/command-line/prometheus.md (2)
  3. docs/feature_flags.md (8)
  4. promql/engine.go (124)
  5. promql/engine_test.go (29)
  6. promql/functions.go (120)
  7. promql/promqltest/test.go (2)
  8. promql/promqltest/testdata/name_label_dropping.test (84)
  9. promql/value.go (6)

cmd/prometheus/main.go (14 changed lines)

@ -169,6 +169,8 @@ type flagConfig struct {
corsRegexString string
promlogConfig promlog.Config
promqlEnableDelayedNameRemoval bool
}
// setFeatureListOptions sets the corresponding options from the featureList.
@ -238,6 +240,9 @@ func (c *flagConfig) setFeatureListOptions(logger log.Logger) error {
case "delayed-compaction":
c.tsdb.EnableDelayedCompaction = true
level.Info(logger).Log("msg", "Experimental delayed compaction is enabled.")
case "promql-delayed-name-removal":
c.promqlEnableDelayedNameRemoval = true
level.Info(logger).Log("msg", "Experimental PromQL delayed name removal enabled.")
case "utf8-names":
model.NameValidationScheme = model.UTF8Validation
level.Info(logger).Log("msg", "Experimental UTF-8 support enabled")
@ -487,7 +492,7 @@ func main() {
a.Flag("scrape.name-escaping-scheme", `Method for escaping legacy invalid names when sending to Prometheus that does not support UTF-8. Can be one of "values", "underscores", or "dots".`).Default(scrape.DefaultNameEscapingScheme.String()).StringVar(&cfg.nameEscapingScheme)
a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: agent, auto-gomemlimit, exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, remote-write-receiver (DEPRECATED), extra-scrape-metrics, new-service-discovery-manager, auto-gomaxprocs, no-default-scrape-port, native-histograms, otlp-write-receiver, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, utf8-names. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: agent, auto-gomaxprocs, auto-gomemlimit, concurrent-rule-eval, created-timestamp-zero-ingestion, delayed-compaction, exemplar-storage, expand-external-labels, extra-scrape-metrics, memory-snapshot-on-shutdown, native-histograms, new-service-discovery-manager, no-default-scrape-port, otlp-write-receiver, promql-experimental-functions, promql-delayed-name-removal, promql-per-step-stats, remote-write-receiver (DEPRECATED), utf8-names. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
Default("").StringsVar(&cfg.featureList)
promlogflag.AddFlags(a, &cfg.promlogConfig)
@ -799,9 +804,10 @@ func main() {
NoStepSubqueryIntervalFn: noStepSubqueryInterval.Get,
// EnableAtModifier and EnableNegativeOffset have to be
// always on for regular PromQL as of Prometheus v2.33.
EnableAtModifier: true,
EnableNegativeOffset: true,
EnablePerStepStats: cfg.enablePerStepStats,
EnableAtModifier: true,
EnableNegativeOffset: true,
EnablePerStepStats: cfg.enablePerStepStats,
EnableDelayedNameRemoval: cfg.promqlEnableDelayedNameRemoval,
}
queryEngine = promql.NewEngine(opts)

docs/command-line/prometheus.md (2 changed lines)

@ -57,7 +57,7 @@ The Prometheus monitoring server
| <code class="text-nowrap">--query.max-concurrency</code> | Maximum number of queries executed concurrently. Use with server mode only. | `20` |
| <code class="text-nowrap">--query.max-samples</code> | Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return. Use with server mode only. | `50000000` |
| <code class="text-nowrap">--scrape.name-escaping-scheme</code> | Method for escaping legacy invalid names when sending to Prometheus that does not support UTF-8. Can be one of "values", "underscores", or "dots". | `values` |
| <code class="text-nowrap">--enable-feature</code> <code class="text-nowrap">...<code class="text-nowrap"> | Comma separated feature names to enable. Valid options: agent, auto-gomemlimit, exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, remote-write-receiver (DEPRECATED), extra-scrape-metrics, new-service-discovery-manager, auto-gomaxprocs, no-default-scrape-port, native-histograms, otlp-write-receiver, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, utf8-names. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | |
| <code class="text-nowrap">--enable-feature</code> <code class="text-nowrap">...<code class="text-nowrap"> | Comma separated feature names to enable. Valid options: agent, auto-gomaxprocs, auto-gomemlimit, concurrent-rule-eval, created-timestamp-zero-ingestion, delayed-compaction, exemplar-storage, expand-external-labels, extra-scrape-metrics, memory-snapshot-on-shutdown, native-histograms, new-service-discovery-manager, no-default-scrape-port, otlp-write-receiver, promql-experimental-functions, promql-delayed-name-removal, promql-per-step-stats, remote-write-receiver (DEPRECATED), utf8-names. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | |
| <code class="text-nowrap">--log.level</code> | Only log messages with the given severity or above. One of: [debug, info, warn, error] | `info` |
| <code class="text-nowrap">--log.format</code> | Output format of log messages. One of: [logfmt, json] | `logfmt` |

docs/feature_flags.md (8 changed lines)

@ -250,6 +250,14 @@ Note that during this delay, the Head continues its usual operations, which incl
Despite the delay in compaction, the blocks produced are time-aligned in the same manner as they would be if the delay was not in place.
## Delay __name__ label removal for PromQL engine
`--enable-feature=promql-delayed-name-removal`
When enabled, Prometheus will change the way in which the `__name__` label is removed from PromQL query results (for functions and expressions for which this is necessary). Specifically, it will delay the removal to the last step of the query evaluation, instead of every time an expression or function creating derived metrics is evaluated.
This allows optionally preserving the `__name__` label via the `label_replace` and `label_join` functions, and helps prevent the "vector cannot contain metrics with the same labelset" error, which can happen when applying a regex-matcher to the `__name__` label.
## UTF-8 Name Support
`--enable-feature=utf8-names`

promql/engine.go (124 changed lines)

@ -313,6 +313,11 @@ type EngineOpts struct {
// EnablePerStepStats if true allows for per-step stats to be computed on request. Disabled otherwise.
EnablePerStepStats bool
// EnableDelayedNameRemoval delays the removal of the __name__ label to the last step of the query evaluation.
// This is useful in certain scenarios where the __name__ label must be preserved or where applying a
// regex-matcher to the __name__ label may otherwise lead to duplicate labelset errors.
EnableDelayedNameRemoval bool
}
// Engine handles the lifetime of queries from beginning to end.
@ -330,6 +335,7 @@ type Engine struct {
enableAtModifier bool
enableNegativeOffset bool
enablePerStepStats bool
enableDelayedNameRemoval bool
}
// NewEngine returns a new engine.
@ -420,6 +426,7 @@ func NewEngine(opts EngineOpts) *Engine {
enableAtModifier: opts.EnableAtModifier,
enableNegativeOffset: opts.EnableNegativeOffset,
enablePerStepStats: opts.EnablePerStepStats,
enableDelayedNameRemoval: opts.EnableDelayedNameRemoval,
}
}
@ -712,6 +719,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval
lookbackDelta: s.LookbackDelta,
samplesStats: query.sampleStats,
noStepSubqueryIntervalFn: ng.noStepSubqueryIntervalFn,
enableDelayedNameRemoval: ng.enableDelayedNameRemoval,
}
query.sampleStats.InitStepTracking(start, start, 1)
@ -743,9 +751,9 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval
// Point might have a different timestamp, force it to the evaluation
// timestamp as that is when we ran the evaluation.
if len(s.Histograms) > 0 {
vector[i] = Sample{Metric: s.Metric, H: s.Histograms[0].H, T: start}
vector[i] = Sample{Metric: s.Metric, H: s.Histograms[0].H, T: start, DropName: s.DropName}
} else {
vector[i] = Sample{Metric: s.Metric, F: s.Floats[0].F, T: start}
vector[i] = Sample{Metric: s.Metric, F: s.Floats[0].F, T: start, DropName: s.DropName}
}
}
return vector, warnings, nil
@ -770,6 +778,7 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval
lookbackDelta: s.LookbackDelta,
samplesStats: query.sampleStats,
noStepSubqueryIntervalFn: ng.noStepSubqueryIntervalFn,
enableDelayedNameRemoval: ng.enableDelayedNameRemoval,
}
query.sampleStats.InitStepTracking(evaluator.startTimestamp, evaluator.endTimestamp, evaluator.interval)
val, warnings, err := evaluator.Eval(s.Expr)
@ -1032,6 +1041,7 @@ type evaluator struct {
lookbackDelta time.Duration
samplesStats *stats.QuerySamples
noStepSubqueryIntervalFn func(rangeMillis int64) int64
enableDelayedNameRemoval bool
}
// errorf causes a panic with the input formatted into an error.
@ -1073,6 +1083,9 @@ func (ev *evaluator) Eval(expr parser.Expr) (v parser.Value, ws annotations.Anno
defer ev.recover(expr, &ws, &err)
v, ws = ev.eval(expr)
if ev.enableDelayedNameRemoval {
ev.cleanupMetricLabels(v)
}
return v, ws, nil
}
@ -1101,6 +1114,9 @@ type EvalNodeHelper struct {
rightSigs map[string]Sample
matchedSigs map[string]map[uint64]struct{}
resultMetric map[string]labels.Labels
// Additional options for the evaluation.
enableDelayedNameRemoval bool
}
func (enh *EvalNodeHelper) resetBuilder(lbls labels.Labels) {
@ -1150,7 +1166,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
biggestLen = len(matrixes[i])
}
}
enh := &EvalNodeHelper{Out: make(Vector, 0, biggestLen)}
enh := &EvalNodeHelper{Out: make(Vector, 0, biggestLen), enableDelayedNameRemoval: ev.enableDelayedNameRemoval}
type seriesAndTimestamp struct {
Series
ts int64
@ -1196,12 +1212,12 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
for si, series := range matrixes[i] {
switch {
case len(series.Floats) > 0 && series.Floats[0].T == ts:
vectors[i] = append(vectors[i], Sample{Metric: series.Metric, F: series.Floats[0].F, T: ts})
vectors[i] = append(vectors[i], Sample{Metric: series.Metric, F: series.Floats[0].F, T: ts, DropName: series.DropName})
// Move input vectors forward so we don't have to re-scan the same
// past points at the next step.
matrixes[i][si].Floats = series.Floats[1:]
case len(series.Histograms) > 0 && series.Histograms[0].T == ts:
vectors[i] = append(vectors[i], Sample{Metric: series.Metric, H: series.Histograms[0].H, T: ts})
vectors[i] = append(vectors[i], Sample{Metric: series.Metric, H: series.Histograms[0].H, T: ts, DropName: series.DropName})
matrixes[i][si].Histograms = series.Histograms[1:]
default:
continue
@ -1240,15 +1256,15 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
// If this could be an instant query, shortcut so as not to change sort order.
if ev.endTimestamp == ev.startTimestamp {
if result.ContainsSameLabelset() {
if !ev.enableDelayedNameRemoval && result.ContainsSameLabelset() {
ev.errorf("vector cannot contain metrics with the same labelset")
}
mat := make(Matrix, len(result))
for i, s := range result {
if s.H == nil {
mat[i] = Series{Metric: s.Metric, Floats: []FPoint{{T: ts, F: s.F}}}
mat[i] = Series{Metric: s.Metric, Floats: []FPoint{{T: ts, F: s.F}}, DropName: s.DropName}
} else {
mat[i] = Series{Metric: s.Metric, Histograms: []HPoint{{T: ts, H: s.H}}}
mat[i] = Series{Metric: s.Metric, Histograms: []HPoint{{T: ts, H: s.H}}, DropName: s.DropName}
}
}
ev.currentSamples = originalNumSamples + mat.TotalSamples()
@ -1266,7 +1282,7 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
}
ss.ts = ts
} else {
ss = seriesAndTimestamp{Series{Metric: sample.Metric}, ts}
ss = seriesAndTimestamp{Series{Metric: sample.Metric, DropName: sample.DropName}, ts}
}
addToSeries(&ss.Series, enh.Ts, sample.F, sample.H, numSteps)
seriess[h] = ss
@ -1302,7 +1318,7 @@ func (ev *evaluator) rangeEvalAgg(aggExpr *parser.AggregateExpr, sortedGrouping
var warnings annotations.Annotations
enh := &EvalNodeHelper{}
enh := &EvalNodeHelper{enableDelayedNameRemoval: ev.enableDelayedNameRemoval}
tempNumSamples := ev.currentSamples
// Create a mapping from input series to output groups.
@ -1611,10 +1627,17 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
var prevSS *Series
inMatrix := make(Matrix, 1)
inArgs[matrixArgIndex] = inMatrix
enh := &EvalNodeHelper{Out: make(Vector, 0, 1)}
enh := &EvalNodeHelper{Out: make(Vector, 0, 1), enableDelayedNameRemoval: ev.enableDelayedNameRemoval}
// Process all the calls for one time series at a time.
it := storage.NewBuffer(selRange)
var chkIter chunkenc.Iterator
// The last_over_time function acts like offset; thus, it
// should keep the metric name. For all the other range
// vector functions, the only change needed is to drop the
// metric name in the output.
dropName := e.Func.Name != "last_over_time"
for i, s := range selVS.Series {
if err := contextDone(ev.ctx, "expression evaluation"); err != nil {
ev.error(err)
@ -1629,15 +1652,12 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
chkIter = s.Iterator(chkIter)
it.Reset(chkIter)
metric := selVS.Series[i].Labels()
// The last_over_time function acts like offset; thus, it
// should keep the metric name. For all the other range
// vector functions, the only change needed is to drop the
// metric name in the output.
if e.Func.Name != "last_over_time" {
if !ev.enableDelayedNameRemoval && dropName {
metric = metric.DropMetricName()
}
ss := Series{
Metric: metric,
Metric: metric,
DropName: dropName,
}
inMatrix[0].Metric = selVS.Series[i].Labels()
for ts, step := ev.startTimestamp, -1; ts <= ev.endTimestamp; ts += ev.interval {
@ -1752,16 +1772,16 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
return Matrix{
Series{
Metric: createLabelsForAbsentFunction(e.Args[0]),
Floats: newp,
Metric: createLabelsForAbsentFunction(e.Args[0]),
Floats: newp,
DropName: dropName,
},
}, warnings
}
if mat.ContainsSameLabelset() {
if !ev.enableDelayedNameRemoval && mat.ContainsSameLabelset() {
ev.errorf("vector cannot contain metrics with the same labelset")
}
return mat, warnings
case *parser.ParenExpr:
@ -1772,12 +1792,15 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
mat := val.(Matrix)
if e.Op == parser.SUB {
for i := range mat {
mat[i].Metric = mat[i].Metric.DropMetricName()
if !ev.enableDelayedNameRemoval {
mat[i].Metric = mat[i].Metric.DropMetricName()
}
mat[i].DropName = true
for j := range mat[i].Floats {
mat[i].Floats[j].F = -mat[i].Floats[j].F
}
}
if mat.ContainsSameLabelset() {
if !ev.enableDelayedNameRemoval && mat.ContainsSameLabelset() {
ev.errorf("vector cannot contain metrics with the same labelset")
}
}
@ -1913,6 +1936,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
lookbackDelta: ev.lookbackDelta,
samplesStats: ev.samplesStats.NewChild(),
noStepSubqueryIntervalFn: ev.noStepSubqueryIntervalFn,
enableDelayedNameRemoval: ev.enableDelayedNameRemoval,
}
if e.Step != 0 {
@ -1957,6 +1981,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, annotations.Annotatio
lookbackDelta: ev.lookbackDelta,
samplesStats: ev.samplesStats.NewChild(),
noStepSubqueryIntervalFn: ev.noStepSubqueryIntervalFn,
enableDelayedNameRemoval: ev.enableDelayedNameRemoval,
}
res, ws := newEv.eval(e.Expr)
ev.currentSamples = newEv.currentSamples
@ -2553,7 +2578,7 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
continue
}
metric := resultMetric(ls.Metric, rs.Metric, op, matching, enh)
if returnBool {
if !ev.enableDelayedNameRemoval && returnBool {
metric = metric.DropMetricName()
}
insertedSigs, exists := matchedSigs[sig]
@ -2578,9 +2603,10 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
}
enh.Out = append(enh.Out, Sample{
Metric: metric,
F: floatValue,
H: histogramValue,
Metric: metric,
F: floatValue,
H: histogramValue,
DropName: returnBool,
})
}
return enh.Out, lastErr
@ -2680,7 +2706,10 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala
lhsSample.F = float
lhsSample.H = histogram
if shouldDropMetricName(op) || returnBool {
lhsSample.Metric = lhsSample.Metric.DropMetricName()
if !ev.enableDelayedNameRemoval {
lhsSample.Metric = lhsSample.Metric.DropMetricName()
}
lhsSample.DropName = true
}
enh.Out = append(enh.Out, lhsSample)
}
@ -3019,6 +3048,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix
ss := &outputMatrix[ri]
addToSeries(ss, enh.Ts, aggr.floatValue, aggr.histogramValue, numSteps)
ss.DropName = inputMatrix[ri].DropName
}
return annos
@ -3045,7 +3075,7 @@ seriesLoop:
if !ok {
continue
}
s = Sample{Metric: inputMatrix[si].Metric, F: f}
s = Sample{Metric: inputMatrix[si].Metric, F: f, DropName: inputMatrix[si].DropName}
group := &groups[seriesToResult[si]]
// Initialize this group if it's the first time we've seen it.
@ -3129,16 +3159,16 @@ seriesLoop:
mat = make(Matrix, 0, len(groups))
}
add := func(lbls labels.Labels, f float64) {
add := func(lbls labels.Labels, f float64, dropName bool) {
// If this could be an instant query, add directly to the matrix so the result is in consistent order.
if ev.endTimestamp == ev.startTimestamp {
mat = append(mat, Series{Metric: lbls, Floats: []FPoint{{T: enh.Ts, F: f}}})
mat = append(mat, Series{Metric: lbls, Floats: []FPoint{{T: enh.Ts, F: f}}, DropName: dropName})
} else {
// Otherwise the results are added into seriess elements.
hash := lbls.Hash()
ss, ok := seriess[hash]
if !ok {
ss = Series{Metric: lbls}
ss = Series{Metric: lbls, DropName: dropName}
}
addToSeries(&ss, enh.Ts, f, nil, numSteps)
seriess[hash] = ss
@ -3155,7 +3185,7 @@ seriesLoop:
sort.Sort(sort.Reverse(aggr.heap))
}
for _, v := range aggr.heap {
add(v.Metric, v.F)
add(v.Metric, v.F, v.DropName)
}
case parser.BOTTOMK:
@ -3164,12 +3194,12 @@ seriesLoop:
sort.Sort(sort.Reverse((*vectorByReverseValueHeap)(&aggr.heap)))
}
for _, v := range aggr.heap {
add(v.Metric, v.F)
add(v.Metric, v.F, v.DropName)
}
case parser.LIMITK, parser.LIMIT_RATIO:
for _, v := range aggr.heap {
add(v.Metric, v.F)
add(v.Metric, v.F, v.DropName)
}
}
}
@ -3221,6 +3251,30 @@ func (ev *evaluator) aggregationCountValues(e *parser.AggregateExpr, grouping []
return enh.Out, nil
}
func (ev *evaluator) cleanupMetricLabels(v parser.Value) {
if v.Type() == parser.ValueTypeMatrix {
mat := v.(Matrix)
for i := range mat {
if mat[i].DropName {
mat[i].Metric = mat[i].Metric.DropMetricName()
}
}
if mat.ContainsSameLabelset() {
ev.errorf("vector cannot contain metrics with the same labelset")
}
} else if v.Type() == parser.ValueTypeVector {
vec := v.(Vector)
for i := range vec {
if vec[i].DropName {
vec[i].Metric = vec[i].Metric.DropMetricName()
}
}
if vec.ContainsSameLabelset() {
ev.errorf("vector cannot contain metrics with the same labelset")
}
}
}
func addToSeries(ss *Series, ts int64, f float64, h *histogram.FloatHistogram, numSteps int) {
if h == nil {
if ss.Floats == nil {

promql/engine_test.go (29 changed lines)

@ -1714,7 +1714,8 @@ load 1ms
{F: 3600, T: 6 * 60 * 1000},
{F: 3600, T: 7 * 60 * 1000},
},
Metric: labels.EmptyLabels(),
Metric: labels.EmptyLabels(),
DropName: true,
},
},
},
@ -1930,20 +1931,24 @@ func TestSubquerySelector(t *testing.T) {
nil,
promql.Matrix{
promql.Series{
Floats: []promql.FPoint{{F: 3, T: 7985000}, {F: 3, T: 7990000}, {F: 3, T: 7995000}, {F: 3, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
Floats: []promql.FPoint{{F: 3, T: 7985000}, {F: 3, T: 7990000}, {F: 3, T: 7995000}, {F: 3, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
DropName: true,
},
promql.Series{
Floats: []promql.FPoint{{F: 4, T: 7985000}, {F: 4, T: 7990000}, {F: 4, T: 7995000}, {F: 4, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
Floats: []promql.FPoint{{F: 4, T: 7985000}, {F: 4, T: 7990000}, {F: 4, T: 7995000}, {F: 4, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
DropName: true,
},
promql.Series{
Floats: []promql.FPoint{{F: 1, T: 7985000}, {F: 1, T: 7990000}, {F: 1, T: 7995000}, {F: 1, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
Floats: []promql.FPoint{{F: 1, T: 7985000}, {F: 1, T: 7990000}, {F: 1, T: 7995000}, {F: 1, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
DropName: true,
},
promql.Series{
Floats: []promql.FPoint{{F: 2, T: 7985000}, {F: 2, T: 7990000}, {F: 2, T: 7995000}, {F: 2, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
Floats: []promql.FPoint{{F: 2, T: 7985000}, {F: 2, T: 7990000}, {F: 2, T: 7995000}, {F: 2, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
DropName: true,
},
},
nil,
@ -3470,11 +3475,11 @@ func TestNativeHistogram_MulDivOperator(t *testing.T) {
// histogram * scalar.
queryString := fmt.Sprintf(`%s * %f`, seriesName, c.scalar)
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels(), DropName: true}})
// scalar * histogram.
queryString = fmt.Sprintf(`%f * %s`, c.scalar, seriesName)
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels()}})
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedMul, Metric: labels.EmptyLabels(), DropName: true}})
// histogram * float.
queryString = fmt.Sprintf(`%s * %s`, seriesName, floatSeriesName)
@ -3486,7 +3491,7 @@ func TestNativeHistogram_MulDivOperator(t *testing.T) {
// histogram / scalar.
queryString = fmt.Sprintf(`%s / %f`, seriesName, c.scalar)
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels()}})
queryAndCheck(queryString, []promql.Sample{{T: ts, H: &c.expectedDiv, Metric: labels.EmptyLabels(), DropName: true}})
// histogram / float.
queryString = fmt.Sprintf(`%s / %s`, seriesName, floatSeriesName)

promql/functions.go (120 changed lines)

@ -483,9 +483,13 @@ func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
return enh.Out, nil
}
for _, el := range vec {
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: math.Max(minVal, math.Min(maxVal, el.F)),
Metric: el.Metric,
F: math.Max(minVal, math.Min(maxVal, el.F)),
DropName: true,
})
}
return enh.Out, nil
@ -496,9 +500,13 @@ func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel
vec := vals[0].(Vector)
maxVal := vals[1].(Vector)[0].F
for _, el := range vec {
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: math.Min(maxVal, el.F),
Metric: el.Metric,
F: math.Min(maxVal, el.F),
DropName: true,
})
}
return enh.Out, nil
@ -509,9 +517,13 @@ func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel
vec := vals[0].(Vector)
minVal := vals[1].(Vector)[0].F
for _, el := range vec {
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: math.Max(minVal, el.F),
Metric: el.Metric,
F: math.Max(minVal, el.F),
DropName: true,
})
}
return enh.Out, nil
@ -532,8 +544,9 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
for _, el := range vec {
f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: f,
Metric: el.Metric,
F: f,
DropName: true,
})
}
return enh.Out, nil
@ -882,9 +895,13 @@ func funcPresentOverTime(vals []parser.Value, args parser.Expressions, enh *Eval
func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
for _, el := range vals[0].(Vector) {
if el.H == nil { // Process only float samples.
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: f(el.F),
Metric: el.Metric,
F: f(el.F),
DropName: true,
})
}
}
@ -1028,9 +1045,13 @@ func funcSgn(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper)
func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
vec := vals[0].(Vector)
for _, el := range vec {
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: float64(el.T) / 1000,
Metric: el.Metric,
F: float64(el.T) / 1000,
DropName: true,
})
}
return enh.Out, nil
@ -1137,9 +1158,13 @@ func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalN
if sample.H == nil {
continue
}
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: sample.H.Count,
Metric: sample.Metric,
F: sample.H.Count,
DropName: true,
})
}
return enh.Out, nil
@ -1154,9 +1179,13 @@ func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNod
if sample.H == nil {
continue
}
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: sample.H.Sum,
Metric: sample.Metric,
F: sample.H.Sum,
DropName: true,
})
}
return enh.Out, nil
@ -1171,9 +1200,13 @@ func funcHistogramAvg(vals []parser.Value, args parser.Expressions, enh *EvalNod
if sample.H == nil {
continue
}
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: sample.H.Sum / sample.H.Count,
Metric: sample.Metric,
F: sample.H.Sum / sample.H.Count,
DropName: true,
})
}
return enh.Out, nil
@ -1210,9 +1243,13 @@ func funcHistogramStdDev(vals []parser.Value, args parser.Expressions, enh *Eval
}
variance += cVariance
variance /= sample.H.Count
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: math.Sqrt(variance),
Metric: sample.Metric,
F: math.Sqrt(variance),
DropName: true,
})
}
return enh.Out, nil
@ -1249,9 +1286,13 @@ func funcHistogramStdVar(vals []parser.Value, args parser.Expressions, enh *Eval
}
variance += cVariance
variance /= sample.H.Count
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: variance,
Metric: sample.Metric,
F: variance,
DropName: true,
})
}
return enh.Out, nil
@ -1268,9 +1309,13 @@ func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *Ev
if sample.H == nil {
continue
}
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: histogramFraction(lower, upper, sample.H),
Metric: sample.Metric,
F: histogramFraction(lower, upper, sample.H),
DropName: true,
})
}
return enh.Out, nil
@ -1338,9 +1383,13 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
continue
}
if !enh.enableDelayedNameRemoval {
sample.Metric = sample.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: sample.Metric.DropMetricName(),
F: histogramQuantile(q, sample.H),
Metric: sample.Metric,
F: histogramQuantile(q, sample.H),
DropName: true,
})
}
@ -1442,6 +1491,11 @@ func (ev *evaluator) evalLabelReplace(args parser.Expressions) (parser.Value, an
lb.Reset(el.Metric)
lb.Set(dst, string(res))
matrix[i].Metric = lb.Labels()
if dst == model.MetricNameLabel {
matrix[i].DropName = false
} else {
matrix[i].DropName = el.DropName
}
}
}
if matrix.ContainsSameLabelset() {
@ -1496,6 +1550,12 @@ func (ev *evaluator) evalLabelJoin(args parser.Expressions) (parser.Value, annot
lb.Reset(el.Metric)
lb.Set(dst, strval)
matrix[i].Metric = lb.Labels()
if dst == model.MetricNameLabel {
matrix[i].DropName = false
} else {
matrix[i].DropName = el.DropName
}
}
return matrix, ws
@ -1518,9 +1578,13 @@ func dateWrapper(vals []parser.Value, enh *EvalNodeHelper, f func(time.Time) flo
for _, el := range vals[0].(Vector) {
t := time.Unix(int64(el.F), 0).UTC()
if !enh.enableDelayedNameRemoval {
el.Metric = el.Metric.DropMetricName()
}
enh.Out = append(enh.Out, Sample{
Metric: el.Metric.DropMetricName(),
F: f(t),
Metric: el.Metric,
F: f(t),
DropName: true,
})
}
return enh.Out

promql/promqltest/test.go (2 changed lines)

@ -90,6 +90,7 @@ func NewTestEngine(enablePerStepStats bool, lookbackDelta time.Duration, maxSamp
EnableNegativeOffset: true,
EnablePerStepStats: enablePerStepStats,
LookbackDelta: lookbackDelta,
EnableDelayedNameRemoval: true,
})
}
@ -1362,6 +1363,7 @@ func (ll *LazyLoader) clear() error {
NoStepSubqueryIntervalFn: func(int64) int64 { return durationMilliseconds(ll.SubqueryInterval) },
EnableAtModifier: ll.opts.EnableAtModifier,
EnableNegativeOffset: ll.opts.EnableNegativeOffset,
EnableDelayedNameRemoval: true,
}
ll.queryEngine = promql.NewEngine(opts)

promql/promqltest/testdata/name_label_dropping.test (84 changed lines, vendored)

@ -0,0 +1,84 @@
# Test for __name__ label drop.
load 5m
metric{env="1"} 0 60 120
another_metric{env="1"} 60 120 180
# Does not drop __name__ for vector selector
eval instant at 15m metric{env="1"}
metric{env="1"} 120
# Drops __name__ for unary operators
eval instant at 15m -metric
{env="1"} -120
# Drops __name__ for binary operators
eval instant at 15m metric + another_metric
{env="1"} 300
# Does not drop __name__ for binary comparison operators
eval instant at 15m metric <= another_metric
metric{env="1"} 120
# Drops __name__ for binary comparison operators with "bool" modifier
eval instant at 15m metric <= bool another_metric
{env="1"} 1
# Drops __name__ for vector-scalar operations
eval instant at 15m metric * 2
{env="1"} 240
# Drops __name__ for instant-vector functions
eval instant at 15m clamp(metric, 0, 100)
{env="1"} 100
# Drops __name__ for range-vector functions
eval instant at 15m rate(metric{env="1"}[10m])
{env="1"} 0.2
# Does not drop __name__ for last_over_time function
eval instant at 15m last_over_time(metric{env="1"}[10m])
metric{env="1"} 120
# Drops name for other _over_time functions
eval instant at 15m max_over_time(metric{env="1"}[10m])
{env="1"} 120
# Allows relabeling (to-be-dropped) __name__ via label_replace
eval instant at 15m label_replace(rate({env="1"}[10m]), "my_name", "rate_$1", "__name__", "(.+)")
{my_name="rate_metric", env="1"} 0.2
{my_name="rate_another_metric", env="1"} 0.2
# Allows preserving __name__ via label_replace
eval instant at 15m label_replace(rate({env="1"}[10m]), "__name__", "rate_$1", "__name__", "(.+)")
rate_metric{env="1"} 0.2
rate_another_metric{env="1"} 0.2
# Allows relabeling (to-be-dropped) __name__ via label_join
eval instant at 15m label_join(rate({env="1"}[10m]), "my_name", "_", "__name__")
{my_name="metric", env="1"} 0.2
{my_name="another_metric", env="1"} 0.2
# Allows preserving __name__ via label_join
eval instant at 15m label_join(rate({env="1"}[10m]), "__name__", "_", "__name__", "env")
metric_1{env="1"} 0.2
another_metric_1{env="1"} 0.2
# Does not drop metric names for aggregation operators
eval instant at 15m sum by (__name__, env) (metric{env="1"})
metric{env="1"} 120
# Aggregation operators by __name__ lead to duplicate labelset errors (aggregation is partitioned by not yet removed __name__ label)
# This is an accidental side effect of delayed __name__ label dropping
eval_fail instant at 15m sum by (__name__) (rate({env="1"}[10m]))
# Aggregation operators aggregate metrics with same labelset and to-be-dropped names
# This is an accidental side effect of delayed __name__ label dropping
eval instant at 15m sum(rate({env="1"}[10m])) by (env)
{env="1"} 0.4
# Aggregation operators propagate __name__ label dropping information
eval instant at 15m topk(10, sum by (__name__, env) (metric{env="1"}))
metric{env="1"} 120
eval instant at 15m topk(10, sum by (__name__, env) (rate(metric{env="1"}[10m])))
{env="1"} 0.2

promql/value.go (6 changed lines)

@ -68,6 +68,9 @@ type Series struct {
Metric labels.Labels `json:"metric"`
Floats []FPoint `json:"values,omitempty"`
Histograms []HPoint `json:"histograms,omitempty"`
// DropName is used to indicate whether the __name__ label should be dropped
// as part of the query evaluation.
DropName bool `json:"-"`
}
func (s Series) String() string {
@ -194,6 +197,9 @@ type Sample struct {
H *histogram.FloatHistogram
Metric labels.Labels
// DropName is used to indicate whether the __name__ label should be dropped
// as part of the query evaluation.
DropName bool
}
func (s Sample) String() string {
