@@ -300,8 +300,6 @@ const resolvedRetention = 15 * time.Minute
 func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc, externalURL *url.URL) (promql.Vector, error) {
 	res, err := query(ctx, r.vector.String(), ts)
 	if err != nil {
-		r.SetHealth(HealthBad)
-		r.SetLastError(err)
 		return nil, err
 	}
 
@@ -366,12 +364,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
 		resultFPs[h] = struct{}{}
 
 		if _, ok := alerts[h]; ok {
-			err = fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
-			// We have already acquired the lock above hence using SetHealth and
-			// SetLastError will deadlock.
-			r.health = HealthBad
-			r.lastError = err
-			return nil, err
+			return nil, fmt.Errorf("vector contains metrics with the same labelset after applying alert labels")
 		}
 
 		alerts[h] = &Alert{
@@ -421,10 +414,6 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, query QueryFunc,
 		}
 	}
 
-	// We have already acquired the lock above hence using SetHealth and
-	// SetLastError will deadlock.
-	r.health = HealthGood
-	r.lastError = err
 	return vec, nil
 }
 
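
Background on the "will deadlock" comments above: SetHealth and SetLastError acquire the rule's mutex themselves, Eval already holds that mutex at those points, and Go's sync.Mutex is not reentrant, which is why the old code wrote the fields directly. A minimal standalone sketch of that locking pattern, using hypothetical stand-in types rather than the actual Prometheus ones:

package main

import "sync"

// Illustrative stand-in for the rule health values referenced in the diff.
type RuleHealth string

const HealthGood RuleHealth = "ok"

// rule is a hypothetical simplification of AlertingRule: one mutex guards
// the health field.
type rule struct {
	mtx    sync.Mutex
	health RuleHealth
}

// SetHealth takes the lock itself, so it is only safe to call when the
// caller does not already hold r.mtx.
func (r *rule) SetHealth(h RuleHealth) {
	r.mtx.Lock()
	defer r.mtx.Unlock()
	r.health = h
}

// eval holds the lock for the whole evaluation. Calling r.SetHealth here
// would block forever on the same non-reentrant mutex, so the field is
// written directly instead.
func (r *rule) eval() {
	r.mtx.Lock()
	defer r.mtx.Unlock()
	// ... evaluation work ...
	r.health = HealthGood // direct write; r.SetHealth(HealthGood) would deadlock
}

func main() {
	r := &rule{}
	r.eval()
}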