
Scrape: do not put staleness marker when cache is reused (#7011)

* Scrape: do not put staleness marker when cache is reused

Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
pull/7017/head
Julien Pivotto authored 5 years ago, committed by GitHub
commit d6ad5551c9
  1. scrape/scrape.go (12 changed lines)
  2. scrape/scrape_test.go (39 changed lines)

scrape/scrape.go (12 changed lines)

@@ -314,6 +314,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 	for fp, oldLoop := range sp.loops {
 		var cache *scrapeCache
 		if oc := oldLoop.getCache(); reuseCache && oc != nil {
+			oldLoop.disableEndOfRunStalenessMarkers()
 			cache = oc
 		} else {
 			cache = newScrapeCache()
@@ -593,6 +594,7 @@ type loop interface {
 	run(interval, timeout time.Duration, errc chan<- error)
 	stop()
 	getCache() *scrapeCache
+	disableEndOfRunStalenessMarkers()
 }
 
 type cacheEntry struct {
@@ -619,6 +621,8 @@ type scrapeLoop struct {
 	ctx     context.Context
 	cancel  func()
 	stopped chan struct{}
+
+	disabledEndOfRunStalenessMarkers bool
 }
 
 // scrapeCache tracks mappings of exposed metric strings to label sets and
@@ -996,7 +1000,9 @@ mainLoop:
 
 	close(sl.stopped)
 
-	sl.endOfRunStaleness(last, ticker, interval)
+	if !sl.disabledEndOfRunStalenessMarkers {
+		sl.endOfRunStaleness(last, ticker, interval)
+	}
 }
 
 func (sl *scrapeLoop) endOfRunStaleness(last time.Time, ticker *time.Ticker, interval time.Duration) {
@@ -1054,6 +1060,10 @@ func (sl *scrapeLoop) stop() {
 	<-sl.stopped
 }
 
+func (sl *scrapeLoop) disableEndOfRunStalenessMarkers() {
+	sl.disabledEndOfRunStalenessMarkers = true
+}
+
 func (sl *scrapeLoop) getCache() *scrapeCache {
 	return sl.cache
 }
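
Taken together, the change is just a guard flag on the loop: the pool flips it on the old loop before handing that loop's cache to its replacement, and the loop's run path skips the end-of-run staleness step when the flag is set. Below is a minimal, self-contained sketch of that pattern, not the Prometheus code itself; this scrapeLoop is a stripped-down stand-in and its endOfRunStaleness only prints instead of appending staleness markers.

package main

import "fmt"

// scrapeLoop is a stripped-down stand-in for the real scrape loop,
// keeping only the pieces relevant to the flag added in this commit.
type scrapeLoop struct {
	stopped                          chan struct{}
	disabledEndOfRunStalenessMarkers bool
}

// disableEndOfRunStalenessMarkers mirrors the new loop-interface method:
// the pool calls it on the old loop when that loop's cache is reused.
func (sl *scrapeLoop) disableEndOfRunStalenessMarkers() {
	sl.disabledEndOfRunStalenessMarkers = true
}

// endOfRunStaleness only prints here; in Prometheus it appends staleness
// markers for the loop's series.
func (sl *scrapeLoop) endOfRunStaleness() {
	fmt.Println("writing end-of-run staleness markers")
}

// run sketches the tail of scrapeLoop.run: after the loop stops, the
// staleness step is skipped when the flag was set.
func (sl *scrapeLoop) run() {
	close(sl.stopped)
	if !sl.disabledEndOfRunStalenessMarkers {
		sl.endOfRunStaleness()
	}
}

func main() {
	// Old loop whose cache is reused during a reload: markers suppressed.
	reused := &scrapeLoop{stopped: make(chan struct{})}
	reused.disableEndOfRunStalenessMarkers()
	reused.run() // prints nothing

	// Loop that simply terminates: markers are still written.
	plain := &scrapeLoop{stopped: make(chan struct{})}
	plain.run() // prints the staleness message
}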

scrape/scrape_test.go (39 changed lines)

@@ -141,6 +141,9 @@ func (l *testLoop) run(interval, timeout time.Duration, errc chan<- error) {
 	l.startFunc(interval, timeout, errc)
 }
 
+func (l *testLoop) disableEndOfRunStalenessMarkers() {
+}
+
 func (l *testLoop) stop() {
 	l.stopFunc()
 }
@@ -1839,3 +1842,39 @@ func TestScrapeAddFast(t *testing.T) {
 	_, _, _, err = sl.append([]byte("up 1\n"), "", time.Time{}.Add(time.Second))
 	testutil.Ok(t, err)
 }
+
+func TestReuseCacheRace(t *testing.T) {
+	var (
+		app = &nopAppendable{}
+		cfg = &config.ScrapeConfig{
+			JobName:        "Prometheus",
+			ScrapeTimeout:  model.Duration(5 * time.Second),
+			ScrapeInterval: model.Duration(5 * time.Second),
+			MetricsPath:    "/metrics",
+		}
+		sp, _ = newScrapePool(cfg, app, 0, nil)
+		t1    = &Target{
+			discoveredLabels: labels.Labels{
+				labels.Label{
+					Name:  "labelNew",
+					Value: "nameNew",
+				},
+			},
+		}
+	)
+	sp.sync([]*Target{t1})
+
+	start := time.Now()
+	for i := uint(1); i > 0; i++ {
+		if time.Since(start) > 5*time.Second {
+			break
+		}
+		sp.reload(&config.ScrapeConfig{
+			JobName:        "Prometheus",
+			ScrapeTimeout:  model.Duration(1 * time.Millisecond),
+			ScrapeInterval: model.Duration(1 * time.Millisecond),
+			MetricsPath:    "/metrics",
+			SampleLimit:    i,
+		})
+	}
+}
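
The new test is mainly useful under the race detector, since it hammers reload while scrape loops are running to surface races on the reused cache. Assuming a checkout of the repository and a standard Go toolchain, something like the following would exercise it:

	go test -race -run TestReuseCacheRace ./scrape/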
