mirror of https://github.com/prometheus/prometheus
Merge pull request #15563 from GiedriusS/fix_setting_field
scrape: fix nil panic after scrape loop reload

commit c69aeabf1d
@@ -361,6 +361,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 				bodySizeLimit:        bodySizeLimit,
 				acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, validationScheme),
 				acceptEncodingHeader: acceptEncodingHeader(enableCompression),
+				metrics:              sp.metrics,
 			}
 			newLoop = sp.newLoop(scrapeLoopOptions{
 				target:                   t,
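The one-line fix above copies the scrape pool's metrics into the targetScraper rebuilt by restartLoops. Before the change, a scraper recreated on config reload carried a nil metrics field, so the first scrape that needed to record a metric (for instance, a body-size-limit violation) dereferenced the nil pointer and panicked. The following is a minimal, self-contained sketch of that failure mode, not the actual Prometheus types; scraper, scraperMetrics and bodySizeExceeded are made-up names used only for illustration.

package main

import "fmt"

// scraperMetrics stands in for the shared metrics struct that the real
// targetScraper points at; the field here is illustrative only.
type scraperMetrics struct {
	bodySizeExceeded int
}

// scraper stands in for targetScraper: it is rebuilt on reload and is
// expected to carry a pointer to the pool-level metrics.
type scraper struct {
	bodySizeLimit int64
	metrics       *scraperMetrics
}

// scrape records a metric when the response body exceeds the limit.
// If metrics was never set, as in the pre-fix restartLoops, the increment
// below dereferences a nil pointer and panics.
func (s *scraper) scrape(bodySize int64) error {
	if bodySize > s.bodySizeLimit {
		s.metrics.bodySizeExceeded++ // panics when s.metrics == nil
		return fmt.Errorf("body size limit exceeded")
	}
	return nil
}

func main() {
	// A scraper rebuilt without copying the metrics field, mimicking the bug.
	s := &scraper{bodySizeLimit: 1}

	defer func() {
		if r := recover(); r != nil {
			fmt.Println("recovered:", r) // invalid memory address or nil pointer dereference
		}
	}()
	_ = s.scrape(2)
}

Calling a method through a non-nil *scraper is fine on its own; the panic only happens when the nil metrics pointer is dereferenced inside the limit-exceeded branch, which is why the regression test below forces exactly that branch.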
@@ -4831,3 +4831,44 @@ func newScrapableServer(scrapeText string) (s *httptest.Server, scrapedTwice cha
 		}
 	})), scrapedTwice
 }
+
+// Regression test for the panic fixed in https://github.com/prometheus/prometheus/pull/15523.
+func TestScrapePoolScrapeAfterReload(t *testing.T) {
+	h := httptest.NewServer(http.HandlerFunc(
+		func(w http.ResponseWriter, r *http.Request) {
+			w.Write([]byte{0x42, 0x42})
+		},
+	))
+	t.Cleanup(h.Close)
+
+	cfg := &config.ScrapeConfig{
+		BodySizeLimit:     1,
+		JobName:           "test",
+		Scheme:            "http",
+		ScrapeInterval:    model.Duration(100 * time.Millisecond),
+		ScrapeTimeout:     model.Duration(100 * time.Millisecond),
+		EnableCompression: false,
+		ServiceDiscoveryConfigs: discovery.Configs{
+			&discovery.StaticConfig{
+				{
+					Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(h.URL)}},
+				},
+			},
+		},
+	}
+
+	p, err := newScrapePool(cfg, &nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
+	require.NoError(t, err)
+	t.Cleanup(p.stop)
+
+	p.Sync([]*targetgroup.Group{
+		{
+			Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(strings.TrimPrefix(h.URL, "http://"))}},
+			Source:  "test",
+		},
+	})
+
+	require.NoError(t, p.reload(cfg))
+
+	<-time.After(1 * time.Second)
+}
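The test pins BodySizeLimit to a single byte while the test server always returns two bytes (0x42 0x42), so every scrape is guaranteed to take the body-size-exceeded path, which in the real scraper records the violation through the metrics field that used to be nil. After p.Sync registers the target, p.reload(cfg) exercises restartLoops, and the one-second wait gives the 100 ms scrape loop several chances to run against the rebuilt scraper. The sketch below is not Prometheus's limit-checking code, only a stand-alone illustration of why a 1-byte limit against a 2-byte body always trips the limit; the constant name bodySizeLimit is made up for this example.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Same shape as the test server above: always respond with two bytes.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte{0x42, 0x42})
	}))
	defer srv.Close()

	const bodySizeLimit = 1 // mirrors BodySizeLimit: 1 in the test config

	resp, err := http.Get(srv.URL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Read at most limit+1 bytes; seeing more than the limit is what makes
	// the real scraper abort the scrape and count the oversized body, the
	// code path that used to dereference the nil metrics field.
	n, _ := io.Copy(io.Discard, io.LimitReader(resp.Body, bodySizeLimit+1))
	if n > bodySizeLimit {
		fmt.Println("body larger than limit: scrape would fail and increment the metric")
	}
}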