From 90e832c861df33e17b7240ddf1d57441860953dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Giedrius=20Statkevi=C4=8Dius?=
Date: Mon, 2 Dec 2024 16:08:37 +0200
Subject: [PATCH] scrape: fix nil panic after scrape loop reload
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Don't forget to set the `metrics` field, as otherwise scraping leads to
a nil panic when the body size limit is reached.

Signed-off-by: Giedrius Statkevičius
---
 scrape/scrape.go      |  1 +
 scrape/scrape_test.go | 55 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 56 insertions(+)

diff --git a/scrape/scrape.go b/scrape/scrape.go
index eeab208d6..47ed73c05 100644
--- a/scrape/scrape.go
+++ b/scrape/scrape.go
@@ -359,6 +359,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 			bodySizeLimit:        bodySizeLimit,
 			acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, validationScheme),
 			acceptEncodingHeader: acceptEncodingHeader(enableCompression),
+			metrics:              sp.metrics,
 		}
 		newLoop = sp.newLoop(scrapeLoopOptions{
 			target: t,
diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go
index ce1686fec..e8494e2db 100644
--- a/scrape/scrape_test.go
+++ b/scrape/scrape_test.go
@@ -4794,3 +4794,58 @@ func newScrapableServer(scrapeText string) (s *httptest.Server, scrapedTwice cha
 		}
 	})), scrapedTwice
 }
+
+type srvSameResp struct {
+	resp []byte
+}
+
+func (s *srvSameResp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	w.Write(s.resp)
+}
+
+func TestTargetScraperSegfault(t *testing.T) {
+	emptySrv := &srvSameResp{
+		resp: []byte{0x42, 0x42},
+	}
+	h := httptest.NewServer(emptySrv)
+	t.Cleanup(h.Close)
+
+	cfg := &config.ScrapeConfig{
+		BodySizeLimit:     1,
+		JobName:           "test",
+		Scheme:            "http",
+		ScrapeInterval:    model.Duration(100 * time.Millisecond),
+		ScrapeTimeout:     model.Duration(100 * time.Millisecond),
+		EnableCompression: false,
+		ServiceDiscoveryConfigs: discovery.Configs{
+			&discovery.StaticConfig{
+				{
+					Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(h.URL)}},
+				},
+			},
+		},
+	}
+
+	p, err := newScrapePool(
+		cfg,
+		&nopAppendable{},
+		0,
+		nil,
+		pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) }),
+		&Options{},
+		newTestScrapeMetrics(t),
+	)
+	require.NoError(t, err)
+	t.Cleanup(p.stop)
+
+	p.Sync([]*targetgroup.Group{
+		{
+			Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(strings.TrimPrefix(h.URL, "http://"))}},
+			Source:  "test",
+		},
+	})
+
+	require.NoError(t, p.reload(cfg))
+
+	<-time.After(1 * time.Second)
+}
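
For context, the failure mode fixed above is a reload path that rebuilds a
struct but drops one of its pointer fields: restartLoops constructed a fresh
targetScraper without copying sp.metrics, so the first scrape that hit the
body size limit dereferenced the nil metrics object. Below is a minimal,
self-contained Go sketch of that bug class; the counter and scraper names are
hypothetical stand-ins, not Prometheus's actual types.

package main

import "fmt"

// counter stands in for a metrics object that must be wired up explicitly.
type counter struct{ n int }

func (c *counter) Inc() { c.n++ }

// scraper mirrors the shape of the bug: anything that rebuilds a scraper
// must remember to carry the metrics pointer over.
type scraper struct {
	bodySizeLimit int
	metrics       *counter
}

// scrape bumps the limit counter when the body is too large; if metrics was
// never assigned, the increment dereferences a nil pointer and panics.
func (s *scraper) scrape(body []byte) error {
	if len(body) > s.bodySizeLimit {
		s.metrics.Inc() // panics when s.metrics == nil
		return fmt.Errorf("body size limit exceeded")
	}
	return nil
}

func main() {
	shared := &counter{}

	// Initial construction wires metrics correctly.
	s := &scraper{bodySizeLimit: 1, metrics: shared}
	fmt.Println(s.scrape([]byte{0x42, 0x42})) // counted and reported, no panic

	// A "reload" that rebuilds the scraper but forgets the metrics field,
	// the same omission restartLoops had before this patch.
	s = &scraper{bodySizeLimit: 1}
	defer func() {
		fmt.Println("recovered:", recover()) // nil pointer dereference
	}()
	fmt.Println(s.scrape([]byte{0x42, 0x42}))
}

Running the sketch prints the size-limit error for the first scrape and a
recovered nil-pointer panic for the rebuilt one, which is exactly what the
one-line `metrics: sp.metrics` assignment prevents.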