
scrape: Rewrite scrape loop options as a struct (#5314)

Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
Authored by Julien Pivotto, committed by Brian Brazil
commit 04ce817c49
1. scrape/manager_test.go (2 changed lines)
2. scrape/scrape.go (42 changed lines)
3. scrape/scrape_test.go (15 changed lines)
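The diff below replaces the five positional parameters of newLoop with a single scrapeLoopOptions struct, so call sites name only the fields they set and omitted fields keep their zero values. As a minimal sketch of that pattern, here is a standalone Go program; the type and field names mirror the diff, but target, scraper, and relabelConfig are simplified placeholders, not the real Prometheus scrape package types.

// Standalone sketch of the options-struct pattern applied in this commit.
// The types below are simplified stand-ins, not the real scrape package types.
package main

import "fmt"

type target struct{ name string }
type scraper struct{ url string }
type relabelConfig struct{ action string }

// All former positional parameters of newLoop travel in one struct.
type scrapeLoopOptions struct {
	target      *target
	scraper     scraper
	limit       int
	honorLabels bool
	mrc         []relabelConfig
}

// The constructor now takes a single options value instead of five arguments.
func newLoop(opts scrapeLoopOptions) {
	fmt.Printf("loop for %q (limit=%d, honorLabels=%v, relabel rules=%d)\n",
		opts.target.name, opts.limit, opts.honorLabels, len(opts.mrc))
}

func main() {
	// Call sites set only the fields they care about; omitted fields
	// keep their zero values, as in the updated tests below.
	newLoop(scrapeLoopOptions{target: &target{name: "example"}})
	newLoop(scrapeLoopOptions{
		target:      &target{name: "example"},
		limit:       100,
		honorLabels: true,
	})
}

With this shape, a future option only means adding a struct field rather than touching every caller and test fake.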

scrape/manager_test.go (2 changed lines)

@@ -274,7 +274,7 @@ scrape_configs:
 	)
 	scrapeManager := NewManager(nil, nil)
 
-	newLoop := func(_ *Target, s scraper, _ int, _ bool, _ []*relabel.Config) loop {
+	newLoop := func(scrapeLoopOptions) loop {
 		ch <- struct{}{}
 		return noopLoop()
 	}

scrape/scrape.go (42 changed lines)

@@ -158,7 +158,15 @@ type scrapePool struct {
 	cancel context.CancelFunc
 
 	// Constructor for new scrape loops. This is settable for testing convenience.
-	newLoop func(*Target, scraper, int, bool, []*relabel.Config) loop
+	newLoop func(scrapeLoopOptions) loop
+}
+
+type scrapeLoopOptions struct {
+	target      *Target
+	scraper     scraper
+	limit       int
+	honorLabels bool
+	mrc         []*relabel.Config
 }
 
 const maxAheadTime = 10 * time.Minute
@@ -189,24 +197,26 @@ func newScrapePool(cfg *config.ScrapeConfig, app Appendable, logger log.Logger)
 		loops: map[uint64]loop{},
 		logger: logger,
 	}
 
-	sp.newLoop = func(t *Target, s scraper, limit int, honor bool, mrc []*relabel.Config) loop {
+	sp.newLoop = func(opts scrapeLoopOptions) loop {
 		// Update the targets retrieval function for metadata to a new scrape cache.
 		cache := newScrapeCache()
-		t.setMetadataStore(cache)
+		opts.target.setMetadataStore(cache)
 		return newScrapeLoop(
 			ctx,
-			s,
-			log.With(logger, "target", t),
+			opts.scraper,
+			log.With(logger, "target", opts.target),
 			buffers,
-			func(l labels.Labels) labels.Labels { return mutateSampleLabels(l, t, honor, mrc) },
-			func(l labels.Labels) labels.Labels { return mutateReportSampleLabels(l, t) },
+			func(l labels.Labels) labels.Labels {
+				return mutateSampleLabels(l, opts.target, opts.honorLabels, opts.mrc)
+			},
+			func(l labels.Labels) labels.Labels { return mutateReportSampleLabels(l, opts.target) },
 			func() storage.Appender {
 				app, err := app.Appender()
 				if err != nil {
 					panic(err)
 				}
-				return appender(app, limit)
+				return appender(app, opts.limit)
 			},
 			cache,
 		)
@@ -285,7 +295,13 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 		var (
 			t       = sp.activeTargets[fp]
 			s       = &targetScraper{Target: t, client: sp.client, timeout: timeout}
-			newLoop = sp.newLoop(t, s, limit, honor, mrc)
+			newLoop = sp.newLoop(scrapeLoopOptions{
+				target:      t,
+				scraper:     s,
+				limit:       limit,
+				honorLabels: honor,
+				mrc:         mrc,
+			})
 		)
 		wg.Add(1)
@@ -360,7 +376,13 @@ func (sp *scrapePool) sync(targets []*Target) {
 		if _, ok := sp.activeTargets[hash]; !ok {
 			s := &targetScraper{Target: t, client: sp.client, timeout: timeout}
-			l := sp.newLoop(t, s, limit, honor, mrc)
+			l := sp.newLoop(scrapeLoopOptions{
+				target:      t,
+				scraper:     s,
+				limit:       limit,
+				honorLabels: honor,
+				mrc:         mrc,
+			})
 
 			sp.activeTargets[hash] = t
 			sp.loops[hash] = l

scrape/scrape_test.go (15 changed lines)

@@ -219,7 +219,7 @@ func TestScrapePoolReload(t *testing.T) {
 	}
 	// On starting to run, new loops created on reload check whether their preceding
 	// equivalents have been stopped.
-	newLoop := func(_ *Target, s scraper, _ int, _ bool, _ []*relabel.Config) loop {
+	newLoop := func(opts scrapeLoopOptions) loop {
 		l := &testLoop{}
 		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
 			if interval != 3*time.Second {
@@ -229,8 +229,8 @@ func TestScrapePoolReload(t *testing.T) {
 				t.Errorf("Expected scrape timeout %d but got %d", 2*time.Second, timeout)
 			}
 			mtx.Lock()
-			if !stopped[s.(*targetScraper).hash()] {
-				t.Errorf("Scrape loop for %v not stopped yet", s.(*targetScraper))
+			if !stopped[opts.scraper.(*targetScraper).hash()] {
+				t.Errorf("Scrape loop for %v not stopped yet", opts.scraper.(*targetScraper))
 			}
 			mtx.Unlock()
 		}
@@ -307,7 +307,9 @@ func TestScrapePoolAppender(t *testing.T) {
 	app := &nopAppendable{}
 	sp, _ := newScrapePool(cfg, app, nil)
 
-	loop := sp.newLoop(&Target{}, nil, 0, false, nil)
+	loop := sp.newLoop(scrapeLoopOptions{
+		target: &Target{},
+	})
 	appl, ok := loop.(*scrapeLoop)
 	if !ok {
 		t.Fatalf("Expected scrapeLoop but got %T", loop)
@@ -322,7 +324,10 @@ func TestScrapePoolAppender(t *testing.T) {
 		t.Fatalf("Expected base appender but got %T", tl.Appender)
 	}
 
-	loop = sp.newLoop(&Target{}, nil, 100, false, nil)
+	loop = sp.newLoop(scrapeLoopOptions{
+		target: &Target{},
+		limit:  100,
+	})
 	appl, ok = loop.(*scrapeLoop)
 	if !ok {
 		t.Fatalf("Expected scrapeLoop but got %T", loop)
