scrape: in tests use labels.FromStrings

And a few cases of `EmptyLabels()`.
Replacing code which assumes the internal structure of `Labels`.

Signed-off-by: Bryan Boreham <bjboreham@gmail.com>
pull/11292/head
Authored by Bryan Boreham 3 years ago; committed by Julien Pivotto
parent ac02cfcb79
commit 14780c3b4e
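
For context, the pattern being removed and its replacements look roughly like the sketch below. This is an illustrative, standalone snippet rather than code from the diff; it only uses API that the diff itself relies on (labels.FromStrings, labels.EmptyLabels) plus labels.Equal for comparison.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// Old style: a literal slice of labels.Label values. This only compiles
	// while Labels is exposed as a slice, i.e. it assumes the internal
	// structure the commit message refers to.
	oldStyle := labels.Labels{
		labels.Label{Name: "label", Value: "name"},
	}

	// New style: build label sets through the package API, so the code keeps
	// working however Labels is represented internally.
	newStyle := labels.FromStrings("label", "name") // alternating name, value pairs
	empty := labels.EmptyLabels()                   // instead of nil or labels.Labels{}

	fmt.Println(labels.Equal(oldStyle, newStyle)) // true: same label set either way
	fmt.Println(empty)
}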

@@ -149,8 +149,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "no address",
 	},
 	// Address label missing, but added in relabelling.
@@ -242,8 +242,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "invalid label value for \"custom\": \"\\xbd\"",
 	},
 	// Invalid duration in interval label.
@@ -259,8 +259,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
 	},
 	// Invalid duration in timeout label.
@@ -276,8 +276,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
 	},
 	// 0 interval in timeout label.
@@ -293,8 +293,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "scrape interval cannot be 0",
 	},
 	// 0 duration in timeout label.
@@ -310,8 +310,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "scrape timeout cannot be 0",
 	},
 	// Timeout less than interval.
@@ -328,8 +328,8 @@ func TestPopulateLabels(t *testing.T) {
 			ScrapeInterval: model.Duration(time.Second),
 			ScrapeTimeout:  model.Duration(time.Second),
 		},
-		res:     nil,
-		resOrig: nil,
+		res:     labels.EmptyLabels(),
+		resOrig: labels.EmptyLabels(),
 		err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
 	},
 	// Don't attach default port.
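
The expected res/resOrig values above move from nil to labels.EmptyLabels() for the same reason as the constructor changes: a bare nil only stands in for an empty label set while Labels happens to be a slice. As a minimal, hypothetical helper in the same spirit (not part of this change), an assertion can check for emptiness without assuming the representation:

package example

import (
	"testing"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/stretchr/testify/require"
)

// requireEmptyLabels is a hypothetical test helper: it checks that a returned
// label set is empty via labels.Equal instead of comparing against nil.
func requireEmptyLabels(t *testing.T, got labels.Labels) {
	t.Helper()
	require.True(t, labels.Equal(labels.EmptyLabels(), got))
}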

@@ -116,22 +116,12 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {
 	}
 	sp.activeTargets = make(map[uint64]*Target)
 	t1 := &Target{
-		discoveredLabels: labels.Labels{
-			labels.Label{
-				Name:  "label",
-				Value: "name",
-			},
-		},
+		discoveredLabels: labels.FromStrings("label", "name"),
 	}
 	sp.activeTargets[t1.hash()] = t1
 	t2 := &Target{
-		discoveredLabels: labels.Labels{
-			labels.Label{
-				Name:  "labelNew",
-				Value: "nameNew",
-			},
-		},
+		discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
 	}
 	sp.sync([]*Target{t2})
@@ -1587,7 +1577,7 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
 		nil, nil, nil,
 		func(l labels.Labels) labels.Labels {
 			if l.Has("deleteme") {
-				return nil
+				return labels.EmptyLabels()
 			}
 			return l
 		},
@@ -2517,7 +2507,7 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
 		nil, nil,
 		func(l labels.Labels) labels.Labels {
 			if l.Has("drop") {
-				return labels.Labels{}
+				return labels.FromStrings("no", "name") // This label set will trigger an error.
 			}
 			return l
 		},
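
The two mutator hunks above hinge on a behavioural detail worth noting: once Labels is opaque, an empty result from the sample-mutation hook (as in TestScrapeLoopAppendSampleLimit) is the signal to drop the sample, so TestScrapeLoopDiscardUnnamedMetrics switches to returning a non-empty set that merely lacks a metric name, keeping its error path reachable. A small standalone sketch of that distinction, assuming those semantics:

package example

import "github.com/prometheus/prometheus/model/labels"

// mutateForTest mirrors the two relabel hooks in the hunks above. An empty
// result asks the scrape loop to drop the sample; a non-empty set without a
// __name__ label is kept and is then rejected because the metric is unnamed.
func mutateForTest(l labels.Labels) labels.Labels {
	switch {
	case l.Has("deleteme"):
		return labels.EmptyLabels()
	case l.Has("drop"):
		return labels.FromStrings("no", "name")
	default:
		return l
	}
}
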
@@ -2628,20 +2618,7 @@ func TestReuseScrapeCache(t *testing.T) {
 		}
 		sp, _ = newScrapePool(cfg, app, 0, nil, &Options{})
 		t1    = &Target{
-			discoveredLabels: labels.Labels{
-				labels.Label{
-					Name:  "labelNew",
-					Value: "nameNew",
-				},
-				labels.Label{
-					Name:  "labelNew1",
-					Value: "nameNew1",
-				},
-				labels.Label{
-					Name:  "labelNew2",
-					Value: "nameNew2",
-				},
-			},
+			discoveredLabels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
 		}
 		proxyURL, _ = url.Parse("http://localhost:2128")
 	)
@@ -2841,12 +2818,7 @@ func TestReuseCacheRace(t *testing.T) {
 		}
 		sp, _ = newScrapePool(cfg, app, 0, nil, &Options{})
 		t1    = &Target{
-			discoveredLabels: labels.Labels{
-				labels.Label{
-					Name:  "labelNew",
-					Value: "nameNew",
-				},
-			},
+			discoveredLabels: labels.FromStrings("labelNew", "nameNew"),
 		}
 	)
 	defer sp.stop()
