@@ -139,7 +139,7 @@ func TestScrapePoolReload(t *testing.T) {
	}
	// On starting to run, new loops created on reload check whether their preceding
	// equivalents have been stopped.
	newLoop := func(ctx context.Context, s scraper, app storage.SampleAppender, mut func(storage.SampleAppender) storage.SampleAppender, reportApp storage.SampleAppender, sampleLimit uint) loop {
	newLoop := func(ctx context.Context, s scraper, app storage.SampleAppender, tl model.LabelSet, cfg *config.ScrapeConfig) loop {
		l := &testLoop{}
		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
			if interval != 3*time.Second {
@@ -222,54 +222,42 @@ func TestScrapePoolReload(t *testing.T) {
	}
}

func TestScrapePoolReportAppender(t *testing.T) {
func TestScrapeLoopWrapSampleAppender(t *testing.T) {
	cfg := &config.ScrapeConfig{
		MetricRelabelConfigs: []*config.RelabelConfig{
			{}, {}, {},
			{
				Action: config.RelabelDrop,
				SourceLabels: model.LabelNames{"__name__"},
				Regex: config.MustNewRegexp("does_not_match_.*"),
			},
			{
				Action: config.RelabelDrop,
				SourceLabels: model.LabelNames{"__name__"},
				Regex: config.MustNewRegexp("does_not_match_either_*"),
			},
		},
	}

	target := newTestTarget("example.com:80", 10*time.Millisecond, nil)
	app := &nopAppender{}

	sp := newScrapePool(context.Background(), cfg, app)

	cfg.HonorLabels = false
	wrapped := sp.reportAppender(target)

	rl, ok := wrapped.(ruleLabelsAppender)
	if !ok {
		t.Fatalf("Expected ruleLabelsAppender but got %T", wrapped)
	}
	if rl.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", rl.SampleAppender)
	}
	sl := sp.newLoop(
		sp.ctx,
		&targetScraper{Target: target, client: sp.client},
		sp.appender,
		target.Labels(),
		sp.config,
	).(*scrapeLoop)
	wrapped, counting := sl.wrapAppender(sl.appender)
	wrapped.Append(&model.Sample{})

	cfg.HonorLabels = true
	wrapped = sp.reportAppender(target)

	hl, ok := wrapped.(ruleLabelsAppender)
	if !ok {
		t.Fatalf("Expected ruleLabelsAppender but got %T", wrapped)
	if counting.count != 1 {
		t.Errorf("Expected count of 1, got %d", counting.count)
	}
	if hl.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", hl.SampleAppender)
	}
}

func TestScrapePoolSampleAppender(t *testing.T) {
	cfg := &config.ScrapeConfig{
		MetricRelabelConfigs: []*config.RelabelConfig{
			{}, {}, {},
		},
	}

	target := newTestTarget("example.com:80", 10*time.Millisecond, nil)
	app := &nopAppender{}

	sp := newScrapePool(context.Background(), cfg, app)

	cfg.HonorLabels = false
	wrapped := sp.sampleMutator(target)(app)

	rl, ok := wrapped.(ruleLabelsAppender)
	if !ok {
@@ -279,12 +267,28 @@ func TestScrapePoolSampleAppender(t *testing.T) {
	if !ok {
		t.Fatalf("Expected relabelAppender but got %T", rl.SampleAppender)
	}
	if re.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", re.SampleAppender)
	co, ok := re.SampleAppender.(*countingAppender)
	if !ok {
		t.Fatalf("Expected *countingAppender but got %T", re.SampleAppender)
	}
	if co.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", co.SampleAppender)
	}

	cfg.HonorLabels = true
	wrapped = sp.sampleMutator(target)(app)
	sl = sp.newLoop(
		sp.ctx,
		&targetScraper{Target: target, client: sp.client},
		sp.appender,
		target.Labels(),
		sp.config,
	).(*scrapeLoop)
	wrapped, counting = sl.wrapAppender(sl.appender)
	wrapped.Append(&model.Sample{})

	if counting.count != 1 {
		t.Errorf("Expected count of 1, got %d", counting.count)
	}

	hl, ok := wrapped.(honorLabelsAppender)
	if !ok {
@@ -294,8 +298,12 @@ func TestScrapePoolSampleAppender(t *testing.T) {
	if !ok {
		t.Fatalf("Expected relabelAppender but got %T", hl.SampleAppender)
	}
	if re.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", re.SampleAppender)
	co, ok = re.SampleAppender.(*countingAppender)
	if !ok {
		t.Fatalf("Expected *countingAppender but got %T", re.SampleAppender)
	}
	if co.SampleAppender != app {
		t.Fatalf("Expected base appender but got %T", co.SampleAppender)
	}
}
@@ -310,15 +318,14 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
	}

	testCases := []struct {
		scrapedSamples model.Samples
		scrapeError error
		scrapeConfig config.ScrapeConfig
		expectedReportedSamples model.Samples
		expectedIngestedSamplesCount int
		scrapedSamples model.Samples
		scrapeConfig *config.ScrapeConfig
		expectedReportedSamples model.Samples
		expectedPostRelabelSamplesCount int
	}{
		{
		{ // 0
			scrapedSamples: readSamples,
			scrapeError: nil,
			scrapeConfig: &config.ScrapeConfig{},
			expectedReportedSamples: model.Samples{
				{
					Metric: model.Metric{"__name__": "up"},
@@ -326,6 +333,7 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
				},
				{
					Metric: model.Metric{"__name__": "scrape_duration_seconds"},
					Value: 42,
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_scraped"},
@@ -336,12 +344,11 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
					Value: 2,
				},
			},
			expectedIngestedSamplesCount: 2,
			expectedPostRelabelSamplesCount: 2,
		},
		{
		{ // 1
			scrapedSamples: readSamples,
			scrapeError: nil,
			scrapeConfig: config.ScrapeConfig{
			scrapeConfig: &config.ScrapeConfig{
				MetricRelabelConfigs: []*config.RelabelConfig{
					{
						Action: config.RelabelDrop,
@@ -357,6 +364,7 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
				},
				{
					Metric: model.Metric{"__name__": "scrape_duration_seconds"},
					Value: 42,
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_scraped"},
@@ -367,12 +375,11 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
					Value: 1,
				},
			},
			expectedIngestedSamplesCount: 1,
			expectedPostRelabelSamplesCount: 1,
		},
		{
		{ // 2
			scrapedSamples: readSamples,
			scrapeError: nil,
			scrapeConfig: config.ScrapeConfig{
			scrapeConfig: &config.ScrapeConfig{
				SampleLimit: 1,
				MetricRelabelConfigs: []*config.RelabelConfig{
					{
@@ -389,6 +396,7 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
				},
				{
					Metric: model.Metric{"__name__": "scrape_duration_seconds"},
					Value: 42,
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_scraped"},
@@ -399,12 +407,11 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
					Value: 1,
				},
			},
			expectedIngestedSamplesCount: 1,
			expectedPostRelabelSamplesCount: 1,
		},
		{
		{ // 3
			scrapedSamples: readSamples,
			scrapeError: nil,
			scrapeConfig: config.ScrapeConfig{
			scrapeConfig: &config.ScrapeConfig{
				SampleLimit: 1,
			},
			expectedReportedSamples: model.Samples{
@@ -414,6 +421,7 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
				},
				{
					Metric: model.Metric{"__name__": "scrape_duration_seconds"},
					Value: 42,
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_scraped"},
@@ -424,53 +432,31 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {
					Value: 2,
				},
			},
			expectedIngestedSamplesCount: 0,
		},
		{
			scrapedSamples: model.Samples{},
			scrapeError: fmt.Errorf("error"),
			expectedReportedSamples: model.Samples{
				{
					Metric: model.Metric{"__name__": "up"},
					Value: 0,
				},
				{
					Metric: model.Metric{"__name__": "scrape_duration_seconds"},
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_scraped"},
					Value: 0,
				},
				{
					Metric: model.Metric{"__name__": "scrape_samples_post_metric_relabeling"},
					Value: 0,
				},
			},
			expectedIngestedSamplesCount: 0,
			expectedPostRelabelSamplesCount: 2,
		},
	}

	for i, test := range testCases {
		ingestedSamples := &bufferAppender{buffer: model.Samples{}}
		reportedSamples := &bufferAppender{buffer: model.Samples{}}

		target := newTestTarget("example.com:80", 10*time.Millisecond, nil)
		sp := newScrapePool(context.Background(), &test.scrapeConfig, ingestedSamples)

		scraper := &testScraper{}
		sl := newScrapeLoop(context.Background(), scraper, ingestedSamples, sp.sampleMutator(target), reportedSamples, test.scrapeConfig.SampleLimit).(*scrapeLoop)
		sl.processScrapeResult(test.scrapedSamples, test.scrapeError, time.Unix(0, 0))

		// Ignore value of scrape_duration_seconds, as it's time dependant.
		reportedSamples.buffer[1].Value = 0
		sl := newScrapeLoop(context.Background(), scraper, ingestedSamples, target.Labels(), test.scrapeConfig).(*scrapeLoop)
		num, err := sl.append(test.scrapedSamples)
		sl.report(time.Unix(0, 0), 42*time.Second, len(test.scrapedSamples), num, err)
		reportedSamples := ingestedSamples.buffer
		if err == nil {
			reportedSamples = reportedSamples[num:]
		}

		if !reflect.DeepEqual(reportedSamples.buffer, test.expectedReportedSamples) {
		if !reflect.DeepEqual(reportedSamples, test.expectedReportedSamples) {
			t.Errorf("Reported samples did not match expected metrics for case %d", i)
			t.Errorf("Expected: %v", test.expectedReportedSamples)
			t.Fatalf("Got: %v", reportedSamples.buffer)
			t.Fatalf("Got: %v", reportedSamples)
		}
		if test.expectedIngestedSamplesCount != len(ingestedSamples.buffer) {
			t.Fatalf("Ingested samples %d did not match expected value %d", len(ingestedSamples.buffer), test.expectedIngestedSamplesCount)
		if test.expectedPostRelabelSamplesCount != num {
			t.Fatalf("Case %d: Ingested samples %d did not match expected value %d", i, num, test.expectedPostRelabelSamplesCount)
		}
	}
@@ -478,10 +464,10 @@ func TestScrapeLoopSampleProcessing(t *testing.T) {

func TestScrapeLoopStop(t *testing.T) {
	scraper := &testScraper{}
	sl := newScrapeLoop(context.Background(), scraper, nil, nil, nil, 0)
	sl := newScrapeLoop(context.Background(), scraper, nil, nil, &config.ScrapeConfig{})

	// The scrape pool synchronizes on stopping scrape loops. However, new scrape
	// loops are syarted asynchronously. Thus it's possible, that a loop is stopped
	// loops are started asynchronously. Thus it's possible, that a loop is stopped
	// again before having started properly.
	// Stopping not-yet-started loops must block until the run method was called and exited.
	// The run method must exit immediately.
@@ -528,15 +514,13 @@ func TestScrapeLoopRun(t *testing.T) {
		signal = make(chan struct{})
		errc = make(chan error)

		scraper = &testScraper{}
		app = &nopAppender{}
		mut = func(storage.SampleAppender) storage.SampleAppender { return &nopAppender{} }
		reportApp = &nopAppender{}
		scraper = &testScraper{}
		app = &nopAppender{}
	)
	defer close(signal)

	ctx, cancel := context.WithCancel(context.Background())
	sl := newScrapeLoop(ctx, scraper, app, mut, reportApp, 0)
	sl := newScrapeLoop(ctx, scraper, app, nil, &config.ScrapeConfig{})

	// The loop must terminate during the initial offset if the context
	// is canceled.
@@ -574,7 +558,7 @@ func TestScrapeLoopRun(t *testing.T) {
	}

	ctx, cancel = context.WithCancel(context.Background())
	sl = newScrapeLoop(ctx, scraper, app, mut, reportApp, 0)
	sl = newScrapeLoop(ctx, scraper, app, nil, &config.ScrapeConfig{})

	go func() {
		sl.run(time.Second, 100*time.Millisecond, errc)
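
The hunks above lean on test doubles such as bufferAppender and countingAppender whose definitions lie outside the diff. For orientation only, here is a minimal sketch of what such SampleAppender test helpers could look like; the package name, the trimmed-down appender interface, and the type names bufferedAppender/countingWrapper are illustrative assumptions, not the repository's actual code.

// appendersketch is a hypothetical package illustrating SampleAppender-style
// test doubles like the ones the diff exercises.
package appendersketch

import "github.com/prometheus/common/model"

// appender mirrors only the Append half of the interface the tests rely on
// (assumed shape, kept minimal so the sketch is self-contained).
type appender interface {
	Append(*model.Sample) error
}

// bufferedAppender records every appended sample so a test can inspect them,
// similar in spirit to the bufferAppender used by TestScrapeLoopSampleProcessing.
type bufferedAppender struct {
	buffer model.Samples
}

func (a *bufferedAppender) Append(s *model.Sample) error {
	a.buffer = append(a.buffer, s)
	return nil
}

// countingWrapper forwards samples to the next appender and counts how many
// pass through, which is the kind of invariant the counting.count checks assert.
type countingWrapper struct {
	next  appender
	count int
}

func (c *countingWrapper) Append(s *model.Sample) error {
	c.count++
	return c.next.Append(s)
}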