// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package scrape

import (
	"net/http"
	"strconv"
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v2"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"
)
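
// TestPopulateLabels verifies that PopulateLabels merges target labels with
// scrape-config defaults, applies relabeling, and validates the result,
// returning both the final and the pre-relabeling label sets (or an error
// for invalid input).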
func TestPopulateLabels(t *testing.T) {
	cases := []struct {
		in            labels.Labels
		cfg           *config.ScrapeConfig
		noDefaultPort bool
		res           labels.Labels
		resOrig       labels.Labels
		err           string
	}{
		// Regular population of scrape config options.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "value",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.InstanceLabel:       "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "value",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				"custom":                  "value",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Pre-define/overwrite scrape config labels.
		// Leave out the port and expect it to be defaulted based on the scheme.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
		},
		// Provide instance label. HTTPS port default for IPv6.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:  "[::1]",
				model.InstanceLabel: "custom-instance",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]:443",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Address label missing.
		{
			in: labels.FromStrings("custom", "value"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "no address",
		},
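		// The relabel config in the next two cases corresponds to this
		// scrape_config YAML fragment (illustrative; the keys are the
		// standard relabel_config fields):
		//
		//	relabel_configs:
		//	- action: replace
		//	  source_labels: [custom]
		//	  regex: (.*)
		//	  target_label: __address__
		//	  replacement: ${1}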
		// Address label missing, but added in relabelling.
		{
			in: labels.FromStrings("custom", "host:1234"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
				RelabelConfigs: []*relabel.Config{
					{
						Action:       relabel.Replace,
						Regex:        relabel.MustNewRegexp("(.*)"),
						SourceLabels: model.LabelNames{"custom"},
						Replacement:  "${1}",
						TargetLabel:  string(model.AddressLabel),
					},
				},
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "host:1234",
				model.InstanceLabel:       "host:1234",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
		},
		// Address label missing, but added in relabelling.
		{
			in: labels.FromStrings("custom", "host:1234"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
				RelabelConfigs: []*relabel.Config{
					{
						Action:       relabel.Replace,
						Regex:        relabel.MustNewRegexp("(.*)"),
						SourceLabels: model.LabelNames{"custom"},
						Replacement:  "${1}",
						TargetLabel:  string(model.AddressLabel),
					},
				},
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "host:1234",
				model.InstanceLabel:       "host:1234",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
		},
		// Invalid UTF-8 in label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "\xbd",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "invalid label value for \"custom\": \"\\xbd\"",
		},
		// Invalid duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
		},
		// Invalid duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
		},
		// 0 duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape interval cannot be 0",
		},
		// 0 duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape timeout cannot be 0",
		},
		// Timeout greater than interval.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     nil,
			resOrig: nil,
			err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
		},
		// Don't attach default port.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (http).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:80",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "http",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (https).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:443",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
	}
	for _, c := range cases {
		in := c.in.Copy()

		res, orig, err := PopulateLabels(c.in, c.cfg, c.noDefaultPort)
		if c.err != "" {
			require.EqualError(t, err, c.err)
		} else {
			require.NoError(t, err)
		}
		// PopulateLabels must not mutate the label set it was given.
		require.Equal(t, c.in, in)
		require.Equal(t, c.res, res)
		require.Equal(t, c.resOrig, orig)
	}
}
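
// loadConfiguration parses a YAML configuration string into a config.Config,
// failing the test on any parse error.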
func loadConfiguration(t *testing.T, c string) *config.Config {
	t.Helper()

	cfg := &config.Config{}
	if err := yaml.UnmarshalStrict([]byte(c), cfg); err != nil {
		t.Fatalf("Unable to load YAML config: %s", err)
	}
	return cfg
}
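
// noopLoop returns a scrape loop stub whose start and stop hooks do nothing,
// for tests that only care about when loops are created.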
func noopLoop() loop {
	return &testLoop{
		startFunc: func(interval, timeout time.Duration, errc chan<- error) {},
		stopFunc:  func() {},
	}
}
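
// TestManagerApplyConfig asserts that scrape pools are reloaded only when the
// applied configuration actually changes, and never for invalid configs.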
func TestManagerApplyConfig(t *testing.T) {
	// Valid initial configuration.
	cfgText1 := `
scrape_configs:
 - job_name: job1
   static_configs:
   - targets: ["foo:9090"]
`
	// Invalid configuration (the CA file does not exist).
	cfgText2 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
   tls_config:
     ca_file: /not/existing/ca/file
`
	// Valid configuration that differs from the initial one.
	cfgText3 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
`
	var (
		cfg1 = loadConfiguration(t, cfgText1)
		cfg2 = loadConfiguration(t, cfgText2)
		cfg3 = loadConfiguration(t, cfgText3)

		ch = make(chan struct{}, 1)
	)

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)
	// newLoop signals on ch so the test can detect when a pool reload
	// recreates its scrape loops.
	newLoop := func(scrapeLoopOptions) loop {
		ch <- struct{}{}
		return noopLoop()
	}
	sp := &scrapePool{
		appendable: &nopAppendable{},
		activeTargets: map[uint64]*Target{
			1: {},
		},
		loops: map[uint64]loop{
			1: noopLoop(),
		},
		newLoop: newLoop,
		logger:  nil,
		config:  cfg1.ScrapeConfigs[0],
		client:  http.DefaultClient,
	}
	scrapeManager.scrapePools = map[string]*scrapePool{
		"job1": sp,
	}

	// Apply the initial configuration; it matches the pool's current config,
	// so no reload is expected.
	if err := scrapeManager.ApplyConfig(cfg1); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload fails.
	if err := scrapeManager.ApplyConfig(cfg2); err == nil {
		t.Fatalf("expected an error but got none")
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}

	// Apply a configuration for which the reload succeeds.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
	default:
		t.Fatal("reload didn't happen")
	}

	// Re-applying the same configuration shouldn't trigger a reload.
	if err := scrapeManager.ApplyConfig(cfg3); err != nil {
		t.Fatalf("unable to apply configuration: %s", err)
	}
	select {
	case <-ch:
		t.Fatal("reload happened")
	default:
	}
}
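
// TestManagerTargetsUpdates checks that the manager drains target-group
// updates promptly and triggers a scrape-loop reload after receiving them.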
func TestManagerTargetsUpdates(t *testing.T) {
	opts := Options{}
	m := NewManager(&opts, nil, nil)

	ts := make(chan map[string][]*targetgroup.Group)
	go m.Run(ts)
	defer m.Stop()

	tgSent := make(map[string][]*targetgroup.Group)
	for x := 0; x < 10; x++ {
		tgSent[strconv.Itoa(x)] = []*targetgroup.Group{
			{
				Source: strconv.Itoa(x),
			},
		}

		select {
		case ts <- tgSent:
		case <-time.After(10 * time.Millisecond):
			t.Error("Scrape manager's channel remained blocked after the set threshold.")
		}
	}

	m.mtxScrape.Lock()
	tsetActual := m.targetSets
	m.mtxScrape.Unlock()

	// Make sure all updates have been received.
	require.Equal(t, tgSent, tsetActual)

	select {
	case <-m.triggerReload:
	default:
		t.Error("No scrape-loop reload was triggered after the targets update.")
	}
}
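
// TestSetJitter verifies that the jitter seed is derived from the external
// labels: it must be non-zero and must differ across label sets.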
func TestSetJitter(t *testing.T) {
	getConfig := func(prometheus string) *config.Config {
		cfgText := `
global:
 external_labels:
   prometheus: '` + prometheus + `'
`

		cfg := &config.Config{}
		if err := yaml.UnmarshalStrict([]byte(cfgText), cfg); err != nil {
			t.Fatalf("Unable to load YAML config: %s", err)
		}

		return cfg
	}

	opts := Options{}
	scrapeManager := NewManager(&opts, nil, nil)

	// Load the first config.
	cfg1 := getConfig("ha1")
	if err := scrapeManager.setJitterSeed(cfg1.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	jitter1 := scrapeManager.jitterSeed

	if jitter1 == 0 {
		t.Error("Jitter seed should be a non-zero hash of the external labels")
	}

	// Load the second config.
	cfg2 := getConfig("ha2")
	if err := scrapeManager.setJitterSeed(cfg2.GlobalConfig.ExternalLabels); err != nil {
		t.Error(err)
	}
	jitter2 := scrapeManager.jitterSeed

	if jitter1 == jitter2 {
		t.Error("Jitter seed should differ for different sets of external labels")
	}
}