|
|
|
@@ -216,12 +216,13 @@ var (
|
|
|
|
|
|
|
|
|
|
// Config is the top-level configuration for Prometheus's config files.
|
|
|
|
|
type Config struct {
|
|
|
|
|
GlobalConfig GlobalConfig `yaml:"global"`
|
|
|
|
|
AlertingConfig AlertingConfig `yaml:"alerting,omitempty"`
|
|
|
|
|
RuleFiles []string `yaml:"rule_files,omitempty"`
|
|
|
|
|
ScrapeConfigs []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
|
|
|
|
|
StorageConfig StorageConfig `yaml:"storage,omitempty"`
|
|
|
|
|
TracingConfig TracingConfig `yaml:"tracing,omitempty"`
|
|
|
|
|
GlobalConfig GlobalConfig `yaml:"global"`
|
|
|
|
|
AlertingConfig AlertingConfig `yaml:"alerting,omitempty"`
|
|
|
|
|
RuleFiles []string `yaml:"rule_files,omitempty"`
|
|
|
|
|
ScrapeConfigFiles []string `yaml:"scrape_config_files,omitempty"`
|
|
|
|
|
ScrapeConfigs []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
|
|
|
|
|
StorageConfig StorageConfig `yaml:"storage,omitempty"`
|
|
|
|
|
TracingConfig TracingConfig `yaml:"tracing,omitempty"`
|
|
|
|
|
|
|
|
|
|
RemoteWriteConfigs []*RemoteWriteConfig `yaml:"remote_write,omitempty"`
|
|
|
|
|
RemoteReadConfigs []*RemoteReadConfig `yaml:"remote_read,omitempty"`
|
|
|
|
@@ -235,6 +236,9 @@ func (c *Config) SetDirectory(dir string) {
|
|
|
|
|
for i, file := range c.RuleFiles {
|
|
|
|
|
c.RuleFiles[i] = config.JoinDir(dir, file)
|
|
|
|
|
}
|
|
|
|
|
for i, file := range c.ScrapeConfigFiles {
|
|
|
|
|
c.ScrapeConfigFiles[i] = config.JoinDir(dir, file)
|
|
|
|
|
}
|
|
|
|
|
for _, c := range c.ScrapeConfigs {
|
|
|
|
|
c.SetDirectory(dir)
|
|
|
|
|
}
|
|
|
|
@@ -254,6 +258,58 @@ func (c Config) String() string {
|
|
|
|
|
return string(b)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ScrapeConfigs returns the scrape configurations.
|
|
|
|
|
func (c *Config) GetScrapeConfigs() ([]*ScrapeConfig, error) {
|
|
|
|
|
scfgs := make([]*ScrapeConfig, len(c.ScrapeConfigs))
|
|
|
|
|
|
|
|
|
|
jobNames := map[string]string{}
|
|
|
|
|
for i, scfg := range c.ScrapeConfigs {
|
|
|
|
|
// We do these checks for library users that would not call Validate in
|
|
|
|
|
// Unmarshal.
|
|
|
|
|
if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
|
|
|
|
|
return nil, err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if _, ok := jobNames[scfg.JobName]; ok {
|
|
|
|
|
return nil, fmt.Errorf("found multiple scrape configs with job name %q", scfg.JobName)
|
|
|
|
|
}
|
|
|
|
|
jobNames[scfg.JobName] = "main config file"
|
|
|
|
|
scfgs[i] = scfg
|
|
|
|
|
}
|
|
|
|
|
for _, pat := range c.ScrapeConfigFiles {
|
|
|
|
|
fs, err := filepath.Glob(pat)
|
|
|
|
|
if err != nil {
|
|
|
|
|
// The only error can be a bad pattern.
|
|
|
|
|
return nil, fmt.Errorf("error retrieving scrape config files for %q: %w", pat, err)
|
|
|
|
|
}
|
|
|
|
|
for _, filename := range fs {
|
|
|
|
|
cfg := ScrapeConfigs{}
|
|
|
|
|
content, err := os.ReadFile(filename)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fileErr(filename, err)
|
|
|
|
|
}
|
|
|
|
|
err = yaml.UnmarshalStrict(content, &cfg)
|
|
|
|
|
if err != nil {
|
|
|
|
|
return nil, fileErr(filename, err)
|
|
|
|
|
}
|
|
|
|
|
for _, scfg := range cfg.ScrapeConfigs {
|
|
|
|
|
if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
|
|
|
|
|
return nil, fileErr(filename, err)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if f, ok := jobNames[scfg.JobName]; ok {
|
|
|
|
|
return nil, fileErr(filename, fmt.Errorf("found multiple scrape configs with job name %q, first found in %s", scfg.JobName, f))
|
|
|
|
|
}
|
|
|
|
|
jobNames[scfg.JobName] = fmt.Sprintf("%q", filePath(filename))
|
|
|
|
|
|
|
|
|
|
scfg.SetDirectory(filepath.Dir(filename))
|
|
|
|
|
scfgs = append(scfgs, scfg)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return scfgs, nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// UnmarshalYAML implements the yaml.Unmarshaler interface.
|
|
|
|
|
func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
|
|
|
*c = DefaultConfig
|
|
|
|
@@ -276,26 +332,18 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
|
|
|
return fmt.Errorf("invalid rule file path %q", rf)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for _, sf := range c.ScrapeConfigFiles {
|
|
|
|
|
if !patRulePath.MatchString(sf) {
|
|
|
|
|
return fmt.Errorf("invalid scrape config file path %q", sf)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Do global overrides and validate unique names.
|
|
|
|
|
jobNames := map[string]struct{}{}
|
|
|
|
|
for _, scfg := range c.ScrapeConfigs {
|
|
|
|
|
if scfg == nil {
|
|
|
|
|
return errors.New("empty or null scrape config section")
|
|
|
|
|
}
|
|
|
|
|
// First set the correct scrape interval, then check that the timeout
|
|
|
|
|
// (inferred or explicit) is not greater than that.
|
|
|
|
|
if scfg.ScrapeInterval == 0 {
|
|
|
|
|
scfg.ScrapeInterval = c.GlobalConfig.ScrapeInterval
|
|
|
|
|
}
|
|
|
|
|
if scfg.ScrapeTimeout > scfg.ScrapeInterval {
|
|
|
|
|
return fmt.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", scfg.JobName)
|
|
|
|
|
}
|
|
|
|
|
if scfg.ScrapeTimeout == 0 {
|
|
|
|
|
if c.GlobalConfig.ScrapeTimeout > scfg.ScrapeInterval {
|
|
|
|
|
scfg.ScrapeTimeout = scfg.ScrapeInterval
|
|
|
|
|
} else {
|
|
|
|
|
scfg.ScrapeTimeout = c.GlobalConfig.ScrapeTimeout
|
|
|
|
|
}
|
|
|
|
|
if err := scfg.Validate(c.GlobalConfig.ScrapeInterval, c.GlobalConfig.ScrapeTimeout); err != nil {
|
|
|
|
|
return err
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if _, ok := jobNames[scfg.JobName]; ok {
|
|
|
|
@@ -401,6 +449,10 @@ func (c *GlobalConfig) isZero() bool {
|
|
|
|
|
c.QueryLogFile == ""
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ScrapeConfigs is the wrapper type used to unmarshal the scrape_configs
// list from a standalone scrape config file (see scrape_config_files).
type ScrapeConfigs struct {
	ScrapeConfigs []*ScrapeConfig `yaml:"scrape_configs,omitempty"`
}
|
|
|
|
|
|
|
|
|
|
// ScrapeConfig configures a scraping unit for Prometheus.
|
|
|
|
|
type ScrapeConfig struct {
|
|
|
|
|
// The job name to which the job label is set by default.
|
|
|
|
@@ -494,6 +546,28 @@ func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
func (c *ScrapeConfig) Validate(defaultInterval, defaultTimeout model.Duration) error {
|
|
|
|
|
if c == nil {
|
|
|
|
|
return errors.New("empty or null scrape config section")
|
|
|
|
|
}
|
|
|
|
|
// First set the correct scrape interval, then check that the timeout
|
|
|
|
|
// (inferred or explicit) is not greater than that.
|
|
|
|
|
if c.ScrapeInterval == 0 {
|
|
|
|
|
c.ScrapeInterval = defaultInterval
|
|
|
|
|
}
|
|
|
|
|
if c.ScrapeTimeout > c.ScrapeInterval {
|
|
|
|
|
return fmt.Errorf("scrape timeout greater than scrape interval for scrape config with job name %q", c.JobName)
|
|
|
|
|
}
|
|
|
|
|
if c.ScrapeTimeout == 0 {
|
|
|
|
|
if defaultTimeout > c.ScrapeInterval {
|
|
|
|
|
c.ScrapeTimeout = c.ScrapeInterval
|
|
|
|
|
} else {
|
|
|
|
|
c.ScrapeTimeout = defaultTimeout
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return nil
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// MarshalYAML implements the yaml.Marshaler interface.
|
|
|
|
|
func (c *ScrapeConfig) MarshalYAML() (interface{}, error) {
|
|
|
|
|
return discovery.MarshalYAMLWithInlineConfigs(c)
|
|
|
|
@@ -936,3 +1010,15 @@ func (c *RemoteReadConfig) UnmarshalYAML(unmarshal func(interface{}) error) erro
|
|
|
|
|
// Thus we just do its validation here.
|
|
|
|
|
return c.HTTPClientConfig.Validate()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// filePath returns the absolute form of filename, falling back to the
// input unchanged if it cannot be resolved.
func filePath(filename string) string {
	if abs, err := filepath.Abs(filename); err == nil {
		return abs
	}
	return filename
}
|
|
|
|
|
|
|
|
|
|
func fileErr(filename string, err error) error {
|
|
|
|
|
return fmt.Errorf("%q: %w", filePath(filename), err)
|
|
|
|
|
}
|
|
|
|
|