// Copyright 2013 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package scrape

import (
	"context"
	"fmt"
	"net/http"
	"net/http/httptest"
	"net/url"
	"os"
	"strconv"
	"sync"
	"testing"
	"time"

	"github.com/go-kit/log"
	"github.com/gogo/protobuf/proto"
	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
	"github.com/prometheus/common/model"
	"github.com/stretchr/testify/require"
	"google.golang.org/protobuf/types/known/timestamppb"
	"gopkg.in/yaml.v2"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery"
	"github.com/prometheus/prometheus/discovery/targetgroup"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"
	"github.com/prometheus/prometheus/util/runutil"
	"github.com/prometheus/prometheus/util/testutil"
)

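// TestPopulateLabels verifies that PopulateLabels applies scrape config
// defaults and relabelling to target labels and surfaces validation errors.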
func TestPopulateLabels(t *testing.T) {
	cases := []struct {
		in            labels.Labels
		cfg           *config.ScrapeConfig
		noDefaultPort bool
		res           labels.Labels
		resOrig       labels.Labels
		err           string
	}{
		// Regular population of scrape config options.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "value",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.InstanceLabel:       "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "value",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				"custom":                  "value",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Pre-define/overwrite scrape config labels.
		// Leave out the port and expect it to be defaulted based on the scheme.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/custom",
				model.JobLabel:            "custom-job",
				model.ScrapeIntervalLabel: "2s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
		},
		// Provide instance label. HTTPS port default for IPv6.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:  "[::1]",
				model.InstanceLabel: "custom-instance",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]:443",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "[::1]",
				model.InstanceLabel:       "custom-instance",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Address label missing.
		{
			in: labels.FromStrings("custom", "value"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "no address",
		},
		// Address label missing, but added in relabelling.
		{
			in: labels.FromStrings("custom", "host:1234"),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
				RelabelConfigs: []*relabel.Config{
					{
						Action:       relabel.Replace,
						Regex:        relabel.MustNewRegexp("(.*)"),
						SourceLabels: model.LabelNames{"custom"},
						Replacement:  "${1}",
						TargetLabel:  string(model.AddressLabel),
					},
				},
			},
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "host:1234",
				model.InstanceLabel:       "host:1234",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
				"custom":                  "host:1234",
			}),
		},
		// Invalid UTF-8 in label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:1000",
				"custom":           "\xbd",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "invalid label value for \"custom\": \"\\xbd\"",
		},
		// Invalid duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "error parsing scrape interval: unknown unit \"notseconds\" in duration \"2notseconds\"",
		},
		// Invalid duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "2notseconds",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "error parsing scrape timeout: unknown unit \"notseconds\" in duration \"2notseconds\"",
		},
		// 0 duration in interval label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape interval cannot be 0",
		},
		// 0 duration in timeout label.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:       "1.2.3.4:1000",
				model.ScrapeTimeoutLabel: "0s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape timeout cannot be 0",
		},
		// Timeout greater than interval.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:1000",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "2s",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			res:     labels.EmptyLabels(),
			resOrig: labels.EmptyLabels(),
			err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
		},
		// Don't attach default port.
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (http).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:80",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "http",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:80",
				model.SchemeLabel:         "http",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
		// Remove default port (https).
		{
			in: labels.FromMap(map[string]string{
				model.AddressLabel: "1.2.3.4:443",
			}),
			cfg: &config.ScrapeConfig{
				Scheme:         "https",
				MetricsPath:    "/metrics",
				JobName:        "job",
				ScrapeInterval: model.Duration(time.Second),
				ScrapeTimeout:  model.Duration(time.Second),
			},
			noDefaultPort: true,
			res: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4",
				model.InstanceLabel:       "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
			resOrig: labels.FromMap(map[string]string{
				model.AddressLabel:        "1.2.3.4:443",
				model.SchemeLabel:         "https",
				model.MetricsPathLabel:    "/metrics",
				model.JobLabel:            "job",
				model.ScrapeIntervalLabel: "1s",
				model.ScrapeTimeoutLabel:  "1s",
			}),
		},
	}
	for _, c := range cases {
		in := c.in.Copy()

		res, orig, err := PopulateLabels(labels.NewBuilder(c.in), c.cfg, c.noDefaultPort)
		if c.err != "" {
			require.EqualError(t, err, c.err)
		} else {
			require.NoError(t, err)
		}
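		// PopulateLabels must not mutate its input labels.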
		require.Equal(t, c.in, in)
		testutil.RequireEqual(t, c.res, res)
		testutil.RequireEqual(t, c.resOrig, orig)
	}
}

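// loadConfiguration parses the given YAML configuration and fails the test on error.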
func loadConfiguration(t testing.TB, c string) *config.Config {
	t.Helper()

	cfg := &config.Config{}
	err := yaml.UnmarshalStrict([]byte(c), cfg)
	require.NoError(t, err, "Unable to load YAML config.")

	return cfg
}

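// noopLoop returns a scrape loop whose start and stop functions do nothing.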
func noopLoop() loop {
	return &testLoop{
		startFunc: func(interval, timeout time.Duration, errc chan<- error) {},
		stopFunc:  func() {},
	}
}

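// TestManagerApplyConfig verifies that scrape pools are only reloaded when the
// applied configuration changes and is valid.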
func TestManagerApplyConfig(t *testing.T) {
	// Valid initial configuration.
	cfgText1 := `
scrape_configs:
 - job_name: job1
   static_configs:
   - targets: ["foo:9090"]
`
	// Invalid configuration.
	cfgText2 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
   tls_config:
     ca_file: /not/existing/ca/file
`
	// Valid configuration.
	cfgText3 := `
scrape_configs:
 - job_name: job1
   scheme: https
   static_configs:
   - targets: ["foo:9090"]
`
	var (
		cfg1 = loadConfiguration(t, cfgText1)
		cfg2 = loadConfiguration(t, cfgText2)
		cfg3 = loadConfiguration(t, cfgText3)

		ch = make(chan struct{}, 1)

		testRegistry = prometheus.NewRegistry()
	)

	opts := Options{}
	scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
	require.NoError(t, err)
	newLoop := func(scrapeLoopOptions) loop {
		ch <- struct{}{}
		return noopLoop()
	}
	sp := &scrapePool{
		appendable: &nopAppendable{},
		activeTargets: map[uint64]*Target{
			1: {},
		},
		loops: map[uint64]loop{
			1: noopLoop(),
		},
		newLoop: newLoop,
		logger:  nil,
		config:  cfg1.ScrapeConfigs[0],
		client:  http.DefaultClient,
		metrics: scrapeManager.metrics,
	}
	scrapeManager.scrapePools = map[string]*scrapePool{
		"job1": sp,
	}

	// Apply the initial configuration.
	err = scrapeManager.ApplyConfig(cfg1)
	require.NoError(t, err, "Unable to apply configuration.")
	select {
	case <-ch:
		require.FailNow(t, "Reload happened.")
	default:
	}

	// Apply a configuration for which the reload fails.
	err = scrapeManager.ApplyConfig(cfg2)
	require.Error(t, err, "Expecting error but got none.")
	select {
	case <-ch:
		require.FailNow(t, "Reload happened.")
	default:
	}

	// Apply a configuration for which the reload succeeds.
	err = scrapeManager.ApplyConfig(cfg3)
	require.NoError(t, err, "Unable to apply configuration.")
	select {
	case <-ch:
	default:
		require.FailNow(t, "Reload didn't happen.")
	}

	// Re-applying the same configuration shouldn't trigger a reload.
	err = scrapeManager.ApplyConfig(cfg3)
	require.NoError(t, err, "Unable to apply configuration.")
	select {
	case <-ch:
		require.FailNow(t, "Reload happened.")
	default:
	}
}

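// TestManagerTargetsUpdates verifies that target group updates sent to the
// manager are recorded and trigger a scrape loop reload.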
func TestManagerTargetsUpdates(t *testing.T) {
	opts := Options{}
	testRegistry := prometheus.NewRegistry()
	m, err := NewManager(&opts, nil, nil, testRegistry)
	require.NoError(t, err)

	ts := make(chan map[string][]*targetgroup.Group)
	go m.Run(ts)
	defer m.Stop()

	tgSent := make(map[string][]*targetgroup.Group)
	for x := 0; x < 10; x++ {
		tgSent[strconv.Itoa(x)] = []*targetgroup.Group{
			{
				Source: strconv.Itoa(x),
			},
		}

		select {
		case ts <- tgSent:
		case <-time.After(10 * time.Millisecond):
			require.Fail(t, "Scrape manager's channel remained blocked after the set threshold.")
		}
	}

	m.mtxScrape.Lock()
	tsetActual := m.targetSets
	m.mtxScrape.Unlock()

	// Make sure all updates have been received.
	require.Equal(t, tgSent, tsetActual)

	select {
	case <-m.triggerReload:
	default:
		require.Fail(t, "No scrape loops reload was triggered after targets update.")
	}
}

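// TestSetOffsetSeed verifies that the scrape offset seed is derived from the
// external labels, so replicas with different labels get different seeds.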
func TestSetOffsetSeed(t *testing.T) {
	getConfig := func(prometheus string) *config.Config {
		cfgText := `
global:
 external_labels:
   prometheus: '` + prometheus + `'
`

		cfg := &config.Config{}
		err := yaml.UnmarshalStrict([]byte(cfgText), cfg)
		require.NoError(t, err, "Unable to load YAML config.")

		return cfg
	}

	opts := Options{}
	testRegistry := prometheus.NewRegistry()
	scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
	require.NoError(t, err)

	// Load the first config.
	cfg1 := getConfig("ha1")
	err = scrapeManager.setOffsetSeed(cfg1.GlobalConfig.ExternalLabels)
	require.NoError(t, err)
	offsetSeed1 := scrapeManager.offsetSeed

	require.NotZero(t, offsetSeed1, "Offset seed has to be a non-zero uint64 hash.")

	// Load the second config.
	cfg2 := getConfig("ha2")
	require.NoError(t, scrapeManager.setOffsetSeed(cfg2.GlobalConfig.ExternalLabels))
	offsetSeed2 := scrapeManager.offsetSeed

	require.NotEqual(t, offsetSeed1, offsetSeed2, "Offset seed should not be the same on different sets of external labels.")
}

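// TestManagerScrapePools verifies that ScrapePools returns the set of
// currently configured job names after each reload.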
func TestManagerScrapePools(t *testing.T) {
	cfgText1 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090"]
- job_name: job2
  static_configs:
  - targets: ["foo:9091", "foo:9092"]
`
	cfgText2 := `
scrape_configs:
- job_name: job1
  static_configs:
  - targets: ["foo:9090", "foo:9094"]
- job_name: job3
  static_configs:
  - targets: ["foo:9093"]
`
	var (
		cfg1         = loadConfiguration(t, cfgText1)
		cfg2         = loadConfiguration(t, cfgText2)
		testRegistry = prometheus.NewRegistry()
	)

	reload := func(scrapeManager *Manager, cfg *config.Config) {
		newLoop := func(scrapeLoopOptions) loop {
			return noopLoop()
		}
		scrapeManager.scrapePools = map[string]*scrapePool{}
		for _, sc := range cfg.ScrapeConfigs {
			_, cancel := context.WithCancel(context.Background())
			defer cancel()
			sp := &scrapePool{
				appendable:    &nopAppendable{},
				activeTargets: map[uint64]*Target{},
				loops: map[uint64]loop{
					1: noopLoop(),
				},
				newLoop: newLoop,
				logger:  nil,
				config:  sc,
				client:  http.DefaultClient,
				cancel:  cancel,
			}
			for _, c := range sc.ServiceDiscoveryConfigs {
				staticConfig := c.(discovery.StaticConfig)
				for _, group := range staticConfig {
					for i := range group.Targets {
						sp.activeTargets[uint64(i)] = &Target{}
					}
				}
			}
			scrapeManager.scrapePools[sc.JobName] = sp
		}
	}

	opts := Options{}
	scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
	require.NoError(t, err)

	reload(scrapeManager, cfg1)
	require.ElementsMatch(t, []string{"job1", "job2"}, scrapeManager.ScrapePools())

	reload(scrapeManager, cfg2)
	require.ElementsMatch(t, []string{"job1", "job3"}, scrapeManager.ScrapePools())
}

// TestManagerCTZeroIngestion tests the scrape manager for created timestamp (CT) cases.
func TestManagerCTZeroIngestion(t *testing.T) {
	const mName = "expected_counter"

	for _, tc := range []struct {
		name                  string
		counterSample         *dto.Counter
		enableCTZeroIngestion bool

		expectedValues []float64
	}{
		{
			name: "disabled with CT on counter",
			counterSample: &dto.Counter{
				Value: proto.Float64(1.0),
				// Timestamp does not matter as long as it exists in this test.
				CreatedTimestamp: timestamppb.Now(),
			},
			expectedValues: []float64{1.0},
		},
		{
			name: "enabled with CT on counter",
			counterSample: &dto.Counter{
				Value: proto.Float64(1.0),
				// Timestamp does not matter as long as it exists in this test.
				CreatedTimestamp: timestamppb.Now(),
			},
			enableCTZeroIngestion: true,
			expectedValues:        []float64{0.0, 1.0},
		},
		{
			name: "enabled without CT on counter",
			counterSample: &dto.Counter{
				Value: proto.Float64(1.0),
			},
			enableCTZeroIngestion: true,
			expectedValues:        []float64{1.0},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			app := &collectResultAppender{}
			scrapeManager, err := NewManager(
				&Options{
					EnableCreatedTimestampZeroIngestion: tc.enableCTZeroIngestion,
					skipOffsetting:                      true,
				},
				log.NewLogfmtLogger(os.Stderr),
				&collectResultAppendable{app},
				prometheus.NewRegistry(),
			)
			require.NoError(t, err)

			require.NoError(t, scrapeManager.ApplyConfig(&config.Config{
				GlobalConfig: config.GlobalConfig{
					// Disable regular scrapes.
					ScrapeInterval: model.Duration(9999 * time.Minute),
					ScrapeTimeout:  model.Duration(5 * time.Second),
					// Ensure the proto format is chosen. We need proto as it's the only
					// protocol with CT parsing support.
					ScrapeProtocols: []config.ScrapeProtocol{config.PrometheusProto},
				},
				ScrapeConfigs: []*config.ScrapeConfig{{JobName: "test"}},
			}))

			once := sync.Once{}
			// Start a fake HTTP target that allows exactly one scrape to succeed.
			server := httptest.NewServer(
				http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
					fail := true
					once.Do(func() {
						fail = false
						w.Header().Set("Content-Type", `application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited`)

						ctrType := dto.MetricType_COUNTER
						w.Write(protoMarshalDelimited(t, &dto.MetricFamily{
							Name:   proto.String(mName),
							Type:   &ctrType,
							Metric: []*dto.Metric{{Counter: tc.counterSample}},
						}))
					})

					if fail {
						w.WriteHeader(http.StatusInternalServerError)
					}
				}),
			)
			defer server.Close()

			serverURL, err := url.Parse(server.URL)
			require.NoError(t, err)

			// Add the fake target directly into tsets and reload. Normally users would
			// use Manager.Run and wait for the minimum 5s refresh interval.
			scrapeManager.updateTsets(map[string][]*targetgroup.Group{
				"test": {{
					Targets: []model.LabelSet{{
						model.SchemeLabel:  model.LabelValue(serverURL.Scheme),
						model.AddressLabel: model.LabelValue(serverURL.Host),
					}},
				}},
			})
			scrapeManager.reload()

			// Wait for one scrape.
			ctx, cancel := context.WithTimeout(context.Background(), 1*time.Minute)
			defer cancel()
			require.NoError(t, runutil.Retry(100*time.Millisecond, ctx.Done(), func() error {
				if got := countFloatSamples(app, mName); got != len(tc.expectedValues) {
					return fmt.Errorf("expected %d samples, got %d", len(tc.expectedValues), got)
				}
				return nil
			}), "after 1 minute")
			scrapeManager.Stop()

			require.Equal(t, tc.expectedValues, getResultFloats(app, mName))
		})
	}
}

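// countFloatSamples counts the collected float samples with the given metric name.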
func countFloatSamples(a *collectResultAppender, expectedMetricName string) (count int) {
	a.mtx.Lock()
	defer a.mtx.Unlock()

	for _, f := range a.resultFloats {
		if f.metric.Get(model.MetricNameLabel) == expectedMetricName {
			count++
		}
	}
	return count
}

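// getResultFloats returns the collected float sample values for the given
// metric name, in append order.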
func getResultFloats(app *collectResultAppender, expectedMetricName string) (result []float64) {
	app.mtx.Lock()
	defer app.mtx.Unlock()

	for _, f := range app.resultFloats {
		if f.metric.Get(model.MetricNameLabel) == expectedMetricName {
			result = append(result, f.f)
		}
	}
	return result
}