mirror of https://github.com/prometheus/prometheus

Merge branch 'main' into update-react-and-ts

Signed-off-by: Augustin Husson <husson.augustin@gmail.com>

commit c827413755 (pull/9323/head)
@@ -1,3 +1,8 @@
+## 2.29.2 / 2021-08-27
+
+* [BUGFIX] Fix Kubernetes SD failing to discover Ingress in Kubernetes v1.22. #9205
+* [BUGFIX] Fix data race in loading write-ahead-log (WAL). #9259
+
 ## 2.29.1 / 2021-08-11

 * [BUGFIX] tsdb: align atomically accessed int64 to prevent panic in 32-bit
@@ -107,6 +107,7 @@ type flagConfig struct {
	outageTolerance model.Duration
	resendDelay     model.Duration
	web             web.Options
+	scrape          scrape.Options
	tsdb            tsdbOptions
	lookbackDelta   model.Duration
	webTimeout      model.Duration

@@ -152,6 +153,9 @@ func (c *flagConfig) setFeatureListOptions(logger log.Logger) error {
	case "memory-snapshot-on-shutdown":
		c.tsdb.EnableMemorySnapshotOnShutdown = true
		level.Info(logger).Log("msg", "Experimental memory snapshot on shutdown enabled")
+	case "extra-scrape-metrics":
+		c.scrape.ExtraMetrics = true
+		level.Info(logger).Log("msg", "Experimental additional scrape metrics")
	case "":
		continue
	default:

@@ -312,7 +316,7 @@ func main() {
	a.Flag("query.max-samples", "Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return.").
		Default("50000000").IntVar(&cfg.queryMaxSamples)

-	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, remote-write-receiver. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
+	a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-at-modifier, promql-negative-offset, remote-write-receiver, extra-scrape-metrics. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details.").
		Default("").StringsVar(&cfg.featureList)

	promlogflag.AddFlags(a, &cfg.promlogConfig)

@@ -457,7 +461,7 @@ func main() {
		ctxNotify, cancelNotify = context.WithCancel(context.Background())
		discoveryManagerNotify  = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"), discovery.Name("notify"))

-		scrapeManager = scrape.NewManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+		scrapeManager = scrape.NewManager(&cfg.scrape, log.With(logger, "component", "scrape manager"), fanoutStorage)

		opts = promql.EngineOpts{
			Logger: log.With(logger, "component", "query engine"),
@@ -24,7 +24,7 @@ import (
	"net/url"
	"os"
	"path/filepath"
-	"reflect"
+	"sort"
	"strconv"
	"strings"
	"time"

@@ -48,6 +48,7 @@ import (
	_ "github.com/prometheus/prometheus/discovery/install" // Register service discovery implementations.
	"github.com/prometheus/prometheus/discovery/kubernetes"
	"github.com/prometheus/prometheus/discovery/targetgroup"
+	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/pkg/rulefmt"
	"github.com/prometheus/prometheus/promql"
)

@@ -471,8 +472,8 @@ func checkRules(filename string) (int, []error) {
		fmt.Printf("%d duplicate rule(s) found.\n", len(dRules))
		for _, n := range dRules {
			fmt.Printf("Metric: %s\nLabel(s):\n", n.metric)
-			for i, l := range n.label {
-				fmt.Printf("\t%s: %s\n", i, l)
+			for _, l := range n.label {
+				fmt.Printf("\t%s: %s\n", l.Name, l.Value)
			}
		}
		fmt.Println("Might cause inconsistency while recording expressions.")

@@ -483,29 +484,52 @@ func checkRules(filename string) (int, []error) {

 type compareRuleType struct {
 	metric string
-	label  map[string]string
+	label  labels.Labels
 }

+type compareRuleTypes []compareRuleType
+
+func (c compareRuleTypes) Len() int           { return len(c) }
+func (c compareRuleTypes) Swap(i, j int)      { c[i], c[j] = c[j], c[i] }
+func (c compareRuleTypes) Less(i, j int) bool { return compare(c[i], c[j]) < 0 }
+
+func compare(a, b compareRuleType) int {
+	if res := strings.Compare(a.metric, b.metric); res != 0 {
+		return res
+	}
+
+	return labels.Compare(a.label, b.label)
+}
+
 func checkDuplicates(groups []rulefmt.RuleGroup) []compareRuleType {
 	var duplicates []compareRuleType
+	var rules compareRuleTypes

 	for _, group := range groups {
-		for index, rule := range group.Rules {
-			inst := compareRuleType{
+		for _, rule := range group.Rules {
+			rules = append(rules, compareRuleType{
 				metric: ruleMetric(rule),
-				label:  rule.Labels,
-			}
-			for i := 0; i < index; i++ {
-				t := compareRuleType{
-					metric: ruleMetric(group.Rules[i]),
-					label:  group.Rules[i].Labels,
-				}
-				if reflect.DeepEqual(t, inst) {
-					duplicates = append(duplicates, t)
-				}
-			}
+				label:  labels.FromMap(rule.Labels),
+			})
 		}
 	}
+	if len(rules) < 2 {
+		return duplicates
+	}
+	sort.Sort(rules)
+
+	last := rules[0]
+	for i := 1; i < len(rules); i++ {
+		if compare(last, rules[i]) == 0 {
+			// Don't add a duplicated rule multiple times.
+			if len(duplicates) == 0 || compare(last, duplicates[len(duplicates)-1]) != 0 {
+				duplicates = append(duplicates, rules[i])
+			}
+		}
+		last = rules[i]
+	}
+
 	return duplicates
 }
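Editorial note: the rewrite above trades the earlier pairwise reflect.DeepEqual comparison (quadratic in the number of rules) for collect, sort, then a single linear scan. A minimal standalone Go sketch of that pattern, with invented names rather than the promtool types:

package main

import (
	"fmt"
	"sort"
)

// findDuplicates reports each value that occurs more than once, exactly once.
// Sorting first makes equal values adjacent, so one linear pass finds them.
// Note that it sorts the input slice in place.
func findDuplicates(rules []string) []string {
	var dups []string
	sort.Strings(rules)
	for i := 1; i < len(rules); i++ {
		if rules[i] == rules[i-1] {
			// Skip further repeats of a duplicate already recorded.
			if len(dups) == 0 || dups[len(dups)-1] != rules[i] {
				dups = append(dups, rules[i])
			}
		}
	}
	return dups
}

func main() {
	fmt.Println(findDuplicates([]string{"b", "a", "b", "c", "b"})) // prints [b]
}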
@@ -21,6 +21,8 @@ import (
	"testing"
	"time"

+	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/prometheus/prometheus/pkg/rulefmt"
	"github.com/stretchr/testify/require"
)

@@ -118,3 +120,46 @@ func TestCheckSDFile(t *testing.T) {
		})
	}
}
+
+func TestCheckDuplicates(t *testing.T) {
+	cases := []struct {
+		name         string
+		ruleFile     string
+		expectedDups []compareRuleType
+	}{
+		{
+			name:     "no duplicates",
+			ruleFile: "./testdata/rules.yml",
+		},
+		{
+			name:     "duplicate in other group",
+			ruleFile: "./testdata/rules_duplicates.yml",
+			expectedDups: []compareRuleType{
+				{
+					metric: "job:test:count_over_time1m",
+					label:  labels.New(),
+				},
+			},
+		},
+	}
+
+	for _, test := range cases {
+		c := test
+		t.Run(c.name, func(t *testing.T) {
+			rgs, err := rulefmt.ParseFile(c.ruleFile)
+			require.Empty(t, err)
+			dups := checkDuplicates(rgs.Groups)
+			require.Equal(t, c.expectedDups, dups)
+		})
+	}
+}
+
+func BenchmarkCheckDuplicates(b *testing.B) {
+	rgs, err := rulefmt.ParseFile("./testdata/rules_large.yml")
+	require.Empty(b, err)
+	b.ResetTimer()
+
+	for i := 0; i < b.N; i++ {
+		checkDuplicates(rgs.Groups)
+	}
+}
@@ -0,0 +1,24 @@
+# This is a rules file with duplicate expressions
+
+groups:
+  - name: base
+    rules:
+      - record: job:test:count_over_time1m
+        expr: sum without(instance) (count_over_time(test[1m]))
+
+      # A recording rule that doesn't depend on input series.
+      - record: fixed_data
+        expr: 1
+
+      # Subquery with default resolution test.
+      - record: suquery_interval_test
+        expr: count_over_time(up[5m:])
+
+      # Duplicating
+      - record: job:test:count_over_time1m
+        expr: sum without(instance) (count_over_time(test[1m]))
+
+  - name: duplicate
+    rules:
+      - record: job:test:count_over_time1m
+        expr: sum without(instance) (count_over_time(test[1m]))
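A quick way to exercise the new detection against the fixture above is the existing rules check (a standard promtool invocation; exact output wording may differ by version):

$ promtool check rules ./testdata/rules_duplicates.yml

Per the test expectations, this should report job:test:count_over_time1m as a duplicate, printed by the checkRules output shown earlier.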
(File diff suppressed because it is too large.)
@@ -1302,6 +1302,10 @@ var expectedErrors = []struct {
		filename: "http_url_bad_scheme.bad.yml",
		errMsg:   "URL scheme must be 'http' or 'https'",
	},
+	{
+		filename: "empty_scrape_config_action.bad.yml",
+		errMsg:   "relabel action cannot be empty",
+	},
}

func TestBadConfigs(t *testing.T) {
@@ -0,0 +1,4 @@
+scrape_configs:
+  - job_name: prometheus
+    relabel_configs:
+      - action: null
@@ -220,9 +220,8 @@ func (i *Ingress) buildIngress(ingress ingressAdaptor) *targetgroup.Group {
}

// matchesHostnamePattern returns true if the host matches a wildcard DNS
-// pattern or pattern and host are equal
+// pattern or pattern and host are equal.
func matchesHostnamePattern(pattern, host string) bool {
	// check for exact match
	if pattern == host {
		return true
	}

@@ -230,13 +229,13 @@ func matchesHostnamePattern(pattern, host string) bool {
	patternParts := strings.Split(pattern, ".")
	hostParts := strings.Split(host, ".")

-	// if they are not equal, we cna check if we need to match
-	// on a wildcard or else give up
+	// If the first element of the pattern is not a wildcard, give up.
	if len(patternParts) == 0 || patternParts[0] != "*" {
		return false
	}

-	// to get a valid wildcard match the parts will need to be the same length
+	// A wildcard match require the pattern to have the same length as the host
+	// path.
	if len(patternParts) != len(hostParts) {
		return false
	}
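Editorial note: the comments describe the whole algorithm: exact match first, a wildcard accepted only as the leading element, and only when pattern and host have the same number of dot-separated parts. A self-contained sketch of that logic (illustrative, not the package source):

package main

import (
	"fmt"
	"strings"
)

// matches reports whether host is covered by pattern, where pattern may
// start with a single "*" wildcard element (e.g. "*.example.com").
func matches(pattern, host string) bool {
	if pattern == host {
		return true // exact match
	}
	patternParts := strings.Split(pattern, ".")
	hostParts := strings.Split(host, ".")
	// Only a leading wildcard is supported.
	if len(patternParts) == 0 || patternParts[0] != "*" {
		return false
	}
	// A wildcard replaces exactly one element, so lengths must agree.
	if len(patternParts) != len(hostParts) {
		return false
	}
	// All remaining elements must match exactly.
	for i := 1; i < len(patternParts); i++ {
		if patternParts[i] != hostParts[i] {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(matches("*.example.com", "foo.example.com")) // true
	fmt.Println(matches("*.example.com", "a.b.example.com")) // false: lengths differ
}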
@@ -19,11 +19,11 @@ import (
	"testing"

	"github.com/prometheus/common/model"
-	"github.com/prometheus/prometheus/discovery/targetgroup"

	v1 "k8s.io/api/networking/v1"
	"k8s.io/api/networking/v1beta1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+
+	"github.com/prometheus/prometheus/discovery/targetgroup"
)

type TLSMode int
@@ -2172,6 +2172,9 @@ it was not set during relabeling. The `__scheme__` and `__metrics_path__` labels
are set to the scheme and metrics path of the target respectively. The `__param_<name>`
label is set to the value of the first passed URL parameter called `<name>`.

+The `__scrape_interval__` and `__scrape_timeout__` labels are set to the target's
+interval and timeout. This is **experimental** and could change in the future.
+
Additional labels prefixed with `__meta_` may be available during the
relabeling phase. They are set by the service discovery mechanism that provided
the target and vary between mechanisms.
@@ -34,7 +34,7 @@ that PromQL does not look ahead of the evaluation time for samples.
`--enable-feature=promql-negative-offset`

In contrast to the positive offset modifier, the negative offset modifier lets
one shift a vector selector into the future. An example in which one may want
to use a negative offset is reviewing past data and making temporal comparisons
with more recent data.

@@ -59,5 +59,15 @@ Exemplar storage is implemented as a fixed size circular buffer that stores exemplars
`--enable-feature=memory-snapshot-on-shutdown`

This takes the snapshot of the chunks that are in memory along with the series information when shutting down and stores
it on disk. This will reduce the startup time since the memory state can be restored with this snapshot and m-mapped
chunks without the need of WAL replay.

+## Extra Scrape Metrics
+
+`--enable-feature=extra-scrape-metrics`
+
+When enabled, for each instance scrape, Prometheus stores a sample in the following additional time series:
+
+- `scrape_timeout_seconds`. The configured `scrape_timeout` for a target. This allows you to measure each target to find out how close they are to timing out with `scrape_duration_seconds / scrape_timeout_seconds`.
+- `scrape_sample_limit`. The configured `sample_limit` for a target. This allows you to measure each target
+  to find out how close they are to reaching the limit with `scrape_samples_post_metric_relabeling / scrape_sample_limit`. Note that `scrape_sample_limit` can be zero if there is no limit configured, which means that the query above can return `+Inf` for targets with no limit (as we divide by zero). If you want to query only for targets that do have a sample limit use this query: `scrape_samples_post_metric_relabeling / (scrape_sample_limit > 0)`.
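Editorial note: the two queries named in that doc addition compose directly into alert-style expressions; a sketch, with illustrative thresholds:

# Fraction of the scrape timeout actually used, per target:
scrape_duration_seconds / scrape_timeout_seconds > 0.8

# Fraction of the sample limit used, only for targets that have one:
scrape_samples_post_metric_relabeling / (scrape_sample_limit > 0) > 0.8

Filtering through `(scrape_sample_limit > 0)` avoids the divide-by-zero `+Inf` the documentation warns about for targets with no configured limit.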
@@ -502,7 +502,9 @@ $ curl http://localhost:9090/api/v1/targets
        "lastError": "",
        "lastScrape": "2017-01-17T15:07:44.723715405+01:00",
        "lastScrapeDuration": 0.050688943,
-       "health": "up"
+       "health": "up",
+       "scrapeInterval": "1m",
+       "scrapeTimeout": "10s"
      }
    ],
    "droppedTargets": [

@@ -511,6 +513,8 @@ $ curl http://localhost:9090/api/v1/targets
          "__address__": "127.0.0.1:9100",
          "__metrics_path__": "/metrics",
          "__scheme__": "http",
+         "__scrape_interval__": "1m",
+         "__scrape_timeout__": "10s",
          "job": "node"
        },
      }
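Editorial note: the new fields can be spot-checked against a running server with the documented endpoint; the jq filter here is illustrative:

$ curl -s http://localhost:9090/api/v1/targets \
  | jq '.data.activeTargets[] | {scrapeUrl, scrapeInterval, scrapeTimeout}'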
@@ -100,6 +100,9 @@ func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
	if c.Regex.Regexp == nil {
		c.Regex = MustNewRegexp("")
	}
+	if c.Action == "" {
+		return errors.Errorf("relabel action cannot be empty")
+	}
	if c.Modulus == 0 && c.Action == HashMod {
		return errors.Errorf("relabel configuration for hashmod requires non-zero modulus")
	}
@@ -182,7 +182,10 @@ func (node *UnaryExpr) String() string {
}

func (node *VectorSelector) String() string {
-	labelStrings := make([]string, 0, len(node.LabelMatchers)-1)
+	var labelStrings []string
+	if len(node.LabelMatchers) > 1 {
+		labelStrings = make([]string, 0, len(node.LabelMatchers)-1)
+	}
	for _, matcher := range node.LabelMatchers {
		// Only include the __name__ label if its equality matching and matches the name.
		if matcher.Name == labels.MetricName && matcher.Type == labels.MatchEqual && matcher.Value == node.Name {
@@ -16,6 +16,8 @@ package parser
import (
	"testing"

+	"github.com/prometheus/prometheus/pkg/labels"
+
	"github.com/stretchr/testify/require"
)

@@ -138,3 +140,76 @@ func TestExprString(t *testing.T) {
		require.Equal(t, exp, expr.String())
	}
}
+
+func TestVectorSelector_String(t *testing.T) {
+	for _, tc := range []struct {
+		name     string
+		vs       VectorSelector
+		expected string
+	}{
+		{
+			name:     "empty value",
+			vs:       VectorSelector{},
+			expected: ``,
+		},
+		{
+			name:     "no matchers with name",
+			vs:       VectorSelector{Name: "foobar"},
+			expected: `foobar`,
+		},
+		{
+			name: "one matcher with name",
+			vs: VectorSelector{
+				Name: "foobar",
+				LabelMatchers: []*labels.Matcher{
+					labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
+				},
+			},
+			expected: `foobar{a="x"}`,
+		},
+		{
+			name: "two matchers with name",
+			vs: VectorSelector{
+				Name: "foobar",
+				LabelMatchers: []*labels.Matcher{
+					labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
+					labels.MustNewMatcher(labels.MatchEqual, "b", "y"),
+				},
+			},
+			expected: `foobar{a="x",b="y"}`,
+		},
+		{
+			name: "two matchers without name",
+			vs: VectorSelector{
+				LabelMatchers: []*labels.Matcher{
+					labels.MustNewMatcher(labels.MatchEqual, "a", "x"),
+					labels.MustNewMatcher(labels.MatchEqual, "b", "y"),
+				},
+			},
+			expected: `{a="x",b="y"}`,
+		},
+		{
+			name: "name matcher and name",
+			vs: VectorSelector{
+				Name: "foobar",
+				LabelMatchers: []*labels.Matcher{
+					labels.MustNewMatcher(labels.MatchEqual, labels.MetricName, "foobar"),
+				},
+			},
+			expected: `foobar`,
+		},
+		{
+			name: "name matcher only",
+			vs: VectorSelector{
+				LabelMatchers: []*labels.Matcher{
+					labels.MustNewMatcher(labels.MatchEqual, labels.MetricName, "foobar"),
+				},
+			},
+			expected: `{__name__="foobar"}`,
+		},
+	} {
+		t.Run(tc.name, func(t *testing.T) {
+			require.Equal(t, tc.expected, tc.vs.String())
+		})
+	}
+}
@@ -99,12 +99,16 @@ func (mc *MetadataMetricsCollector) Collect(ch chan<- prometheus.Metric) {
}

// NewManager is the Manager constructor
-func NewManager(logger log.Logger, app storage.Appendable) *Manager {
+func NewManager(o *Options, logger log.Logger, app storage.Appendable) *Manager {
+	if o == nil {
+		o = &Options{}
+	}
	if logger == nil {
		logger = log.NewNopLogger()
	}
	m := &Manager{
		append:        app,
+		opts:          o,
		logger:        logger,
		scrapeConfigs: make(map[string]*config.ScrapeConfig),
		scrapePools:   make(map[string]*scrapePool),

@@ -116,9 +120,15 @@ func NewManager(logger log.Logger, app storage.Appendable) *Manager {
	return m
}

+// Options are the configuration parameters to the scrape manager.
+type Options struct {
+	ExtraMetrics bool
+}
+
// Manager maintains a set of scrape pools and manages start/stop cycles
// when receiving new target groups from the discovery manager.
type Manager struct {
+	opts      *Options
	logger    log.Logger
	append    storage.Appendable
	graceShut chan struct{}

@@ -181,7 +191,7 @@ func (m *Manager) reload() {
			level.Error(m.logger).Log("msg", "error reloading target set", "err", "invalid config id:"+setName)
			continue
		}
-		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName))
+		sp, err := newScrapePool(scrapeConfig, m.append, m.jitterSeed, log.With(m.logger, "scrape_pool", setName), m.opts.ExtraMetrics)
		if err != nil {
			level.Error(m.logger).Log("msg", "error creating new scrape pool", "err", err, "scrape_pool", setName)
			continue
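Editorial note: with the new constructor, a caller wires the feature flag through an Options value, and nil falls back to defaults. A sketch under the assumption of this commit's signatures (import paths depend on the Prometheus and go-kit versions in use):

package main

import (
	"github.com/go-kit/log"
	"github.com/prometheus/prometheus/scrape"
	"github.com/prometheus/prometheus/storage"
)

// newScrapeManager shows the new wiring: Options first, then logger and the
// appendable storage. Passing a nil *Options makes NewManager use &Options{}.
func newScrapeManager(logger log.Logger, app storage.Appendable) *scrape.Manager {
	opts := &scrape.Options{ExtraMetrics: true} // enable the extra scrape metrics
	return scrape.NewManager(opts, logger, app)
}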
@@ -44,52 +44,66 @@ func TestPopulateLabels(t *testing.T) {
			"custom": "value",
		}),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
		},
		res: labels.FromMap(map[string]string{
			model.AddressLabel:        "1.2.3.4:1000",
			model.InstanceLabel:       "1.2.3.4:1000",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
			"custom":                  "value",
		}),
		resOrig: labels.FromMap(map[string]string{
			model.AddressLabel:        "1.2.3.4:1000",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
			"custom":                  "value",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
		}),
	},
	// Pre-define/overwrite scrape config labels.
	// Leave out port and expect it to be defaulted to scheme.
	{
		in: labels.FromMap(map[string]string{
			model.AddressLabel:        "1.2.3.4",
			model.SchemeLabel:         "http",
			model.MetricsPathLabel:    "/custom",
			model.JobLabel:            "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
		}),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
		},
		res: labels.FromMap(map[string]string{
			model.AddressLabel:        "1.2.3.4:80",
			model.InstanceLabel:       "1.2.3.4:80",
			model.SchemeLabel:         "http",
			model.MetricsPathLabel:    "/custom",
			model.JobLabel:            "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
		}),
		resOrig: labels.FromMap(map[string]string{
			model.AddressLabel:        "1.2.3.4",
			model.SchemeLabel:         "http",
			model.MetricsPathLabel:    "/custom",
			model.JobLabel:            "custom-job",
+			model.ScrapeIntervalLabel: "2s",
+			model.ScrapeTimeoutLabel:  "2s",
		}),
	},
	// Provide instance label. HTTPS port default for IPv6.

@@ -99,32 +113,40 @@ func TestPopulateLabels(t *testing.T) {
			model.InstanceLabel: "custom-instance",
		}),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
		},
		res: labels.FromMap(map[string]string{
			model.AddressLabel:        "[::1]:443",
			model.InstanceLabel:       "custom-instance",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
		}),
		resOrig: labels.FromMap(map[string]string{
			model.AddressLabel:        "[::1]",
			model.InstanceLabel:       "custom-instance",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
		}),
	},
	// Address label missing.
	{
		in: labels.FromStrings("custom", "value"),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
		},
		res:     nil,
		resOrig: nil,

@@ -134,9 +156,11 @@ func TestPopulateLabels(t *testing.T) {
	{
		in: labels.FromStrings("custom", "host:1234"),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
			RelabelConfigs: []*relabel.Config{
				{
					Action: relabel.Replace,

@@ -148,27 +172,33 @@ func TestPopulateLabels(t *testing.T) {
			},
		},
		res: labels.FromMap(map[string]string{
			model.AddressLabel:        "host:1234",
			model.InstanceLabel:       "host:1234",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
			"custom":                  "host:1234",
		}),
		resOrig: labels.FromMap(map[string]string{
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
			"custom":                  "host:1234",
		}),
	},
	// Address label missing, but added in relabelling.
	{
		in: labels.FromStrings("custom", "host:1234"),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
			RelabelConfigs: []*relabel.Config{
				{
					Action: relabel.Replace,

@@ -180,18 +210,22 @@ func TestPopulateLabels(t *testing.T) {
			},
		},
		res: labels.FromMap(map[string]string{
			model.AddressLabel:        "host:1234",
			model.InstanceLabel:       "host:1234",
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
			"custom":                  "host:1234",
		}),
		resOrig: labels.FromMap(map[string]string{
			model.SchemeLabel:         "https",
			model.MetricsPathLabel:    "/metrics",
			model.JobLabel:            "job",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "1s",
			"custom":                  "host:1234",
		}),
	},
	// Invalid UTF-8 in label.

@@ -201,14 +235,102 @@ func TestPopulateLabels(t *testing.T) {
			"custom": "\xbd",
		}),
		cfg: &config.ScrapeConfig{
			Scheme:         "https",
			MetricsPath:    "/metrics",
			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
		},
		res:     nil,
		resOrig: nil,
		err:     "invalid label value for \"custom\": \"\\xbd\"",
	},
+	// Invalid duration in interval label.
+	{
+		in: labels.FromMap(map[string]string{
+			model.AddressLabel:        "1.2.3.4:1000",
+			model.ScrapeIntervalLabel: "2notseconds",
+		}),
+		cfg: &config.ScrapeConfig{
+			Scheme:         "https",
+			MetricsPath:    "/metrics",
+			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
+		},
+		res:     nil,
+		resOrig: nil,
+		err:     "error parsing scrape interval: not a valid duration string: \"2notseconds\"",
+	},
+	// Invalid duration in timeout label.
+	{
+		in: labels.FromMap(map[string]string{
+			model.AddressLabel:       "1.2.3.4:1000",
+			model.ScrapeTimeoutLabel: "2notseconds",
+		}),
+		cfg: &config.ScrapeConfig{
+			Scheme:         "https",
+			MetricsPath:    "/metrics",
+			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
+		},
+		res:     nil,
+		resOrig: nil,
+		err:     "error parsing scrape timeout: not a valid duration string: \"2notseconds\"",
+	},
+	// 0 interval in timeout label.
+	{
+		in: labels.FromMap(map[string]string{
+			model.AddressLabel:        "1.2.3.4:1000",
+			model.ScrapeIntervalLabel: "0s",
+		}),
+		cfg: &config.ScrapeConfig{
+			Scheme:         "https",
+			MetricsPath:    "/metrics",
+			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
+		},
+		res:     nil,
+		resOrig: nil,
+		err:     "scrape interval cannot be 0",
+	},
+	// 0 duration in timeout label.
+	{
+		in: labels.FromMap(map[string]string{
+			model.AddressLabel:       "1.2.3.4:1000",
+			model.ScrapeTimeoutLabel: "0s",
+		}),
+		cfg: &config.ScrapeConfig{
+			Scheme:         "https",
+			MetricsPath:    "/metrics",
+			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
+		},
+		res:     nil,
+		resOrig: nil,
+		err:     "scrape timeout cannot be 0",
+	},
+	// Timeout less than interval.
+	{
+		in: labels.FromMap(map[string]string{
+			model.AddressLabel:        "1.2.3.4:1000",
+			model.ScrapeIntervalLabel: "1s",
+			model.ScrapeTimeoutLabel:  "2s",
+		}),
+		cfg: &config.ScrapeConfig{
+			Scheme:         "https",
+			MetricsPath:    "/metrics",
+			JobName:        "job",
+			ScrapeInterval: model.Duration(time.Second),
+			ScrapeTimeout:  model.Duration(time.Second),
+		},
+		res:     nil,
+		resOrig: nil,
+		err:     "scrape timeout cannot be greater than scrape interval (\"2s\" > \"1s\")",
+	},
}
for _, c := range cases {
	in := c.in.Copy()

@@ -276,7 +398,8 @@ scrape_configs:
		ch = make(chan struct{}, 1)
	)

-	scrapeManager := NewManager(nil, nil)
+	opts := Options{}
+	scrapeManager := NewManager(&opts, nil, nil)
	newLoop := func(scrapeLoopOptions) loop {
		ch <- struct{}{}
		return noopLoop()

@@ -338,7 +461,8 @@ scrape_configs:
}

func TestManagerTargetsUpdates(t *testing.T) {
-	m := NewManager(nil, nil)
+	opts := Options{}
+	m := NewManager(&opts, nil, nil)

	ts := make(chan map[string][]*targetgroup.Group)
	go m.Run(ts)

@@ -390,7 +514,8 @@ global:
		return cfg
	}

-	scrapeManager := NewManager(nil, nil)
+	opts := Options{}
+	scrapeManager := NewManager(&opts, nil, nil)

	// Load the first config.
	cfg1 := getConfig("ha1")
@@ -253,6 +253,8 @@ type scrapeLoopOptions struct {
	labelLimits     *labelLimits
	honorLabels     bool
	honorTimestamps bool
+	interval        time.Duration
+	timeout         time.Duration
	mrc             []*relabel.Config
	cache           *scrapeCache
}

@@ -261,7 +263,7 @@ const maxAheadTime = 10 * time.Minute

type labelsMutator func(labels.Labels) labels.Labels

-func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger) (*scrapePool, error) {
+func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed uint64, logger log.Logger, reportScrapeTimeout bool) (*scrapePool, error) {
	targetScrapePools.Inc()
	if logger == nil {
		logger = log.NewNopLogger()

@@ -306,7 +308,11 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, jitterSeed
			cache,
			jitterSeed,
			opts.honorTimestamps,
			opts.sampleLimit,
			opts.labelLimits,
+			opts.interval,
+			opts.timeout,
+			reportScrapeTimeout,
		)
	}

@@ -414,6 +420,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
		} else {
			cache = newScrapeCache()
		}

		var (
			t = sp.activeTargets[fp]
			s = &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}

@@ -426,6 +433,8 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
				honorTimestamps: honorTimestamps,
				mrc:             mrc,
				cache:           cache,
+				interval:        interval,
+				timeout:         timeout,
			})
		)
		wg.Add(1)

@@ -435,7 +444,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
			wg.Done()

			newLoop.setForcedError(forcedErr)
-			newLoop.run(interval, timeout, nil)
+			newLoop.run(nil)
		}(oldLoop, newLoop)

		sp.loops[fp] = newLoop

@@ -509,6 +518,12 @@ func (sp *scrapePool) sync(targets []*Target) {
		hash := t.hash()

		if _, ok := sp.activeTargets[hash]; !ok {
+			// The scrape interval and timeout labels are set to the config's values initially,
+			// so whether changed via relabeling or not, they'll exist and hold the correct values
+			// for every target.
+			var err error
+			interval, timeout, err = t.intervalAndTimeout(interval, timeout)
+
			s := &targetScraper{Target: t, client: sp.client, timeout: timeout, bodySizeLimit: bodySizeLimit}
			l := sp.newLoop(scrapeLoopOptions{
				target: t,

@@ -518,7 +533,12 @@ func (sp *scrapePool) sync(targets []*Target) {
				honorLabels:     honorLabels,
				honorTimestamps: honorTimestamps,
				mrc:             mrc,
+				interval:        interval,
+				timeout:         timeout,
			})
+			if err != nil {
+				l.setForcedError(err)
+			}

			sp.activeTargets[hash] = t
			sp.loops[hash] = l

@@ -560,7 +580,7 @@ func (sp *scrapePool) sync(targets []*Target) {
	}
	for _, l := range uniqueLoops {
		if l != nil {
-			go l.run(interval, timeout, nil)
+			go l.run(nil)
		}
	}
	// Wait for all potentially stopped scrapers to terminate.

@@ -772,7 +792,7 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error)

// A loop can run and be stopped again. It must not be reused after it was stopped.
type loop interface {
-	run(interval, timeout time.Duration, errc chan<- error)
+	run(errc chan<- error)
	setForcedError(err error)
	stop()
	getCache() *scrapeCache

@@ -796,7 +816,10 @@ type scrapeLoop struct {
	honorTimestamps bool
	forcedErr       error
	forcedErrMtx    sync.Mutex
	sampleLimit     int
	labelLimits     *labelLimits
+	interval        time.Duration
+	timeout         time.Duration

	appender      func(ctx context.Context) storage.Appender
	sampleMutator labelsMutator

@@ -808,6 +831,8 @@ type scrapeLoop struct {
	stopped chan struct{}

	disabledEndOfRunStalenessMarkers bool
+
+	reportScrapeTimeout bool
}

// scrapeCache tracks mappings of exposed metric strings to label sets and

@@ -1064,7 +1089,11 @@ func newScrapeLoop(ctx context.Context,
	cache *scrapeCache,
	jitterSeed uint64,
	honorTimestamps bool,
	sampleLimit int,
	labelLimits *labelLimits,
+	interval time.Duration,
+	timeout time.Duration,
+	reportScrapeTimeout bool,
) *scrapeLoop {
	if l == nil {
		l = log.NewNopLogger()

@@ -1087,16 +1116,20 @@ func newScrapeLoop(ctx context.Context,
		l:                   l,
		parentCtx:           ctx,
		honorTimestamps:     honorTimestamps,
		sampleLimit:         sampleLimit,
		labelLimits:         labelLimits,
+		interval:            interval,
+		timeout:             timeout,
+		reportScrapeTimeout: reportScrapeTimeout,
	}
	sl.ctx, sl.cancel = context.WithCancel(ctx)

	return sl
}

-func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (sl *scrapeLoop) run(errc chan<- error) {
	select {
-	case <-time.After(sl.scraper.offset(interval, sl.jitterSeed)):
+	case <-time.After(sl.scraper.offset(sl.interval, sl.jitterSeed)):
		// Continue after a scraping offset.
	case <-sl.ctx.Done():
		close(sl.stopped)

@@ -1106,7 +1139,7 @@ func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
	var last time.Time

	alignedScrapeTime := time.Now().Round(0)
-	ticker := time.NewTicker(interval)
+	ticker := time.NewTicker(sl.interval)
	defer ticker.Stop()

mainLoop:

@@ -1126,11 +1159,11 @@ mainLoop:
		// Calling Round ensures the time used is the wall clock, as otherwise .Sub
		// and .Add on time.Time behave differently (see time package docs).
		scrapeTime := time.Now().Round(0)
-		if AlignScrapeTimestamps && interval > 100*scrapeTimestampTolerance {
+		if AlignScrapeTimestamps && sl.interval > 100*scrapeTimestampTolerance {
			// For some reason, a tick might have been skipped, in which case we
			// would call alignedScrapeTime.Add(interval) multiple times.
-			for scrapeTime.Sub(alignedScrapeTime) >= interval {
-				alignedScrapeTime = alignedScrapeTime.Add(interval)
+			for scrapeTime.Sub(alignedScrapeTime) >= sl.interval {
+				alignedScrapeTime = alignedScrapeTime.Add(sl.interval)
			}
			// Align the scrape time if we are in the tolerance boundaries.
			if scrapeTime.Sub(alignedScrapeTime) <= scrapeTimestampTolerance {

@@ -1138,7 +1171,7 @@ mainLoop:
			}
		}

-		last = sl.scrapeAndReport(interval, timeout, last, scrapeTime, errc)
+		last = sl.scrapeAndReport(sl.interval, sl.timeout, last, scrapeTime, errc)

		select {
		case <-sl.parentCtx.Done():

@@ -1153,7 +1186,7 @@ mainLoop:
	close(sl.stopped)

	if !sl.disabledEndOfRunStalenessMarkers {
-		sl.endOfRunStaleness(last, ticker, interval)
+		sl.endOfRunStaleness(last, ticker, sl.interval)
	}
}

@@ -1192,7 +1225,7 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last, app
	}()

	defer func() {
-		if err = sl.report(app, appendTime, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
+		if err = sl.report(app, appendTime, timeout, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
			level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
		}
	}()

@@ -1580,9 +1613,11 @@ const (
	scrapeSamplesMetricName      = "scrape_samples_scraped" + "\xff"
	samplesPostRelabelMetricName = "scrape_samples_post_metric_relabeling" + "\xff"
	scrapeSeriesAddedMetricName  = "scrape_series_added" + "\xff"
+	scrapeTimeoutMetricName      = "scrape_timeout_seconds" + "\xff"
+	scrapeSampleLimitMetricName  = "scrape_sample_limit" + "\xff"
)

-func (sl *scrapeLoop) report(app storage.Appender, start time.Time, duration time.Duration, scraped, added, seriesAdded int, scrapeErr error) (err error) {
+func (sl *scrapeLoop) report(app storage.Appender, start time.Time, timeout, duration time.Duration, scraped, added, seriesAdded int, scrapeErr error) (err error) {
	sl.scraper.Report(start, duration, scrapeErr)

	ts := timestamp.FromTime(start)

@@ -1607,6 +1642,14 @@ func (sl *scrapeLoop) report(app storage.Appender, start time.Time, duration tim
	if err = sl.addReportSample(app, scrapeSeriesAddedMetricName, ts, float64(seriesAdded)); err != nil {
		return
	}
+	if sl.reportScrapeTimeout {
+		if err = sl.addReportSample(app, scrapeTimeoutMetricName, ts, timeout.Seconds()); err != nil {
+			return
+		}
+		if err = sl.addReportSample(app, scrapeSampleLimitMetricName, ts, float64(sl.sampleLimit)); err != nil {
+			return
+		}
+	}
	return
}

@@ -1630,6 +1673,14 @@ func (sl *scrapeLoop) reportStale(app storage.Appender, start time.Time) (err er
	if err = sl.addReportSample(app, scrapeSeriesAddedMetricName, ts, stale); err != nil {
		return
	}
+	if sl.reportScrapeTimeout {
+		if err = sl.addReportSample(app, scrapeTimeoutMetricName, ts, stale); err != nil {
+			return
+		}
+		if err = sl.addReportSample(app, scrapeSampleLimitMetricName, ts, stale); err != nil {
+			return
+		}
+	}
	return
}
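Editorial note: the run signature change follows a simple refactoring pattern: values that every caller had to thread through (interval, timeout) become loop state set once at construction, so all call sites collapse to run(errc). In miniature, with illustrative names rather than the Prometheus types:

package main

import (
	"fmt"
	"time"
)

// miniLoop carries its schedule as state instead of taking it per call.
type miniLoop struct {
	interval time.Duration
	timeout  time.Duration
}

// run no longer needs the schedule passed in; callers just start the loop.
func (l *miniLoop) run(errc chan<- error) {
	ticker := time.NewTicker(l.interval)
	defer ticker.Stop()
	for i := 0; i < 2; i++ {
		<-ticker.C
		fmt.Println("scrape with timeout", l.timeout)
	}
	_ = errc // error reporting elided in this sketch
}

func main() {
	l := &miniLoop{interval: 10 * time.Millisecond, timeout: 5 * time.Millisecond}
	l.run(nil)
}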
@@ -57,7 +57,7 @@ func TestNewScrapePool(t *testing.T) {
	var (
		app   = &nopAppendable{}
		cfg   = &config.ScrapeConfig{}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
	)

	if a, ok := sp.appendable.(*nopAppendable); !ok || a != app {

@@ -92,8 +92,8 @@ func TestDroppedTargetsList(t *testing.T) {
				},
			},
		}
-		sp, _                  = newScrapePool(cfg, app, 0, nil)
-		expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", job=\"dropMe\"}"
+		sp, _                  = newScrapePool(cfg, app, 0, nil, false)
+		expectedLabelSetString = "{__address__=\"127.0.0.1:9090\", __scrape_interval__=\"0s\", __scrape_timeout__=\"0s\", job=\"dropMe\"}"
		expectedLength = 1
	)
	sp.Sync(tgs)

@@ -146,14 +146,16 @@ type testLoop struct {
	forcedErr    error
	forcedErrMtx sync.Mutex
	runOnce      bool
+	interval     time.Duration
+	timeout      time.Duration
}

-func (l *testLoop) run(interval, timeout time.Duration, errc chan<- error) {
+func (l *testLoop) run(errc chan<- error) {
	if l.runOnce {
		panic("loop must be started only once")
	}
	l.runOnce = true
-	l.startFunc(interval, timeout, errc)
+	l.startFunc(l.interval, l.timeout, errc)
}

func (l *testLoop) disableEndOfRunStalenessMarkers() {

@@ -250,7 +252,7 @@ func TestScrapePoolReload(t *testing.T) {
	// On starting to run, new loops created on reload check whether their preceding
	// equivalents have been stopped.
	newLoop := func(opts scrapeLoopOptions) loop {
-		l := &testLoop{}
+		l := &testLoop{interval: time.Duration(reloadCfg.ScrapeInterval), timeout: time.Duration(reloadCfg.ScrapeTimeout)}
		l.startFunc = func(interval, timeout time.Duration, errc chan<- error) {
			require.Equal(t, 3*time.Second, interval, "Unexpected scrape interval")
			require.Equal(t, 2*time.Second, timeout, "Unexpected scrape timeout")

@@ -276,8 +278,10 @@ func TestScrapePoolReload(t *testing.T) {
	// one terminated.

	for i := 0; i < numTargets; i++ {
+		labels := labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i))
		t := &Target{
-			labels: labels.FromStrings(model.AddressLabel, fmt.Sprintf("example.com:%d", i)),
+			labels:           labels,
+			discoveredLabels: labels,
		}
		l := &testLoop{}
		l.stopFunc = func() {

@@ -342,7 +346,7 @@ func TestScrapePoolTargetLimit(t *testing.T) {
		activeTargets: map[uint64]*Target{},
		loops:         map[uint64]loop{},
		newLoop:       newLoop,
-		logger:        nil,
+		logger:        log.NewNopLogger(),
		client:        http.DefaultClient,
	}

@@ -452,7 +456,7 @@ func TestScrapePoolTargetLimit(t *testing.T) {
func TestScrapePoolAppender(t *testing.T) {
	cfg := &config.ScrapeConfig{}
	app := &nopAppendable{}
-	sp, _ := newScrapePool(cfg, app, 0, nil)
+	sp, _ := newScrapePool(cfg, app, 0, nil, false)

	loop := sp.newLoop(scrapeLoopOptions{
		target: &Target{},

@@ -488,12 +492,12 @@ func TestScrapePoolAppender(t *testing.T) {
}

func TestScrapePoolRaces(t *testing.T) {
-	interval, _ := model.ParseDuration("500ms")
-	timeout, _ := model.ParseDuration("1s")
+	interval, _ := model.ParseDuration("1s")
+	timeout, _ := model.ParseDuration("500ms")
	newConfig := func() *config.ScrapeConfig {
		return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
	}
-	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil)
+	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, false)
	tgts := []*targetgroup.Group{
		{
			Targets: []model.LabelSet{

@@ -582,7 +586,11 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {
		nopMutator,
		nil, nil, 0,
		true,
		0,
		nil,
+		1,
+		0,
+		false,
	)

	// The scrape pool synchronizes on stopping scrape loops. However, new scrape

@@ -611,7 +619,7 @@ func TestScrapeLoopStopBeforeRun(t *testing.T) {

	runDone := make(chan struct{})
	go func() {
-		sl.run(1, 0, nil)
+		sl.run(nil)
		close(runDone)
	}()

@@ -647,7 +655,11 @@ func TestScrapeLoopStop(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	// Terminate loop after 2 scrapes.

@@ -664,7 +676,7 @@ func TestScrapeLoopStop(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -715,7 +727,11 @@ func TestScrapeLoopRun(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		time.Second,
+		time.Hour,
+		false,
	)

	// The loop must terminate during the initial offset if the context

@@ -723,7 +739,7 @@ func TestScrapeLoopRun(t *testing.T) {
	scraper.offsetDur = time.Hour

	go func() {
-		sl.run(time.Second, time.Hour, errc)
+		sl.run(errc)
		signal <- struct{}{}
	}()

@@ -763,11 +779,15 @@ func TestScrapeLoopRun(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		time.Second,
+		100*time.Millisecond,
+		false,
	)

	go func() {
-		sl.run(time.Second, 100*time.Millisecond, errc)
+		sl.run(errc)
		signal <- struct{}{}
	}()

@@ -815,7 +835,11 @@ func TestScrapeLoopForcedErr(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		time.Second,
+		time.Hour,
+		false,
	)

	forcedErr := fmt.Errorf("forced err")

@@ -827,7 +851,7 @@ func TestScrapeLoopForcedErr(t *testing.T) {
	}

	go func() {
-		sl.run(time.Second, time.Hour, errc)
+		sl.run(errc)
		signal <- struct{}{}
	}()

@@ -866,7 +890,11 @@ func TestScrapeLoopMetadata(t *testing.T) {
		cache,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)
	defer cancel()

@@ -916,7 +944,11 @@ func TestScrapeLoopSeriesAdded(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)
	defer cancel()

@@ -955,7 +987,11 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)
	// Succeed once, several failures, then stop.
	numScrapes := 0

@@ -973,7 +1009,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -1010,7 +1046,11 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	// Succeed once, several failures, then stop.

@@ -1030,7 +1070,7 @@ func TestScrapeLoopRunCreatesStaleMarkersOnParseFailure(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -1069,7 +1109,11 @@ func TestScrapeLoopCache(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	numScrapes := 0

@@ -1106,7 +1150,7 @@ func TestScrapeLoopCache(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -1144,7 +1188,11 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	numScrapes := 0

@@ -1164,7 +1212,7 @@ func TestScrapeLoopCacheMemoryExhaustionProtection(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -1251,7 +1299,11 @@ func TestScrapeLoopAppend(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1293,7 +1345,11 @@ func TestScrapeLoopAppendCacheEntryButErrNotFound(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	fakeRef := uint64(1)

@@ -1343,7 +1399,11 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
		nil,
		0,
		true,
		app.limit,
		nil,
+		0,
+		0,
+		false,
	)

	// Get the value of the Counter before performing the append.

@@ -1413,7 +1473,11 @@ func TestScrapeLoop_ChangingMetricString(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1454,7 +1518,11 @@ func TestScrapeLoopAppendStaleness(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1498,7 +1566,11 @@ func TestScrapeLoopAppendNoStalenessIfTimestamp(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1600,7 +1672,11 @@ metric_total{n="2"} 2 # {t="2"} 2.0 20000
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1658,7 +1734,11 @@ func TestScrapeLoopAppendExemplarSeries(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -1703,7 +1783,11 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {

@@ -1711,7 +1795,7 @@ func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
		return errors.New("scrape failed")
	}

-	sl.run(10*time.Millisecond, time.Hour, nil)
+	sl.run(nil)
	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
}

@@ -1732,7 +1816,11 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {

@@ -1741,7 +1829,7 @@ func TestScrapeLoopRunReportsTargetDownOnInvalidUTF8(t *testing.T) {
		return nil
	}

-	sl.run(10*time.Millisecond, time.Hour, nil)
+	sl.run(nil)
	require.Equal(t, 0.0, appender.result[0].v, "bad 'up' value")
}

@@ -1774,7 +1862,11 @@ func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Unix(1, 0)

@@ -1812,7 +1904,11 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now().Add(20 * time.Minute)

@@ -2063,7 +2159,11 @@ func TestScrapeLoop_RespectTimestamps(t *testing.T) {
		func(ctx context.Context) storage.Appender { return capp },
		nil, 0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -2097,7 +2197,11 @@ func TestScrapeLoop_DiscardTimestamps(t *testing.T) {
		func(ctx context.Context) storage.Appender { return capp },
		nil, 0,
		false,
		0,
		nil,
+		0,
+		0,
+		false,
	)

	now := time.Now()

@@ -2130,7 +2234,11 @@ func TestScrapeLoopDiscardDuplicateLabels(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)
	defer cancel()

@@ -2181,7 +2289,11 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)
	defer cancel()

@@ -2274,7 +2386,7 @@ func TestReuseScrapeCache(t *testing.T) {
			ScrapeInterval: model.Duration(5 * time.Second),
			MetricsPath:    "/metrics",
		}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
		t1 = &Target{
			discoveredLabels: labels.Labels{
				labels.Label{

@@ -2399,7 +2511,11 @@ func TestScrapeAddFast(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		0,
+		0,
+		false,
	)
	defer cancel()

@@ -2429,7 +2545,7 @@ func TestReuseCacheRace(t *testing.T) {
			ScrapeInterval: model.Duration(5 * time.Second),
			MetricsPath:    "/metrics",
		}
-		sp, _ = newScrapePool(cfg, app, 0, nil)
+		sp, _ = newScrapePool(cfg, app, 0, nil, false)
		t1 = &Target{
			discoveredLabels: labels.Labels{
				labels.Label{

@@ -2483,7 +2599,11 @@ func TestScrapeReportSingleAppender(t *testing.T) {
		nil,
		0,
		true,
		0,
		nil,
+		10*time.Millisecond,
+		time.Hour,
+		false,
	)

	numScrapes := 0

@@ -2498,7 +2618,7 @@ func TestScrapeReportSingleAppender(t *testing.T) {
	}

	go func() {
-		sl.run(10*time.Millisecond, time.Hour, nil)
+		sl.run(nil)
		signal <- struct{}{}
	}()

@@ -2612,7 +2732,11 @@ func TestScrapeLoopLabelLimit(t *testing.T) {
		nil,
		0,
		true,
		0,
		&test.labelLimits,
+		0,
+		0,
+		false,
	)

	slApp := sl.appender(context.Background())

@@ -2627,3 +2751,40 @@ func TestScrapeLoopLabelLimit(t *testing.T) {
		}
	}
}
+
+func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
+	interval, _ := model.ParseDuration("2s")
+	timeout, _ := model.ParseDuration("500ms")
+	config := &config.ScrapeConfig{
+		ScrapeInterval: interval,
+		ScrapeTimeout:  timeout,
+		RelabelConfigs: []*relabel.Config{
+			{
+				SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
+				Regex:        relabel.MustNewRegexp("2s"),
+				Replacement:  "3s",
+				TargetLabel:  model.ScrapeIntervalLabel,
+				Action:       relabel.Replace,
+			},
+			{
+				SourceLabels: model.LabelNames{model.ScrapeTimeoutLabel},
+				Regex:        relabel.MustNewRegexp("500ms"),
+				Replacement:  "750ms",
+				TargetLabel:  model.ScrapeTimeoutLabel,
+				Action:       relabel.Replace,
+			},
+		},
+	}
+	sp, _ := newScrapePool(config, &nopAppendable{}, 0, nil, false)
+	tgts := []*targetgroup.Group{
+		{
+			Targets: []model.LabelSet{{model.AddressLabel: "127.0.0.1:9090"}},
+		},
+	}
+
+	sp.Sync(tgts)
+	defer sp.stop()
+
+	require.Equal(t, "3s", sp.ActiveTargets()[0].labels.Get(model.ScrapeIntervalLabel))
+	require.Equal(t, "750ms", sp.ActiveTargets()[0].labels.Get(model.ScrapeTimeoutLabel))
+}
@@ -143,8 +143,18 @@ func (t *Target) SetMetadataStore(s MetricMetadataStore) {
// hash returns an identifying hash for the target.
func (t *Target) hash() uint64 {
	h := fnv.New64a()

	// We must build a label set without the scrape interval and timeout
	// labels because those aren't defining attributes of a target and can
	// be changed without turning it into a new target, so they should not
	// affect its unique hash.
	l := t.labels.Map()
	delete(l, model.ScrapeIntervalLabel)
	delete(l, model.ScrapeTimeoutLabel)
	lset := labels.FromMap(l)

	//nolint: errcheck
-	h.Write([]byte(fmt.Sprintf("%016d", t.labels.Hash())))
+	h.Write([]byte(fmt.Sprintf("%016d", lset.Hash())))
	//nolint: errcheck
	h.Write([]byte(t.URL().String()))
@@ -273,6 +283,31 @@ func (t *Target) Health() TargetHealth {
	return t.health
}

// intervalAndTimeout returns the interval and timeout derived from
// the target's labels.
func (t *Target) intervalAndTimeout(defaultInterval, defaultDuration time.Duration) (time.Duration, time.Duration, error) {
	t.mtx.RLock()
	defer t.mtx.RUnlock()

	intervalLabel := t.labels.Get(model.ScrapeIntervalLabel)
	interval, err := model.ParseDuration(intervalLabel)
	if err != nil {
		return defaultInterval, defaultDuration, errors.Errorf("Error parsing interval label %q: %v", intervalLabel, err)
	}
	timeoutLabel := t.labels.Get(model.ScrapeTimeoutLabel)
	timeout, err := model.ParseDuration(timeoutLabel)
	if err != nil {
		return defaultInterval, defaultDuration, errors.Errorf("Error parsing timeout label %q: %v", timeoutLabel, err)
	}

	return time.Duration(interval), time.Duration(timeout), nil
}

// GetValue gets a label value from the entire label set.
func (t *Target) GetValue(name string) string {
	return t.labels.Get(name)
}

// Targets is a sortable list of targets.
type Targets []*Target
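The fallback behaviour above is worth pinning down: on any parse error the job-level defaults win. Since intervalAndTimeout is unexported, the sketch below reimplements the same label-with-fallback pattern using only public APIs; the helper name is illustrative, not part of the Prometheus codebase.

```
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/pkg/labels"
)

// intervalOrDefault is a hypothetical helper mirroring the hunk above:
// read the override label, and fall back to the default when the label
// is absent or unparsable.
func intervalOrDefault(lset labels.Labels, def time.Duration) time.Duration {
	v := lset.Get(model.ScrapeIntervalLabel)
	d, err := model.ParseDuration(v)
	if err != nil {
		return def // a missing label reads as "" and fails to parse, so the default wins
	}
	return time.Duration(d)
}

func main() {
	withOverride := labels.FromMap(map[string]string{model.ScrapeIntervalLabel: "30s"})
	fmt.Println(intervalOrDefault(withOverride, 15*time.Second))    // 30s
	fmt.Println(intervalOrDefault(labels.Labels{}, 15*time.Second)) // 15s
}
```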
@@ -329,6 +364,8 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig labels.Labels, err error) {
	// Copy labels into the labelset for the target if they are not set already.
	scrapeLabels := []labels.Label{
		{Name: model.JobLabel, Value: cfg.JobName},
		{Name: model.ScrapeIntervalLabel, Value: cfg.ScrapeInterval.String()},
		{Name: model.ScrapeTimeoutLabel, Value: cfg.ScrapeTimeout.String()},
		{Name: model.MetricsPathLabel, Value: cfg.MetricsPath},
		{Name: model.SchemeLabel, Value: cfg.Scheme},
	}

@@ -390,6 +427,34 @@ func populateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig labels.Labels, err error) {
		return nil, nil, err
	}

	var interval string
	var intervalDuration model.Duration
	if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
		intervalDuration, err = model.ParseDuration(interval)
		if err != nil {
			return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
		}
		if time.Duration(intervalDuration) == 0 {
			return nil, nil, errors.New("scrape interval cannot be 0")
		}
	}

	var timeout string
	var timeoutDuration model.Duration
	if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
		timeoutDuration, err = model.ParseDuration(timeout)
		if err != nil {
			return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
		}
		if time.Duration(timeoutDuration) == 0 {
			return nil, nil, errors.New("scrape timeout cannot be 0")
		}
	}

	if timeoutDuration > intervalDuration {
		return nil, nil, errors.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)", timeout, interval)
	}

	// Meta labels are deleted after relabelling. Other internal labels propagate to
	// the target which decides whether they will be part of their label set.
	for _, l := range lset {
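The explicit zero checks above are not redundant: model.ParseDuration happily parses "0s" to a zero duration, so an error check alone would let a relabelled zero interval or timeout slip through. A minimal sketch to convince yourself:

```
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/common/model"
)

func main() {
	// Parses without error, yet yields a zero duration - exactly the case
	// the hunk above rejects with "scrape interval cannot be 0".
	d, err := model.ParseDuration("0s")
	fmt.Println(time.Duration(d), err) // 0s <nil>
}
```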
@@ -382,3 +382,29 @@ func TestTargetsFromGroup(t *testing.T) {
		t.Fatalf("Expected error %s, got %s", expectedError, failures[0])
	}
}

func TestTargetHash(t *testing.T) {
	target1 := &Target{
		labels: labels.Labels{
			{Name: model.AddressLabel, Value: "localhost"},
			{Name: model.SchemeLabel, Value: "http"},
			{Name: model.MetricsPathLabel, Value: "/metrics"},
			{Name: model.ScrapeIntervalLabel, Value: "15s"},
			{Name: model.ScrapeTimeoutLabel, Value: "500ms"},
		},
	}
	hash1 := target1.hash()

	target2 := &Target{
		labels: labels.Labels{
			{Name: model.AddressLabel, Value: "localhost"},
			{Name: model.SchemeLabel, Value: "http"},
			{Name: model.MetricsPathLabel, Value: "/metrics"},
			{Name: model.ScrapeIntervalLabel, Value: "14s"},
			{Name: model.ScrapeTimeoutLabel, Value: "600ms"},
		},
	}
	hash2 := target2.hash()

	require.Equal(t, hash1, hash2, "Scrape interval and timeout labels should not affect the hash.")
}
@@ -50,7 +50,7 @@ func TestTemplateExpansion(t *testing.T) {
	// Non-ASCII space (not allowed in text/template, see https://github.com/golang/go/blob/master/src/text/template/parse/lex.go#L98)
	text: "{{ }}",
	shouldFail: true,
-	errorMsg: "error parsing template test: template: test:1: unexpected unrecognized character in action: U+00A0 in command",
+	errorMsg: "error parsing template test: template: test:1: unrecognized character in action: U+00A0",
	},
	{
	// HTML escaping.

@@ -157,7 +157,7 @@ func TestTemplateExpansion(t *testing.T) {
	// Unparsable template.
	text: "{{",
	shouldFail: true,
-	errorMsg: "error parsing template test: template: test:1: unexpected unclosed action in command",
+	errorMsg: "error parsing template test: template: test:1: unclosed action",
	},
	{
	// Error in function.

@@ -194,7 +194,7 @@ func TestTemplateExpansion(t *testing.T) {
	// Humanize - string with error.
	text: `{{ humanize "one" }}`,
	shouldFail: true,
-	errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`,
+	errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanize "one">: error calling humanize: strconv.ParseFloat: parsing "one": invalid syntax`,
	},
	{
	// Humanize1024 - float64.

@@ -212,7 +212,7 @@ func TestTemplateExpansion(t *testing.T) {
	// Humanize1024 - string with error.
	text: `{{ humanize1024 "one" }}`,
	shouldFail: true,
-	errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`,
+	errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanize1024 "one">: error calling humanize1024: strconv.ParseFloat: parsing "one": invalid syntax`,
	},
	{
	// HumanizeDuration - seconds - float64.

@@ -242,7 +242,7 @@ func TestTemplateExpansion(t *testing.T) {
	// HumanizeDuration - string with error.
	text: `{{ humanizeDuration "one" }}`,
	shouldFail: true,
-	errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`,
+	errorMsg: `error executing template test: template: test:1:3: executing "test" at <humanizeDuration "one">: error calling humanizeDuration: strconv.ParseFloat: parsing "one": invalid syntax`,
	},
	{
	// Humanize* Inf and NaN - float64.

@@ -270,7 +270,7 @@ func TestTemplateExpansion(t *testing.T) {
	// HumanizePercentage - model.SampleValue input - string with error.
	text: `{{ "one" | humanizePercentage }}`,
	shouldFail: true,
-	errorMsg: `strconv.ParseFloat: parsing "one": invalid syntax`,
+	errorMsg: `error executing template test: template: test:1:11: executing "test" at <humanizePercentage>: error calling humanizePercentage: strconv.ParseFloat: parsing "one": invalid syntax`,
	},
	{
	// HumanizeTimestamp - model.SampleValue input - float64.

@@ -349,6 +349,7 @@ func TestTemplateExpansion(t *testing.T) {
	}
	if s.shouldFail {
		require.Error(t, err, "%v", s.text)
		require.EqualError(t, err, s.errorMsg)
		continue
	}
@@ -3,6 +3,11 @@

Memory snapshot uses the WAL package and writes each series as a WAL record.
Below are the formats of the individual records.

The order of records in the snapshot is always:

1. Series records, one per series, in no particular order.
2. After all series are done, a tombstone record containing all the tombstones.
3. At the end, one or more exemplar records, batching up the exemplars in each record. Exemplars appear in the order they were written to the circular buffer.

### Series records

This record is a snapshot of a single series. Only one series exists per record.

@@ -60,3 +65,30 @@ as tombstone file in blocks.

│ len(Encoded Tombstones) <uvarint> │ Encoded Tombstones <bytes> │
└───────────────────────────────────┴─────────────────────────────┘
```

### Exemplar record

A single exemplar record contains one or more exemplars, encoded in the same way as in the WAL but with a changed record type.

```
┌───────────────────────────────────────────────────────────────────┐
│ Record Type <byte>                                                 │
├───────────────────────────────────────────────────────────────────┤
│ ┌────────────────────┬───────────────────────────┐                │
│ │ series ref <8b>    │ timestamp <8b>            │                │
│ └────────────────────┴───────────────────────────┘                │
│ ┌─────────────────────┬───────────────────────────┬─────────────┐ │
│ │ ref_delta <uvarint> │ timestamp_delta <uvarint> │ value <8b>  │ │
│ ├─────────────────────┴───────────────────────────┴─────────────┤ │
│ │  n = len(labels) <uvarint>                                    │ │
│ ├───────────────────────────────┬───────────────────────────────┤ │
│ │ len(str_1) <uvarint>          │ str_1 <bytes>                 │ │
│ ├───────────────────────────────┴───────────────────────────────┤ │
│ │                              ...                              │ │
│ ├───────────────────────────────┬───────────────────────────────┤ │
│ │ len(str_2n) <uvarint>         │ str_2n <bytes>                │ │
│ ├───────────────────────────────┴───────────────────────────────┤ │
│ . . .                                                             │
└───────────────────────────────────────────────────────────────────┘
```
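Because the order is fixed (series, then tombstones, then exemplars), a reader can dispatch purely on the leading type byte and assume every series is known once it sees the first exemplar record. Below is a minimal, self-contained sketch of that dispatch; the type byte values 1/2/3 match the constants this commit adds in tsdb/head_wal.go, while the payload handling here is a placeholder:

```
package main

import "fmt"

func replay(records [][]byte) error {
	for _, rec := range records {
		switch rec[0] { // leading byte is the snapshot record type
		case 1: // series record: one series per record
			fmt.Println("load series")
		case 2: // tombstone record: follows all series records
			fmt.Println("load tombstones")
		case 3: // exemplar record: all series refs are resolvable by now
			fmt.Println("load exemplars, oldest first")
		default:
			return fmt.Errorf("unsupported snapshot record type %d", rec[0])
		}
	}
	return nil
}

func main() {
	// Shape of a valid snapshot: series*, tombstones, exemplars+.
	if err := replay([][]byte{{1}, {1}, {2}, {3}, {3}}); err != nil {
		fmt.Println(err)
	}
}
```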
@@ -279,7 +279,7 @@ func (ce *CircularExemplarStorage) Resize(l int64) int {

	migrated := 0

-	if l > 0 {
+	if l > 0 && len(oldBuffer) > 0 {
		// Rewind previous next index by count with wrap-around.
		// This math is essentially looking at nextIndex, where we would write the next exemplar to,
		// and finds the index in the old exemplar buffer that we should start migrating exemplars from.

@@ -400,3 +400,23 @@ func (ce *CircularExemplarStorage) computeMetrics() {
		ce.metrics.lastExemplarsTs.Set(float64(ce.exemplars[0].exemplar.Ts) / 1000)
	}
}

// IterateExemplars iterates through all the exemplars from oldest to newest appended and calls
// the given function on each of them, stopping at the first call that returns an error.
func (ce *CircularExemplarStorage) IterateExemplars(f func(seriesLabels labels.Labels, e exemplar.Exemplar) error) error {
	ce.lock.RLock()
	defer ce.lock.RUnlock()

	idx := ce.nextIndex
	l := len(ce.exemplars)
	for i := 0; i < l; i, idx = i+1, (idx+1)%l {
		if ce.exemplars[idx] == nil {
			continue
		}
		err := f(ce.exemplars[idx].ref.seriesLabels, ce.exemplars[idx].exemplar)
		if err != nil {
			return err
		}
	}
	return nil
}
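A quick usage sketch for the new iterator: because the buffer is circular, iteration starts at nextIndex (the oldest slot) and only ever sees what is still resident. The constructor call below matches the one used in the benchmark further down; treat NewExemplarMetrics and the exact signatures as assumptions if you are on a different revision.

```
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/prometheus/pkg/exemplar"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/tsdb"
)

func main() {
	m := tsdb.NewExemplarMetrics(prometheus.NewRegistry()) // assumed exported helper
	es, err := tsdb.NewCircularExemplarStorage(4, m)       // room for 4 exemplars
	if err != nil {
		panic(err)
	}

	l := labels.FromStrings("service", "a")
	for i := 0; i < 6; i++ { // the first two adds get overwritten
		if err := es.AddExemplar(l, exemplar.Exemplar{Value: float64(i), Ts: int64(i)}); err != nil {
			panic(err)
		}
	}

	// Should print 2, 3, 4, 5: oldest-to-newest among the surviving entries.
	_ = es.IterateExemplars(func(_ labels.Labels, e exemplar.Exemplar) error {
		fmt.Println(e.Ts)
		return nil
	})
}
```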
@@ -413,7 +413,7 @@ func TestResize(t *testing.T) {
	expectedMigrated: 50,
	},
	{
-	name: "Zero",
+	name: "ShrinkToZero",
	startSize: 100,
	newCount: 0,
	expectedSeries: []int{},

@@ -436,6 +436,14 @@ func TestResize(t *testing.T) {
	notExpectedSeries: []int{},
	expectedMigrated: 0,
	},
	{
	name: "GrowFromZero",
	startSize: 0,
	newCount: 10,
	expectedSeries: []int{},
	notExpectedSeries: []int{},
	expectedMigrated: 0,
	},
	}

	for _, tc := range testCases {

@@ -477,16 +485,27 @@ func TestResize(t *testing.T) {
	}
}

-func BenchmarkAddExemplar(t *testing.B) {
-	exs, err := NewCircularExemplarStorage(int64(t.N), eMetrics)
-	require.NoError(t, err)
-	es := exs.(*CircularExemplarStorage)
+func BenchmarkAddExemplar(b *testing.B) {
+	// We need to include these labels since we do length calculation
+	// before adding.
+	exLabels := labels.Labels{{Name: "traceID", Value: "89620921"}}

-	for i := 0; i < t.N; i++ {
-		l := labels.FromStrings("service", strconv.Itoa(i))
+	for _, n := range []int{10000, 100000, 1000000} {
+		b.Run(fmt.Sprintf("%d", n), func(b *testing.B) {
+			exs, err := NewCircularExemplarStorage(int64(n), eMetrics)
+			require.NoError(b, err)
+			es := exs.(*CircularExemplarStorage)

-		err = es.AddExemplar(l, exemplar.Exemplar{Value: float64(i), Ts: int64(i)})
-		require.NoError(t, err)
+			b.ResetTimer()
+			l := labels.Labels{{Name: "service", Value: strconv.Itoa(0)}}
+			for i := 0; i < n; i++ {
+				if i%100 == 0 {
+					l = labels.Labels{{Name: "service", Value: strconv.Itoa(i)}}
+				}
+				err = es.AddExemplar(l, exemplar.Exemplar{Value: float64(i), Ts: int64(i), Labels: exLabels})
+				require.NoError(b, err)
+			}
+		})
	}
}
@@ -113,6 +113,7 @@ type ExemplarStorage interface {
	storage.ExemplarQueryable
	AddExemplar(labels.Labels, exemplar.Exemplar) error
	ValidateExemplar(labels.Labels, exemplar.Exemplar) error
	IterateExemplars(f func(seriesLabels labels.Labels, e exemplar.Exemplar) error) error
}

// HeadOptions are parameters for the Head block.

@@ -454,7 +455,7 @@ const cardinalityCacheExpirationTime = time.Duration(30) * time.Second
// Init loads data from the write ahead log and prepares the head for writes.
// It should be called before using an appender so that it
// limits the ingested samples to the head min valid time.
-func (h *Head) Init(minValidTime int64) (err error) {
+func (h *Head) Init(minValidTime int64) error {
	h.minValidTime.Store(minValidTime)
	defer h.postings.EnsureOrder()
	defer h.gc() // After loading the WAL, remove the obsolete data from the head.

@@ -474,6 +475,7 @@ func (h *Head) Init(minValidTime int64) (err error) {

	if h.opts.EnableMemorySnapshotOnShutdown {
		level.Info(h.logger).Log("msg", "Chunk snapshot is enabled, replaying from the snapshot")
		var err error
		snapIdx, snapOffset, refSeries, err = h.loadChunkSnapshot()
		if err != nil {
			snapIdx, snapOffset = -1, 0
@@ -2496,9 +2496,62 @@ func TestChunkSnapshot(t *testing.T) {
		require.NoError(t, head.Close())
	}()

	type ex struct {
		seriesLabels labels.Labels
		e            exemplar.Exemplar
	}

	numSeries := 10
	expSeries := make(map[string][]tsdbutil.Sample)
	expTombstones := make(map[uint64]tombstones.Intervals)
	expExemplars := make([]ex, 0)

	addExemplar := func(app storage.Appender, ref uint64, lbls labels.Labels, ts int64) {
		e := ex{
			seriesLabels: lbls,
			e: exemplar.Exemplar{
				Labels: labels.Labels{{Name: "traceID", Value: fmt.Sprintf("%d", rand.Int())}},
				Value:  rand.Float64(),
				Ts:     ts,
			},
		}
		expExemplars = append(expExemplars, e)
		_, err := app.AppendExemplar(ref, e.seriesLabels, e.e)
		require.NoError(t, err)
	}

	checkSamples := func() {
		q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64)
		require.NoError(t, err)
		series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*"))
		require.Equal(t, expSeries, series)
	}
	checkTombstones := func() {
		tr, err := head.Tombstones()
		require.NoError(t, err)
		actTombstones := make(map[uint64]tombstones.Intervals)
		require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
			for _, itv := range itvs {
				actTombstones[ref].Add(itv)
			}
			return nil
		}))
		require.Equal(t, expTombstones, actTombstones)
	}
	checkExemplars := func() {
		actExemplars := make([]ex, 0, len(expExemplars))
		err := head.exemplars.IterateExemplars(func(seriesLabels labels.Labels, e exemplar.Exemplar) error {
			actExemplars = append(actExemplars, ex{
				seriesLabels: seriesLabels,
				e:            e,
			})
			return nil
		})
		require.NoError(t, err)
		// Verifies both that the right exemplars exist and that they are in buffer order.
		require.Equal(t, expExemplars, actExemplars)
	}

	{ // Initial data that goes into snapshot.
		// Add some initial samples with >=1 m-map chunk.
		app := head.Appender(context.Background())

@@ -2509,11 +2562,12 @@ func TestChunkSnapshot(t *testing.T) {
		for ts := int64(1); ts <= 200; ts++ {
			val := rand.Float64()
			expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val})
-			_, err := app.Append(0, lbls, ts, val)
+			ref, err := app.Append(0, lbls, ts, val)
			require.NoError(t, err)

-			// To create multiple WAL records.
+			// Add an exemplar and commit, to create multiple WAL records.
			if ts%10 == 0 {
+				addExemplar(app, ref, lbls, ts)
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}

@@ -2538,6 +2592,7 @@ func TestChunkSnapshot(t *testing.T) {
			}, nil))
			require.NoError(t, err)
		}

	}

	// These references should be the ones used for the snapshot.

@@ -2563,22 +2618,9 @@ func TestChunkSnapshot(t *testing.T) {
		require.NoError(t, head.Init(math.MinInt64))

-		// Test query for snapshot replay.
-		q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64)
-		require.NoError(t, err)
-		series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*"))
-		require.Equal(t, expSeries, series)
-
-		// Check the tombstones.
-		tr, err := head.Tombstones()
-		require.NoError(t, err)
-		actTombstones := make(map[uint64]tombstones.Intervals)
-		require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
-			for _, itv := range itvs {
-				actTombstones[ref].Add(itv)
-			}
-			return nil
-		}))
-		require.Equal(t, expTombstones, actTombstones)
+		checkSamples()
+		checkTombstones()
+		checkExemplars()
	}

	{ // Additional data to only include in WAL and m-mapped chunks and not snapshot. This mimics having an old snapshot on disk.

@@ -2592,11 +2634,12 @@ func TestChunkSnapshot(t *testing.T) {
		for ts := int64(201); ts <= 400; ts++ {
			val := rand.Float64()
			expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val})
-			_, err := app.Append(0, lbls, ts, val)
+			ref, err := app.Append(0, lbls, ts, val)
			require.NoError(t, err)

-			// To create multiple WAL records.
+			// Add an exemplar and commit, to create multiple WAL records.
			if ts%10 == 0 {
+				addExemplar(app, ref, lbls, ts)
				require.NoError(t, app.Commit())
				app = head.Appender(context.Background())
			}

@@ -2643,22 +2686,9 @@ func TestChunkSnapshot(t *testing.T) {
		require.NoError(t, head.Init(math.MinInt64))

-		// Test query when data is replayed from snapshot, m-map chunks, and WAL.
-		q, err := NewBlockQuerier(head, math.MinInt64, math.MaxInt64)
-		require.NoError(t, err)
-		series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", ".*"))
-		require.Equal(t, expSeries, series)
-
-		// Check the tombstones.
-		tr, err := head.Tombstones()
-		require.NoError(t, err)
-		actTombstones := make(map[uint64]tombstones.Intervals)
-		require.NoError(t, tr.Iter(func(ref uint64, itvs tombstones.Intervals) error {
-			for _, itv := range itvs {
-				actTombstones[ref].Add(itv)
-			}
-			return nil
-		}))
-		require.Equal(t, expTombstones, actTombstones)
+		checkSamples()
+		checkTombstones()
+		checkExemplars()
	}
}
tsdb/head_wal.go

@@ -413,6 +413,7 @@ func (h *Head) processWALSamples(
const (
	chunkSnapshotRecordTypeSeries     uint8 = 1
	chunkSnapshotRecordTypeTombstones uint8 = 2
	chunkSnapshotRecordTypeExemplars  uint8 = 3
)

type chunkSnapshotRecord struct {

@@ -537,6 +538,10 @@ const chunkSnapshotPrefix = "chunk_snapshot."
// The chunk snapshot is stored in a directory named chunk_snapshot.N.M and is written
// using the WAL package. N is the last WAL segment present during snapshotting and
// M is the offset in segment N up to which data was written.
//
// The snapshot first contains all series (each in an individual record, unsorted), followed by
// tombstones (a single record), and finally exemplars (>= 1 record). Exemplars are in the order they
// were written to the circular buffer.
func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
	if h.wal == nil {
		// If we are not storing any WAL, it does not make sense to take a snapshot either.

@@ -587,6 +592,7 @@ func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
	buf  []byte
	recs [][]byte
)
// Add all series to the snapshot.
stripeSize := h.series.size
for i := 0; i < stripeSize; i++ {
	h.series.locks[i].RLock()

@@ -622,11 +628,61 @@ func (h *Head) ChunkSnapshot() (*ChunkSnapshotStats, error) {
	return stats, errors.Wrap(err, "encode tombstones")
}
recs = append(recs, rec)

-// Flush remaining records.
+// Flush the remaining series records and tombstones.
if err := cp.Log(recs...); err != nil {
	return stats, errors.Wrap(err, "flush records")
}
buf = buf[:0]

// Add the exemplars to the snapshot.
// We log in batches, with each record holding up to 10000 exemplars.
// Assuming 100 bytes (an overestimate) per exemplar, that is ~1MB per record.
maxExemplarsPerRecord := 10000
batch := make([]record.RefExemplar, 0, maxExemplarsPerRecord)
enc := record.Encoder{}
flushExemplars := func() error {
	if len(batch) == 0 {
		return nil
	}
	buf = buf[:0]
	encbuf := encoding.Encbuf{B: buf}
	encbuf.PutByte(chunkSnapshotRecordTypeExemplars)
	enc.EncodeExemplarsIntoBuffer(batch, &encbuf)
	if err := cp.Log(encbuf.Get()); err != nil {
		return errors.Wrap(err, "log exemplars")
	}
	buf, batch = buf[:0], batch[:0]
	return nil
}
err = h.exemplars.IterateExemplars(func(seriesLabels labels.Labels, e exemplar.Exemplar) error {
	if len(batch) >= maxExemplarsPerRecord {
		if err := flushExemplars(); err != nil {
			return errors.Wrap(err, "flush exemplars")
		}
	}

	ms := h.series.getByHash(seriesLabels.Hash(), seriesLabels)
	if ms == nil {
		// It is possible that an exemplar refers to some old series. We discard such exemplars.
		return nil
	}
	batch = append(batch, record.RefExemplar{
		Ref:    ms.ref,
		T:      e.Ts,
		V:      e.Value,
		Labels: e.Labels,
	})
	return nil
})
if err != nil {
	return stats, errors.Wrap(err, "iterate exemplars")
}

// Flush the remaining exemplars.
if err := flushExemplars(); err != nil {
	return stats, errors.Wrap(err, "flush exemplars at the end")
}

if err := cp.Close(); err != nil {
	return stats, errors.Wrap(err, "close chunk snapshot")
}
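The flush helper above is a generic batch-and-flush pattern: append until the batch hits a cap, emit one record, reset the slice, and flush once more after the loop for the remainder. Stripped of the WAL specifics, the shape is just this (names are illustrative):

```
package main

import "fmt"

func main() {
	const maxPerRecord = 3
	batch := make([]int, 0, maxPerRecord)
	flush := func() {
		if len(batch) == 0 {
			return // nothing buffered; mirrors the early return above
		}
		fmt.Println("record:", batch)
		batch = batch[:0] // reuse the backing array for the next batch
	}
	for i := 0; i < 8; i++ {
		if len(batch) >= maxPerRecord {
			flush()
		}
		batch = append(batch, i)
	}
	flush() // the final partial batch still has to be written
}
```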
@@ -766,6 +822,9 @@ func (h *Head) loadChunkSnapshot() (int, int, map[uint64]*memSeries, error) {
	recordChan       = make(chan chunkSnapshotRecord, 5*n)
	shardedRefSeries = make([]map[uint64]*memSeries, n)
	errChan          = make(chan error, n)
	refSeries        map[uint64]*memSeries
	exemplarBuf      []record.RefExemplar
	dec              record.Decoder
)

wg.Add(n)

@@ -852,15 +911,58 @@ Outer:
	loopErr = errors.Wrap(err, "iterate tombstones")
	break Outer
}

case chunkSnapshotRecordTypeExemplars:
	// Exemplars sit at the end of the snapshot, so all series are loaded at this point.
	if len(refSeries) == 0 {
		close(recordChan)
		wg.Wait()

		refSeries = make(map[uint64]*memSeries, numSeries)
		for _, shard := range shardedRefSeries {
			for k, v := range shard {
				refSeries[k] = v
			}
		}
	}

	decbuf := encoding.Decbuf{B: rec[1:]}

	exemplarBuf = exemplarBuf[:0]
	exemplarBuf, err = dec.ExemplarsFromBuffer(&decbuf, exemplarBuf)
	if err != nil {
		loopErr = errors.Wrap(err, "exemplars from buffer")
		break Outer
	}

	for _, e := range exemplarBuf {
		ms, ok := refSeries[e.Ref]
		if !ok {
			unknownRefs++
			continue
		}

		if err := h.exemplars.AddExemplar(ms.lset, exemplar.Exemplar{
			Labels: e.Labels,
			Value:  e.V,
			Ts:     e.T,
		}); err != nil {
			loopErr = errors.Wrap(err, "append exemplar")
			break Outer
		}
	}

default:
	// This is a record type we don't understand. It is either an old format from earlier versions,
	// or a new format and the code was rolled back to an old version.
	loopErr = errors.Errorf("unsupported snapshot record type 0b%b", rec[0])
	break Outer
}

}
-close(recordChan)
-wg.Wait()
+if len(refSeries) == 0 {
+	close(recordChan)
+	wg.Wait()
+}

close(errChan)
merr := tsdb_errors.NewMulti(errors.Wrap(loopErr, "decode loop"))

@@ -875,10 +977,13 @@ Outer:
	return -1, -1, nil, errors.Wrap(r.Err(), "read records")
}

-refSeries := make(map[uint64]*memSeries, numSeries)
-for _, shard := range shardedRefSeries {
-	for k, v := range shard {
-		refSeries[k] = v
+if len(refSeries) == 0 {
+	// There was no exemplar record, so the map still has to be built here.
+	refSeries = make(map[uint64]*memSeries, numSeries)
+	for _, shard := range shardedRefSeries {
+		for k, v := range shard {
+			refSeries[k] = v
+		}
	}
}
@@ -182,6 +182,11 @@ func (d *Decoder) Exemplars(rec []byte, exemplars []RefExemplar) ([]RefExemplar, error) {
	if t != Exemplars {
		return nil, errors.New("invalid record type")
	}

	return d.ExemplarsFromBuffer(&dec, exemplars)
}

func (d *Decoder) ExemplarsFromBuffer(dec *encoding.Decbuf, exemplars []RefExemplar) ([]RefExemplar, error) {
	if dec.Len() == 0 {
		return exemplars, nil
	}

@@ -287,6 +292,12 @@ func (e *Encoder) Exemplars(exemplars []RefExemplar, b []byte) []byte {
		return buf.Get()
	}

	e.EncodeExemplarsIntoBuffer(exemplars, &buf)

	return buf.Get()
}

func (e *Encoder) EncodeExemplarsIntoBuffer(exemplars []RefExemplar, buf *encoding.Encbuf) {
	// Store base timestamp and base reference number of first sample.
	// All samples encode their timestamp and ref as delta to those.
	first := exemplars[0]

@@ -305,6 +316,4 @@ func (e *Encoder) Exemplars(exemplars []RefExemplar, b []byte) []byte {
		buf.PutUvarintStr(l.Value)
	}
}
-
-return buf.Get()
}
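The refactor above leaves the public Exemplars entry points unchanged while exposing the buffer-level halves for the snapshot code. A small round-trip sketch through the public pair, using only the signatures visible in this diff:

```
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/tsdb/record"
)

func main() {
	var (
		enc record.Encoder
		dec record.Decoder
	)
	in := []record.RefExemplar{
		{Ref: 1, T: 1000, V: 0.5, Labels: labels.FromStrings("traceID", "abc")},
		{Ref: 2, T: 1005, V: 1.5, Labels: labels.FromStrings("traceID", "def")},
	}
	// Encoder.Exemplars prepends the record type byte plus the base ref and
	// timestamp; Decoder.Exemplars checks the type and decodes the deltas.
	buf := enc.Exemplars(in, nil)
	out, err := dec.Exemplars(buf, nil)
	fmt.Println(err, len(out)) // <nil> 2
}
```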
@@ -760,6 +760,9 @@ type Target struct {
	LastScrape         time.Time           `json:"lastScrape"`
	LastScrapeDuration float64             `json:"lastScrapeDuration"`
	Health             scrape.TargetHealth `json:"health"`

	ScrapeInterval string `json:"scrapeInterval"`
	ScrapeTimeout  string `json:"scrapeTimeout"`
}

// DroppedTarget has the information for one target that was dropped during relabelling.

@@ -899,6 +902,8 @@ func (api *API) targets(r *http.Request) apiFuncResult {
	LastScrape:         target.LastScrape(),
	LastScrapeDuration: target.LastScrapeDuration().Seconds(),
	Health:             target.Health(),
	ScrapeInterval:     target.GetValue(model.ScrapeIntervalLabel),
	ScrapeTimeout:      target.GetValue(model.ScrapeTimeoutLabel),
})
}
}
@@ -534,10 +534,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
	{
		Identifier: "test",
		Labels: labels.FromMap(map[string]string{
-			model.SchemeLabel:      "http",
-			model.AddressLabel:     "example.com:8080",
-			model.MetricsPathLabel: "/metrics",
-			model.JobLabel:         "test",
+			model.SchemeLabel:         "http",
+			model.AddressLabel:        "example.com:8080",
+			model.MetricsPathLabel:    "/metrics",
+			model.JobLabel:            "test",
+			model.ScrapeIntervalLabel: "15s",
+			model.ScrapeTimeoutLabel:  "5s",
		}),
		DiscoveredLabels: nil,
		Params:           url.Values{},

@@ -547,10 +549,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
	{
		Identifier: "blackbox",
		Labels: labels.FromMap(map[string]string{
-			model.SchemeLabel:      "http",
-			model.AddressLabel:     "localhost:9115",
-			model.MetricsPathLabel: "/probe",
-			model.JobLabel:         "blackbox",
+			model.SchemeLabel:         "http",
+			model.AddressLabel:        "localhost:9115",
+			model.MetricsPathLabel:    "/probe",
+			model.JobLabel:            "blackbox",
+			model.ScrapeIntervalLabel: "20s",
+			model.ScrapeTimeoutLabel:  "10s",
		}),
		DiscoveredLabels: nil,
		Params:           url.Values{"target": []string{"example.com"}},

@@ -561,10 +565,12 @@ func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
		Identifier: "blackbox",
		Labels:     nil,
		DiscoveredLabels: labels.FromMap(map[string]string{
-			model.SchemeLabel:      "http",
-			model.AddressLabel:     "http://dropped.example.com:9115",
-			model.MetricsPathLabel: "/probe",
-			model.JobLabel:         "blackbox",
+			model.SchemeLabel:         "http",
+			model.AddressLabel:        "http://dropped.example.com:9115",
+			model.MetricsPathLabel:    "/probe",
+			model.JobLabel:            "blackbox",
+			model.ScrapeIntervalLabel: "30s",
+			model.ScrapeTimeoutLabel:  "15s",
		}),
		Params: url.Values{},
		Active: false,

@@ -951,6 +957,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "failed: missing port in address",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.1,
	ScrapeInterval:     "20s",
	ScrapeTimeout:      "10s",
},
{
	DiscoveredLabels: map[string]string{},

@@ -964,15 +972,19 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.07,
	ScrapeInterval:     "15s",
	ScrapeTimeout:      "5s",
},
},
DroppedTargets: []*DroppedTarget{
{
	DiscoveredLabels: map[string]string{
-		"__address__":      "http://dropped.example.com:9115",
-		"__metrics_path__": "/probe",
-		"__scheme__":       "http",
-		"job":              "blackbox",
+		"__address__":         "http://dropped.example.com:9115",
+		"__metrics_path__":    "/probe",
+		"__scheme__":          "http",
+		"job":                 "blackbox",
+		"__scrape_interval__": "30s",
+		"__scrape_timeout__":  "15s",
	},
},
},

@@ -997,6 +1009,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "failed: missing port in address",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.1,
	ScrapeInterval:     "20s",
	ScrapeTimeout:      "10s",
},
{
	DiscoveredLabels: map[string]string{},

@@ -1010,15 +1024,19 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.07,
	ScrapeInterval:     "15s",
	ScrapeTimeout:      "5s",
},
},
DroppedTargets: []*DroppedTarget{
{
	DiscoveredLabels: map[string]string{
-		"__address__":      "http://dropped.example.com:9115",
-		"__metrics_path__": "/probe",
-		"__scheme__":       "http",
-		"job":              "blackbox",
+		"__address__":         "http://dropped.example.com:9115",
+		"__metrics_path__":    "/probe",
+		"__scheme__":          "http",
+		"job":                 "blackbox",
+		"__scrape_interval__": "30s",
+		"__scrape_timeout__":  "15s",
	},
},
},

@@ -1043,6 +1061,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "failed: missing port in address",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.1,
	ScrapeInterval:     "20s",
	ScrapeTimeout:      "10s",
},
{
	DiscoveredLabels: map[string]string{},

@@ -1056,6 +1076,8 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
	LastError:          "",
	LastScrape:         scrapeStart,
	LastScrapeDuration: 0.07,
	ScrapeInterval:     "15s",
	ScrapeTimeout:      "5s",
},
},
DroppedTargets: []*DroppedTarget{},

@@ -1071,10 +1093,12 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.ExemplarStorage,
DroppedTargets: []*DroppedTarget{
{
	DiscoveredLabels: map[string]string{
-		"__address__":      "http://dropped.example.com:9115",
-		"__metrics_path__": "/probe",
-		"__scheme__":       "http",
-		"job":              "blackbox",
+		"__address__":         "http://dropped.example.com:9115",
+		"__metrics_path__":    "/probe",
+		"__scheme__":          "http",
+		"job":                 "blackbox",
+		"__scrape_interval__": "30s",
+		"__scrape_timeout__":  "15s",
	},
},
},
(One file's diff was suppressed by the viewer because it is too large.)
@@ -45,7 +45,7 @@
	"scripts": {
		"start": "react-scripts start",
		"build": "react-scripts build",
-		"test": "react-scripts test --runInBand",
+		"test": "react-scripts test --runInBand --resetMocks=false",
		"test:debug": "react-scripts --inspect-brk test --runInBand --no-cache",
		"eject": "react-scripts eject",
		"lint:ci": "eslint --quiet \"src/**/*.{ts,tsx}\"",

@@ -83,7 +83,8 @@
	"enzyme": "^3.11.0",
	"enzyme-to-json": "^3.6.2",
	"eslint-config-prettier": "^8.3.0",
-	"eslint-plugin-prettier": "^3.4.1",
+	"eslint-config-react-app": "^6.0.0",
+	"eslint-plugin-prettier": "^4.0.0",
	"jest-fetch-mock": "^3.0.3",
	"mutationobserver-shim": "^0.3.7",
	"prettier": "^2.3.2",

@@ -26,7 +26,7 @@ export const StartingContent: FC<StartingContentProps> = ({ status, isUnexpected
	{status && status.max > 0 ? (
		<div>
			<p>
-				Replaying WAL ({status?.current}/{status?.max})
+				Replaying WAL ({status.current}/{status.max})
			</p>
			<Progress
				animated

@@ -11,7 +11,9 @@ interface StatusIndicatorProps {
}

export const withStatusIndicator =
-	<T extends Record<string, any>>(Component: ComponentType<T>): FC<StatusIndicatorProps & T> =>
+	<T extends Record<string, any>>( // eslint-disable-line @typescript-eslint/no-explicit-any
+		Component: ComponentType<T>
+	): FC<StatusIndicatorProps & T> =>
	({ error, isLoading, customErrorMsg, componentTitle, ...rest }) => {
		if (error) {
			return (

@@ -1,4 +1,6 @@
import jquery from 'jquery';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).jQuery = jquery;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(window as any).moment = require('moment');
@@ -10,19 +10,15 @@ export interface FetchState<T> {
	isLoading: boolean;
}

-export interface FetchStateReady {
-	ready: boolean;
-	isUnexpected: boolean;
-	isLoading: boolean;
-}
-
export interface FetchStateReadyInterval {
	ready: boolean;
	isUnexpected: boolean;
	walReplayStatus: WALReplayStatus;
}

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export const useFetch = <T extends Record<string, any>>(url: string, options?: RequestInit): FetchState<T> => {
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	const [response, setResponse] = useState<APIResponse<T>>({ status: 'start fetching' } as any);
	const [error, setError] = useState<Error>();
	const [isLoading, setIsLoading] = useState<boolean>(true);

@@ -38,8 +34,9 @@ export const useFetch = <T extends Record<string, any>>(url: string, options?: RequestInit): FetchState<T> => {
	const json = (await res.json()) as APIResponse<T>;
	setResponse(json);
	setIsLoading(false);
-} catch (error) {
-	setError(error as Error);
+} catch (err: unknown) {
+	const error = err as Error;
+	setError(error);
}
};
fetchData();

@@ -54,6 +51,7 @@ let wasReady = false;
export const useFetchReadyInterval = (pathPrefix: string, options?: RequestInit): FetchStateReadyInterval => {
	const [ready, setReady] = useState<boolean>(false);
	const [isUnexpected, setIsUnexpected] = useState<boolean>(false);
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	const [walReplayStatus, setWALReplayStatus] = useState<WALReplayStatus>({} as any);

	useEffect(() => {

@@ -6,6 +6,7 @@ import { isPresent } from '../../utils';
import { Rule } from '../../types/types';
import { useLocalStorage } from '../../hooks/useLocalStorage';

// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type RuleState = keyof RuleStatus<any>;

export interface RuleStatus<T> {

@@ -108,6 +109,7 @@ interface GroupInfoProps {
}

export const GroupInfo: FC<GroupInfoProps> = ({ rules, children }) => {
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	const statesCounter = rules.reduce<any>(
		(acc, r) => {
			return {

@@ -27,7 +27,7 @@ export const ConfigContent: FC<ConfigContentProps> = ({ error, data }) => {
	<h2>
		Configuration
		<CopyToClipboard
-			text={config!}
+			text={config ? config : ''}
			onCopy={(_, result) => {
				setCopied(result);
				setTimeout(setCopied, 1500);
@@ -1,11 +1,11 @@
import React, { Component } from 'react';
-import { Button, InputGroup, InputGroupAddon, InputGroupText, Input } from 'reactstrap';
+import { Button, Input, InputGroup, InputGroupAddon, InputGroupText } from 'reactstrap';

import Downshift, { ControllerStateAndHelpers } from 'downshift';
import sanitizeHTML from 'sanitize-html';

import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { faSearch, faSpinner, faGlobeEurope } from '@fortawesome/free-solid-svg-icons';
+import { faGlobeEurope, faSearch, faSpinner } from '@fortawesome/free-solid-svg-icons';
import MetricsExplorer from './MetricsExplorer';
import { Fuzzy, FuzzyResult } from '@nexucis/fuzzy';

@@ -42,13 +42,17 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
}

setHeight = (): void => {
-	const { offsetHeight, clientHeight, scrollHeight } = this.exprInputRef.current!;
-	const offset = offsetHeight - clientHeight; // Needed in order for the height to be more accurate.
-	this.setState({ height: scrollHeight + offset });
+	if (this.exprInputRef.current) {
+		const { offsetHeight, clientHeight, scrollHeight } = this.exprInputRef.current;
+		const offset = offsetHeight - clientHeight; // Needed in order for the height to be more accurate.
+		this.setState({ height: scrollHeight + offset });
+	}
};

handleInput = (): void => {
-	this.setValue(this.exprInputRef.current!.value);
+	if (this.exprInputRef.current) {
+		this.setValue(this.exprInputRef.current.value);
+	}
};

setValue = (value: string): void => {

@@ -76,7 +80,8 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
	return fuz.filter(input.replace(/ /g, ''), expressions);
};

-createAutocompleteSection = (downshift: ControllerStateAndHelpers<any>) => {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+createAutocompleteSection = (downshift: ControllerStateAndHelpers<any>): JSX.Element | null => {
	const { inputValue = '', closeMenu, highlightedIndex } = downshift;
	const autocompleteSections = {
		'Query History': this.props.queryHistory,

@@ -94,7 +99,7 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
	<ul className="autosuggest-dropdown-list" key={title}>
		<li className="autosuggest-dropdown-header">{title}</li>
		{matches
-			.slice(0, 100) // Limit DOM rendering to 100 results, as DOM rendering is sloooow.
+			.slice(0, 100) // Limit DOM rendering to 100 results, as DOM rendering is slow.
			.map((result: FuzzyResult) => {
				const itemProps = downshift.getItemProps({
					key: result.original,

@@ -161,7 +166,7 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
	this.setValue(newValue);
};

-render() {
+render(): JSX.Element {
	const { executeQuery, value } = this.props;
	const { height } = this.state;
	return (

@@ -191,11 +196,13 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
	case 'End':
		// We want to be able to jump to the beginning/end of the input field.
		// By default, Downshift otherwise jumps to the first/last suggestion item instead.
		// eslint-disable-next-line @typescript-eslint/no-explicit-any
		(event.nativeEvent as any).preventDownshiftDefault = true;
		break;
	case 'ArrowUp':
	case 'ArrowDown':
		if (!downshift.isOpen) {
			// eslint-disable-next-line @typescript-eslint/no-explicit-any
			(event.nativeEvent as any).preventDownshiftDefault = true;
		}
		break;

@@ -203,13 +210,14 @@ class ExpressionInput extends Component<ExpressionInputProps, ExpressionInputState> {
	downshift.closeMenu();
	break;
	case 'Escape':
-		if (!downshift.isOpen) {
-			this.exprInputRef.current!.blur();
+		if (!downshift.isOpen && this.exprInputRef.current) {
+			this.exprInputRef.current.blur();
		}
		break;
	default:
	}
},
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any)}
value={value}
/>
@@ -42,6 +42,7 @@ export interface GraphExemplar {
	seriesLabels: { [key: string]: string };
	labels: { [key: string]: string };
	data: number[][];
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	points: any; // This is used to specify the symbol.
	color: string;
}

@@ -206,7 +207,7 @@ class Graph extends PureComponent<GraphProps, GraphState> {
	}
};

-render() {
+render(): JSX.Element {
	const { chartData, selectedExemplarLabels } = this.state;
	const selectedLabels = selectedExemplarLabels as {
		exemplar: { [key: string]: string };
@@ -3,7 +3,7 @@ import { shallow } from 'enzyme';
import GraphControls from './GraphControls';
import { Button, ButtonGroup, Form, InputGroup, InputGroupAddon, Input } from 'reactstrap';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { faSquare, faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons';
+import { faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons';
import TimeInput from './TimeInput';

const defaultGraphControlProps = {
@@ -1,10 +1,10 @@
import React, { Component } from 'react';
-import { Button, ButtonGroup, Form, InputGroup, InputGroupAddon, Input } from 'reactstrap';
+import { Button, ButtonGroup, Form, Input, InputGroup, InputGroupAddon } from 'reactstrap';

import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
-import { faPlus, faMinus, faChartArea, faChartLine } from '@fortawesome/free-solid-svg-icons';
+import { faChartArea, faChartLine, faMinus, faPlus } from '@fortawesome/free-solid-svg-icons';
import TimeInput from './TimeInput';
-import { parseDuration, formatDuration } from '../../utils';
+import { formatDuration, parseDuration } from '../../utils';

interface GraphControlsProps {
	range: number;

@@ -58,7 +58,7 @@ class GraphControls extends Component<GraphControlsProps> {
};

changeRangeInput = (range: number): void => {
-	this.rangeRef.current!.value = formatDuration(range);
+	this.setCurrentRangeValue(formatDuration(range));
};

increaseRange = (): void => {

@@ -81,16 +81,22 @@ class GraphControls extends Component<GraphControlsProps> {
	}
};

-componentDidUpdate(prevProps: GraphControlsProps) {
+componentDidUpdate(prevProps: GraphControlsProps): void {
	if (prevProps.range !== this.props.range) {
		this.changeRangeInput(this.props.range);
	}
	if (prevProps.resolution !== this.props.resolution) {
-		this.resolutionRef.current!.value = this.props.resolution !== null ? this.props.resolution.toString() : '';
+		this.setCurrentRangeValue(this.props.resolution !== null ? this.props.resolution.toString() : '');
	}
}

-render() {
+setCurrentRangeValue(value: string): void {
+	if (this.rangeRef.current) {
+		this.rangeRef.current.value = value;
+	}
+}
+
+render(): JSX.Element {
	return (
		<Form inline className="graph-controls" onSubmit={(e) => e.preventDefault()}>
			<InputGroup className="range-input" size="sm">

@@ -103,9 +109,13 @@ class GraphControls extends Component<GraphControlsProps> {
<Input
	defaultValue={formatDuration(this.props.range)}
	innerRef={this.rangeRef}
-	onBlur={() => this.onChangeRangeInput(this.rangeRef.current!.value)}
+	onBlur={() => {
+		if (this.rangeRef.current) {
+			this.onChangeRangeInput(this.rangeRef.current.value);
+		}
+	}}
	onKeyDown={(e: React.KeyboardEvent<HTMLInputElement>) =>
-		e.key === 'Enter' && this.onChangeRangeInput(this.rangeRef.current!.value)
+		e.key === 'Enter' && this.rangeRef.current && this.onChangeRangeInput(this.rangeRef.current.value)
	}
/>

@@ -130,8 +140,10 @@ class GraphControls extends Component<GraphControlsProps> {
	defaultValue={this.props.resolution !== null ? this.props.resolution.toString() : ''}
	innerRef={this.resolutionRef}
	onBlur={() => {
-		const res = parseInt(this.resolutionRef.current!.value);
-		this.props.onChangeResolution(res ? res : null);
+		if (this.resolutionRef.current) {
+			const res = parseInt(this.resolutionRef.current.value);
+			this.props.onChangeResolution(res ? res : null);
+		}
	}}
	bsSize="sm"
/>
@@ -53,7 +53,7 @@ export const formatValue = (y: number | null): string => {
	throw Error("couldn't format a value, this is a bug");
};

-export const getHoverColor = (color: string, opacity: number, stacked: boolean) => {
+export const getHoverColor = (color: string, opacity: number, stacked: boolean): string => {
	const { r, g, b } = $.color.parse(color);
	if (!stacked) {
		return `rgba(${r}, ${g}, ${b}, ${opacity})`;

@@ -67,10 +67,15 @@ export const getHoverColor = (color: string, opacity: number, stacked: boolean)
	return `rgb(${Math.round(base + opacity * r)},${Math.round(base + opacity * g)},${Math.round(base + opacity * b)})`;
};

-export const toHoverColor = (index: number, stacked: boolean) => (series: GraphSeries, i: number) => ({
-	...series,
-	color: getHoverColor(series.color, i !== index ? 0.3 : 1, stacked),
-});
+export const toHoverColor =
+	(index: number, stacked: boolean) =>
+	(
+		series: GraphSeries,
+		i: number
+	): { color: string; data: (number | null)[][]; index: number; labels: { [p: string]: string } } => ({
+		...series,
+		color: getHoverColor(series.color, i !== index ? 0.3 : 1, stacked),
+	});

export const getOptions = (stacked: boolean, useLocalTime: boolean): jquery.flot.plotOptions => {
	return {

@@ -154,7 +159,10 @@ export const getOptions = (stacked: boolean, useLocalTime: boolean): jquery.flot.plotOptions => {
};

// This was adapted from Flot's color generation code.
-export const getColors = (data: { resultType: string; result: Array<{ metric: Metric; values: [number, string][] }> }) => {
+export const getColors = (data: {
+	resultType: string;
+	result: Array<{ metric: Metric; values: [number, string][] }>;
+}): Color[] => {
	const colorPool = ['#edc240', '#afd8f8', '#cb4b4b', '#4da74d', '#9440ed'];
	const colorPoolSize = colorPool.length;
	let variation = 0;

@@ -180,6 +188,7 @@ export const getColors = (data: { resultType: string; result: Array<{ metric: Metric; values: [number, string][] }> }) => {

export const normalizeData = ({ queryParams, data, exemplars, stacked }: GraphProps): GraphData => {
	const colors = getColors(data);
	// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
	const { startTime, endTime, resolution } = queryParams!;

	let sum = 0;

@@ -256,7 +265,7 @@ export const normalizeData = ({ queryParams, data, exemplars, stacked }: GraphProps): GraphData => {
	};
};

-export const parseValue = (value: string) => {
+export const parseValue = (value: string): null | number => {
	const val = parseFloat(value);
	// "+Inf", "-Inf", "+Inf" will be parsed into NaN by parseFloat(). They
	// can't be graphed, so show them as gaps (null).
@@ -5,6 +5,7 @@ import { QueryParams, ExemplarData } from '../../types/types';
import { isPresent } from '../../utils';

interface GraphTabContentProps {
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	data: any;
	exemplars: ExemplarData;
	stacked: boolean;
@@ -18,36 +18,38 @@ export class Legend extends PureComponent<LegendProps, LegendState> {
	state = {
		selectedIndexes: [] as number[],
	};
-	componentDidUpdate(prevProps: LegendProps) {
+	componentDidUpdate(prevProps: LegendProps): void {
		if (this.props.shouldReset && prevProps.shouldReset !== this.props.shouldReset) {
			this.setState({ selectedIndexes: [] });
		}
	}
-	handleSeriesSelect = (index: number) => (ev: React.MouseEvent<HTMLDivElement, MouseEvent>) => {
-		// TODO: add proper event type
-		const { selectedIndexes } = this.state;
+	handleSeriesSelect =
+		(index: number) =>
+		(ev: React.MouseEvent<HTMLDivElement, MouseEvent>): void => {
+			// TODO: add proper event type
+			const { selectedIndexes } = this.state;

-		let selected = [index];
-		if (ev.ctrlKey || ev.metaKey) {
-			const { chartData } = this.props;
-			if (selectedIndexes.includes(index)) {
-				selected = selectedIndexes.filter((idx) => idx !== index);
-			} else {
-				selected =
-					// Flip the logic - In case none is selected ctrl + click should deselect clicked series.
-					selectedIndexes.length === 0
-						? chartData.reduce<number[]>((acc, _, i) => (i === index ? acc : [...acc, i]), [])
-						: [...selectedIndexes, index]; // Select multiple.
-			}
-		} else if (selectedIndexes.length === 1 && selectedIndexes.includes(index)) {
-			selected = [];
-		}
+			let selected = [index];
+			if (ev.ctrlKey || ev.metaKey) {
+				const { chartData } = this.props;
+				if (selectedIndexes.includes(index)) {
+					selected = selectedIndexes.filter((idx) => idx !== index);
+				} else {
+					selected =
+						// Flip the logic - In case none is selected ctrl + click should deselect clicked series.
+						selectedIndexes.length === 0
+							? chartData.reduce<number[]>((acc, _, i) => (i === index ? acc : [...acc, i]), [])
+							: [...selectedIndexes, index]; // Select multiple.
+				}
+			} else if (selectedIndexes.length === 1 && selectedIndexes.includes(index)) {
+				selected = [];
+			}

-		this.setState({ selectedIndexes: selected });
-		this.props.onSeriesToggle(selected, index);
-	};
+			this.setState({ selectedIndexes: selected });
+			this.props.onSeriesToggle(selected, index);
+		};

-	render() {
+	render(): JSX.Element {
		const { chartData, onLegendMouseOut, onHover } = this.props;
		const { selectedIndexes } = this.state;
		const canUseHover = chartData.length > 1 && selectedIndexes.length === 0;
@@ -9,16 +9,16 @@ interface Props {
}

class MetricsExplorer extends Component<Props> {
-	handleMetricClick = (query: string) => {
+	handleMetricClick = (query: string): void => {
		this.props.insertAtCursor(query);
		this.props.updateShow(false);
	};

-	toggle = () => {
+	toggle = (): void => {
		this.props.updateShow(!this.props.show);
	};

-	render() {
+	render(): JSX.Element {
		return (
			<Modal isOpen={this.props.show} toggle={this.toggle} className="metrics-explorer">
				<ModalHeader toggle={this.toggle}>Metrics Explorer</ModalHeader>
@ -31,6 +31,7 @@ interface PanelProps {
|
|||
}
|
||||
|
||||
interface PanelState {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
data: any; // TODO: Type data.
|
||||
exemplars: ExemplarData;
|
||||
lastQueryParams: QueryParams | null;
|
||||
|
@ -101,6 +102,7 @@ class Panel extends Component<PanelProps, PanelState> {
|
|||
this.executeQuery();
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
executeQuery = async (): Promise<any> => {
|
||||
const { exprInputValue: expr } = this.state;
|
||||
const queryStart = Date.now();
|
||||
|
@@ -198,7 +200,8 @@ class Panel extends Component<PanelProps, PanelState> {
         loading: false,
       });
       this.abortInFlightFetch = null;
-    } catch (error: any) {
+    } catch (err: unknown) {
+      const error = err as Error;
       if (error.name === 'AbortError') {
         // Aborts are expected, don't show an error for them.
         return;
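Typing the caught value as unknown rather than any matches the stricter catch-variable behavior introduced with TypeScript 4.4 (useUnknownInCatchVariables); the err as Error cast is the minimal narrowing needed to read error.name. A sketch of the same pattern with a runtime check instead of a cast (toError is a hypothetical helper, not part of this change):

// Narrow an unknown caught value before accessing Error fields.
function toError(err: unknown): Error {
  return err instanceof Error ? err : new Error(String(err));
}

fetch('/api/v1/query?query=up').catch((err: unknown) => {
  const error = toError(err);
  if (error.name === 'AbortError') {
    return; // Aborts are expected, don't surface them.
  }
  console.error(error.message);
});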
@@ -259,7 +262,7 @@ class Panel extends Component<PanelProps, PanelState> {
     this.setOptions({ range: endTime - startTime, endTime: endTime });
   };

-  render() {
+  render(): JSX.Element {
     const { pastQueries, metricNames, options } = this.props;
     return (
       <div className="panel">
@@ -11,7 +11,7 @@ import { API_PATH } from '../../constants/constants';

 export type PanelMeta = { key: string; options: PanelOptions; id: string };

-export const updateURL = (nextPanels: PanelMeta[]) => {
+export const updateURL = (nextPanels: PanelMeta[]): void => {
   const query = encodePanelOptionsToQueryString(nextPanels);
   window.history.pushState({}, '', query);
 };
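With the explicit void return type, updateURL is documented as a pure side effect: it serializes the panel list into a query string and pushes it onto the browser history. An illustrative call (the options object is abbreviated sample data, not a complete PanelOptions):

// Hypothetical usage; a real PanelOptions carries more fields.
updateURL([{ id: 'panel-0', key: '0', options: { expr: 'up' } as PanelOptions }]);
// The URL now ends in something like "?g0.expr=up".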
@@ -29,7 +29,7 @@ const SeriesName: FC<SeriesNameProps> = ({ labels, format }) => {

   return (
     <div>
-      <span className="legend-metric-name">{labels!.__name__ || ''}</span>
+      <span className="legend-metric-name">{labels ? labels.__name__ : ''}</span>
       <span className="legend-label-brace">{'{'}</span>
       {labelNodes}
       <span className="legend-label-brace">{'}'}</span>

@@ -46,7 +46,7 @@ const SeriesName: FC<SeriesNameProps> = ({ labels, format }) => {
   }
   // Return a simple text node. This is much faster to scroll through
   // for longer lists (hundreds of items).
-  return <>{metricToSeriesName(labels!)}</>;
+  return <>{metricToSeriesName(labels)}</>;
 };

 export default SeriesName;
@@ -1,6 +1,6 @@
 import $ from 'jquery';
 import React, { Component } from 'react';
-import { Button, InputGroup, InputGroupAddon, Input } from 'reactstrap';
+import { Button, Input, InputGroup, InputGroupAddon } from 'reactstrap';

 import moment from 'moment-timezone';

@@ -11,11 +11,11 @@ import '../../../node_modules/tempusdominus-bootstrap-4/build/css/tempusdominus-
 import { dom, library } from '@fortawesome/fontawesome-svg-core';
 import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
 import {
+  faArrowDown,
+  faArrowUp,
+  faCalendarCheck,
   faChevronLeft,
   faChevronRight,
-  faCalendarCheck,
-  faArrowUp,
-  faArrowDown,
   faTimes,
 } from '@fortawesome/free-solid-svg-icons';
@@ -33,6 +33,7 @@ interface TimeInputProps {

 class TimeInput extends Component<TimeInputProps> {
   private timeInputRef = React.createRef<HTMLInputElement>();
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   private $time: any = null;

   getBaseTime = (): number => {
@@ -60,7 +61,10 @@ class TimeInput extends Component<TimeInputProps> {
   };

   componentDidMount(): void {
-    this.$time = $(this.timeInputRef.current!);
+    if (!this.timeInputRef.current) {
+      return;
+    }
+    this.$time = $(this.timeInputRef.current);

     this.$time.datetimepicker({
       icons: {
@@ -78,6 +82,7 @@ class TimeInput extends Component<TimeInputProps> {
       defaultDate: this.props.time,
     });

+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
     this.$time.on('change.datetimepicker', (e: any) => {
       // The end time can also be set by dragging a section on the graph,
       // and that value will have decimal places.
@@ -101,7 +106,7 @@ class TimeInput extends Component<TimeInputProps> {
     }
   }

-  render() {
+  render(): JSX.Element {
     return (
       <InputGroup className="time-input" size="sm">
         <InputGroupAddon addonType="prepend">
@@ -1,7 +1,7 @@
 import React, { FC } from 'react';
 import { useFetch } from '../../hooks/useFetch';
 import { LabelsTable } from './LabelsTable';
-import { Target, Labels, DroppedTarget } from '../targets/target';
+import { DroppedTarget, Labels, Target } from '../targets/target';

 import { withStatusIndicator } from '../../components/withStatusIndicator';
 import { mapObjEntries } from '../../utils';
@@ -19,7 +19,10 @@ export interface TargetLabels {
   isDropped: boolean;
 }

-export const processSummary = (activeTargets: Target[], droppedTargets: DroppedTarget[]) => {
+export const processSummary = (
+  activeTargets: Target[],
+  droppedTargets: DroppedTarget[]
+): Record<string, { active: number; total: number }> => {
   const targets: Record<string, { active: number; total: number }> = {};

   // Get targets of each type along with the total and active end points
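The added return annotation spells out the summary's shape: scrape-pool names mapped to active and total endpoint counts. By example (sample values, not derived from real targets):

// A value of the annotated return type.
const summary: Record<string, { active: number; total: number }> = {
  prometheus: { active: 1, total: 1 },
  node: { active: 2, total: 3 }, // 2 of 3 discovered endpoints are actively scraped
};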
@@ -12,6 +12,7 @@ interface StatusPageProps {

 export const statusConfig: Record<
   string,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
   { title?: string; customizeValue?: (v: any, key: string) => any; customRow?: boolean; skip?: boolean }
 > = {
   startTime: { title: 'Start time', customizeValue: (v: string) => new Date(v).toUTCString() },
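statusConfig maps each runtime/build-info key to how its row is rendered; customizeValue lets an entry reformat the raw value, as startTime does with toUTCString. An illustrative extra entry of the same shape (the key and formatting here are hypothetical, not part of this change):

// Hypothetical entry following the statusConfig shape.
const example: typeof statusConfig = {
  reloadConfigSuccess: { title: 'Configuration reload', customizeValue: (v: boolean) => (v ? 'Successful' : 'Failed') },
};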
@@ -57,7 +58,7 @@ export const StatusContent: FC<StatusPageProps> = ({ data, title }) => {
       <Table className="h-auto" size="sm" bordered striped>
         <tbody>
           {Object.entries(data).map(([k, v]) => {
-            const { title = k, customizeValue = (val: any) => val, customRow, skip } = statusConfig[k] || {};
+            const { title = k, customizeValue = (val: string) => val, customRow, skip } = statusConfig[k] || {};
             if (skip) {
               return null;
             }
@@ -10,10 +10,11 @@ const EndpointLink: FC<EndpointLinkProps> = ({ endpoint, globalUrl }) => {
   let url: URL;
   try {
     url = new URL(endpoint);
-  } catch (e: any) {
+  } catch (err: unknown) {
+    const error = err as Error;
     return (
       <Alert color="danger">
-        <strong>Error:</strong> {e.message}
+        <strong>Error:</strong> {error.message}
       </Alert>
     );
   }
@@ -25,6 +25,9 @@ describe('ScrapePoolList', () => {
         const div = document.createElement('div');
         div.id = `series-labels-${pool}-${idx}`;
         document.body.appendChild(div);
+        const div2 = document.createElement('div');
+        div2.id = `scrape-duration-${pool}-${idx}`;
+        document.body.appendChild(div2);
       });
     });
     mock = fetchMock.mockResponse(JSON.stringify(sampleApiResponse));
@@ -57,6 +57,9 @@ describe('ScrapePoolPanel', () => {
     const div = document.createElement('div');
     div.id = `series-labels-prometheus-0`;
     document.body.appendChild(div);
+    const div2 = document.createElement('div');
+    div2.id = `scrape-duration-prometheus-0`;
+    document.body.appendChild(div2);
     const scrapePoolPanel = mount(<ScrapePoolPanel {...props} />);

     const btn = scrapePoolPanel.find(Button);
@@ -5,9 +5,10 @@ import styles from './ScrapePoolPanel.module.css';
 import { Target } from './target';
 import EndpointLink from './EndpointLink';
 import TargetLabels from './TargetLabels';
+import TargetScrapeDuration from './TargetScrapeDuration';
 import { now } from 'moment';
 import { ToggleMoreLess } from '../../components/ToggleMoreLess';
-import { formatRelative, humanizeDuration } from '../../utils';
+import { formatRelative } from '../../utils';

 interface PanelProps {
   scrapePool: string;
@@ -54,6 +55,8 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup, expanded, to
               lastScrape,
               lastScrapeDuration,
               health,
+              scrapeInterval,
+              scrapeTimeout,
             } = target;
             const color = getColor(health);
@@ -69,7 +72,15 @@ const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup, expanded, to
                   <TargetLabels discoveredLabels={discoveredLabels} labels={labels} scrapePool={scrapePool} idx={idx} />
                 </td>
                 <td className={styles['last-scrape']}>{formatRelative(lastScrape, now())}</td>
-                <td className={styles['scrape-duration']}>{humanizeDuration(lastScrapeDuration * 1000)}</td>
+                <td className={styles['scrape-duration']}>
+                  <TargetScrapeDuration
+                    duration={lastScrapeDuration}
+                    scrapePool={scrapePool}
+                    idx={idx}
+                    interval={scrapeInterval}
+                    timeout={scrapeTimeout}
+                  />
+                </td>
                 <td className={styles.errors}>{lastError ? <span className="text-danger">{lastError}</span> : null}</td>
               </tr>
             );
@@ -0,0 +1,41 @@
+import React, { FC, Fragment, useState } from 'react';
+import { Tooltip } from 'reactstrap';
+import 'css.escape';
+import { humanizeDuration } from '../../utils';
+
+export interface TargetScrapeDurationProps {
+  duration: number;
+  interval: string;
+  timeout: string;
+  idx: number;
+  scrapePool: string;
+}
+
+const TargetScrapeDuration: FC<TargetScrapeDurationProps> = ({ duration, interval, timeout, idx, scrapePool }) => {
+  const [scrapeTooltipOpen, setScrapeTooltipOpen] = useState<boolean>(false);
+  const id = `scrape-duration-${scrapePool}-${idx}`;
+
+  return (
+    <>
+      <div id={id} className="scrape-duration-container">
+        {humanizeDuration(duration * 1000)}
+      </div>
+      <Tooltip
+        isOpen={scrapeTooltipOpen}
+        toggle={() => setScrapeTooltipOpen(!scrapeTooltipOpen)}
+        target={CSS.escape(id)}
+        style={{ maxWidth: 'none', textAlign: 'left' }}
+      >
+        <Fragment>
+          <span>Interval: {interval}</span>
+          <br />
+        </Fragment>
+        <Fragment>
+          <span>Timeout: {timeout}</span>
+        </Fragment>
+      </Tooltip>
+    </>
+  );
+};
+
+export default TargetScrapeDuration;
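The new component pairs the humanized duration with a reactstrap Tooltip showing the pool's configured interval and timeout; CSS.escape guards the id-based tooltip target, since ids like scrape-duration-{pool}-{idx} can contain characters that are invalid in a CSS selector, hence the css.escape polyfill import. An illustrative mount, mirroring the ScrapePoolPanel change above (values are sample data):

// Sample usage of the new component; duration is in seconds.
<TargetScrapeDuration
  duration={0.036560147} // rendered via humanizeDuration(duration * 1000)
  scrapePool="prometheus"
  idx={0}
  interval="15s"
  timeout="500ms"
/>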
@@ -23,6 +23,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:14.759299-07:00',
         lastScrapeDuration: 36560147,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
       {
         discoveredLabels: {

@@ -43,6 +45,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:24.731096-07:00',
         lastScrapeDuration: 49448763,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
       {
         discoveredLabels: {

@@ -63,6 +67,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:13.516654-07:00',
         lastScrapeDuration: 120916592,
         health: 'down',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },

@@ -87,6 +93,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:14.145703-07:00',
         lastScrapeDuration: 3842307,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },

@@ -111,6 +119,8 @@ export const targetGroups: ScrapePools = Object.freeze({
         lastScrape: '2019-11-04T11:52:18.479731-07:00',
         lastScrapeDuration: 4050976,
         health: 'up',
+        scrapeInterval: '15s',
+        scrapeTimeout: '500ms',
       },
     ],
   },

@@ -12,6 +12,8 @@ export interface Target {
   lastScrape: string;
   lastScrapeDuration: number;
   health: string;
+  scrapeInterval: string;
+  scrapeTimeout: string;
 }

 export interface DroppedTarget {
@@ -217,11 +217,13 @@ export const parseOption = (param: string): Partial<PanelOptions> => {
   return {};
 };

-export const formatParam = (key: string) => (paramName: string, value: number | string | boolean) => {
-  return `g${key}.${paramName}=${encodeURIComponent(value)}`;
-};
+export const formatParam =
+  (key: string) =>
+  (paramName: string, value: number | string | boolean): string => {
+    return `g${key}.${paramName}=${encodeURIComponent(value)}`;
+  };

-export const toQueryString = ({ key, options }: PanelMeta) => {
+export const toQueryString = ({ key, options }: PanelMeta): string => {
   const formatWithKey = formatParam(key);
   const { expr, type, stacked, range, endTime, resolution, showExemplars } = options;
   const time = isPresent(endTime) ? formatTime(endTime) : false;
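The curried shape of formatParam is unchanged (the rewrite is prettier's formatting for arrow chains plus an explicit string return type): applying the panel key once yields a formatter for that panel's query parameters. A short sketch (the parameter names are illustrative):

// Partially apply the panel key, then format individual params.
const format = formatParam('0');
format('expr', 'up');     // "g0.expr=up"
format('stacked', false); // "g0.stacked=false"
format('step_input', 15); // "g0.step_input=15"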
@@ -247,16 +249,20 @@ export const createExpressionLink = (expr: string): string => {
 export const mapObjEntries = <T, key extends keyof T, Z>(
   o: T,
   cb: ([k, v]: [string, T[key]], i: number, arr: [string, T[key]][]) => Z
-) => Object.entries(o).map(cb);
+): Z[] => Object.entries(o).map(cb);

 export const callAll =
-  (...fns: Array<(...args: any) => void>) =>
-  (...args: any) => {
+  (
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    ...fns: Array<(...args: any) => void>
+  ) =>
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/explicit-module-boundary-types
+  (...args: any): void => {
     // eslint-disable-next-line prefer-spread
     fns.filter(Boolean).forEach((fn) => fn.apply(null, args));
   };

-export const parsePrometheusFloat = (value: string): number | string => {
+export const parsePrometheusFloat = (value: string): string | number => {
   if (isNaN(Number(value))) {
     return value;
   } else {
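callAll merges several callbacks into a single function and forwards the same arguments to each; falsy entries are filtered out at runtime, so optional handlers can be passed straight through. A small usage sketch (the handlers are illustrative):

// Combine two handlers; both receive the same arguments.
const onChange = callAll(
  (value: string) => console.log('log:', value),
  (value: string) => localStorage.setItem('last', value)
);
onChange('up'); // Logs "log: up" and stores 'up' under "last".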