// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package v1

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"math"
	"net/http"
	"net/http/httptest"
	"net/url"
	"os"
	"reflect"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/go-kit/kit/log"
	"github.com/gogo/protobuf/proto"
	"github.com/golang/snappy"
	"github.com/prometheus/client_golang/prometheus"
	config_util "github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/common/promlog"
	"github.com/prometheus/common/route"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/pkg/gate"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/pkg/textparse"
	"github.com/prometheus/prometheus/pkg/timestamp"
	"github.com/prometheus/prometheus/prompb"
	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
	"github.com/prometheus/prometheus/rules"
	"github.com/prometheus/prometheus/scrape"
	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/storage/remote"
	"github.com/prometheus/prometheus/tsdb"
	"github.com/prometheus/prometheus/util/teststorage"
	"github.com/prometheus/prometheus/util/testutil"
)

// testMetaStore satisfies the scrape.MetricMetadataStore interface.
// It is used to inject specific metadata as part of a test case.
type testMetaStore struct {
	Metadata []scrape.MetricMetadata
}

func (s *testMetaStore) ListMetadata() []scrape.MetricMetadata {
	return s.Metadata
}

func (s *testMetaStore) GetMetadata(metric string) (scrape.MetricMetadata, bool) {
	for _, m := range s.Metadata {
		if metric == m.Metric {
			return m, true
		}
	}

	return scrape.MetricMetadata{}, false
}

func (s *testMetaStore) SizeMetadata() int   { return 0 }
func (s *testMetaStore) LengthMetadata() int { return 0 }

// testTargetRetriever represents a list of targets to scrape.
// It is used to represent targets as part of test cases.
type testTargetRetriever struct {
	activeTargets  map[string][]*scrape.Target
	droppedTargets map[string][]*scrape.Target
}

type testTargetParams struct {
	Identifier       string
	Labels           []labels.Label
	DiscoveredLabels []labels.Label
	Params           url.Values
	Reports          []*testReport
	Active           bool
}

type testReport struct {
	Start    time.Time
	Duration time.Duration
	Error    error
}

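// newTestTargetRetriever buckets the given test targets into active and
// dropped maps keyed by identifier, replaying any scrape reports onto the
// newly created targets.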
func newTestTargetRetriever(targetsInfo []*testTargetParams) *testTargetRetriever {
	activeTargets := make(map[string][]*scrape.Target)
	droppedTargets := make(map[string][]*scrape.Target)

	for _, t := range targetsInfo {
		nt := scrape.NewTarget(t.Labels, t.DiscoveredLabels, t.Params)

		for _, r := range t.Reports {
			nt.Report(r.Start, r.Duration, r.Error)
		}

		if t.Active {
			activeTargets[t.Identifier] = []*scrape.Target{nt}
		} else {
			droppedTargets[t.Identifier] = []*scrape.Target{nt}
		}
	}

	return &testTargetRetriever{
		activeTargets:  activeTargets,
		droppedTargets: droppedTargets,
	}
}

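// scrapeStart is a fixed reference time for the scrape reports fed into the
// test targets below.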
var scrapeStart = time.Now().Add(-11 * time.Second)

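// TargetsActive and TargetsDropped expose the stored target maps to the API
// under test.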
func (t testTargetRetriever) TargetsActive() map[string][]*scrape.Target {
	return t.activeTargets
}

func (t testTargetRetriever) TargetsDropped() map[string][]*scrape.Target {
	return t.droppedTargets
}

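// SetMetadataStoreForTargets attaches the given metadata store to every
// active target registered under the identifier.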
func (t *testTargetRetriever) SetMetadataStoreForTargets(identifier string, metadata scrape.MetricMetadataStore) error {
	targets, ok := t.activeTargets[identifier]

	if !ok {
		return errors.New("targets not found")
	}

	for _, at := range targets {
		at.SetMetadataStore(metadata)
	}

	return nil
}

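// ResetMetadataStore clears the metadata of all active targets by installing
// an empty store.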
func (t *testTargetRetriever) ResetMetadataStore() {
	for _, at := range t.activeTargets {
		for _, tt := range at {
			tt.SetMetadataStore(&testMetaStore{})
		}
	}
}

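// testAlertmanagerRetriever serves fixed active and dropped Alertmanager URLs
// for the alertmanagers endpoint tests.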
type testAlertmanagerRetriever struct{}

func (t testAlertmanagerRetriever) Alertmanagers() []*url.URL {
	return []*url.URL{
		{
			Scheme: "http",
			Host:   "alertmanager.example.com:8080",
			Path:   "/api/v1/alerts",
		},
	}
}

func (t testAlertmanagerRetriever) DroppedAlertmanagers() []*url.URL {
	return []*url.URL{
		{
			Scheme: "http",
			Host:   "dropped.alertmanager.example.com:8080",
			Path:   "/api/v1/alerts",
		},
	}
}

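// rulesRetrieverMock hands the API a fixed set of alerting and recording
// rules built at call time.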
type rulesRetrieverMock struct {
	testing *testing.T
}

func (m rulesRetrieverMock) AlertingRules() []*rules.AlertingRule {
	expr1, err := parser.ParseExpr(`absent(test_metric3) != 1`)
	if err != nil {
		m.testing.Fatalf("unable to parse alert expression: %s", err)
	}
	expr2, err := parser.ParseExpr(`up == 1`)
	if err != nil {
		m.testing.Fatalf("unable to parse alert expression: %s", err)
	}

	rule1 := rules.NewAlertingRule(
		"test_metric3",
		expr1,
		time.Second,
		labels.Labels{},
		labels.Labels{},
		labels.Labels{},
		true,
		log.NewNopLogger(),
	)
	rule2 := rules.NewAlertingRule(
		"test_metric4",
		expr2,
		time.Second,
		labels.Labels{},
		labels.Labels{},
		labels.Labels{},
		true,
		log.NewNopLogger(),
	)
	var r []*rules.AlertingRule
	r = append(r, rule1)
	r = append(r, rule2)
	return r
}

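// RuleGroups wraps the mock's alerting rules plus one recording rule into a
// single group backed by a throwaway test storage and engine.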
func (m rulesRetrieverMock) RuleGroups() []*rules.Group {
	arules := m.AlertingRules()
	storage := teststorage.New(m.testing)
	defer storage.Close()

	engineOpts := promql.EngineOpts{
		Logger:     nil,
		Reg:        nil,
		MaxSamples: 10,
		Timeout:    100 * time.Second,
	}

	engine := promql.NewEngine(engineOpts)
	opts := &rules.ManagerOptions{
		QueryFunc:  rules.EngineQueryFunc(engine, storage),
		Appendable: storage,
		Context:    context.Background(),
		Logger:     log.NewNopLogger(),
	}

	var r []rules.Rule

	for _, alertrule := range arules {
		r = append(r, alertrule)
	}

	recordingExpr, err := parser.ParseExpr(`vector(1)`)
	if err != nil {
		m.testing.Fatalf("unable to parse recording expression: %s", err)
	}
	recordingRule := rules.NewRecordingRule("recording-rule-1", recordingExpr, labels.Labels{})
	r = append(r, recordingRule)

	group := rules.NewGroup(rules.GroupOptions{
		Name:          "grp",
		File:          "/path/to/file",
		Interval:      time.Second,
		Rules:         r,
		ShouldRestore: false,
		Opts:          opts,
	})
	return []*rules.Group{group}
}

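// samplePrometheusCfg is the (empty) configuration the status/config endpoint
// is expected to echo back.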
var samplePrometheusCfg = config.Config{
	GlobalConfig:       config.GlobalConfig{},
	AlertingConfig:     config.AlertingConfig{},
	RuleFiles:          []string{},
	ScrapeConfigs:      []*config.ScrapeConfig{},
	RemoteWriteConfigs: []*config.RemoteWriteConfig{},
	RemoteReadConfigs:  []*config.RemoteReadConfig{},
}

var sampleFlagMap = map[string]string{
	"flag1": "value1",
	"flag2": "value2",
}

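// TestEndpoints runs the endpoint test matrix twice: once directly against
// local storage and once through the remote read client.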
func TestEndpoints(t *testing.T) {
	suite, err := promql.NewTest(t, `
		load 1m
			test_metric1{foo="bar"} 0+100x100
			test_metric1{foo="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
	`)
	testutil.Ok(t, err)
	defer suite.Close()

	testutil.Ok(t, suite.Run())

	now := time.Now()

	t.Run("local", func(t *testing.T) {
		var algr rulesRetrieverMock
		algr.testing = t

		algr.AlertingRules()

		algr.RuleGroups()

		testTargetRetriever := setupTestTargetRetriever(t)

		api := &API{
			Queryable:             suite.Storage(),
			QueryEngine:           suite.QueryEngine(),
			targetRetriever:       testTargetRetriever,
			alertmanagerRetriever: testAlertmanagerRetriever{},
			flagsMap:              sampleFlagMap,
			now:                   func() time.Time { return now },
			config:                func() config.Config { return samplePrometheusCfg },
			ready:                 func(f http.HandlerFunc) http.HandlerFunc { return f },
			rulesRetriever:        algr,
		}

		testEndpoints(t, api, testTargetRetriever, true)
	})

	// Run all the API tests against an API that is wired to forward queries
	// via the remote read client to a test server, which in turn serves them
	// from the data in the test suite.
	t.Run("remote", func(t *testing.T) {
		server := setupRemote(suite.Storage())
		defer server.Close()

		u, err := url.Parse(server.URL)
		testutil.Ok(t, err)

		al := promlog.AllowedLevel{}
		testutil.Ok(t, al.Set("debug"))

		af := promlog.AllowedFormat{}
		testutil.Ok(t, af.Set("logfmt"))

		promlogConfig := promlog.Config{
			Level:  &al,
			Format: &af,
		}

		dbDir, err := ioutil.TempDir("", "tsdb-api-ready")
		testutil.Ok(t, err)
		defer os.RemoveAll(dbDir)

		remote := remote.NewStorage(promlog.New(&promlogConfig), prometheus.DefaultRegisterer, func() (int64, error) {
			return 0, nil
		}, dbDir, 1*time.Second)

		err = remote.ApplyConfig(&config.Config{
			RemoteReadConfigs: []*config.RemoteReadConfig{
				{
					URL:           &config_util.URL{URL: u},
					RemoteTimeout: model.Duration(1 * time.Second),
					ReadRecent:    true,
				},
			},
		})
		testutil.Ok(t, err)

		var algr rulesRetrieverMock
		algr.testing = t

		algr.AlertingRules()

		algr.RuleGroups()

		testTargetRetriever := setupTestTargetRetriever(t)

		api := &API{
			Queryable:             remote,
			QueryEngine:           suite.QueryEngine(),
			targetRetriever:       testTargetRetriever,
			alertmanagerRetriever: testAlertmanagerRetriever{},
			flagsMap:              sampleFlagMap,
			now:                   func() time.Time { return now },
			config:                func() config.Config { return samplePrometheusCfg },
			ready:                 func(f http.HandlerFunc) http.HandlerFunc { return f },
			rulesRetriever:        algr,
		}

		testEndpoints(t, api, testTargetRetriever, false)
	})
}

func TestLabelNames(t *testing.T) {
	// TestEndpoints doesn't have enough label names to test the api.labelNames
	// endpoint properly, so it is tested separately here.
	suite, err := promql.NewTest(t, `
		load 1m
			test_metric1{foo1="bar", baz="abc"} 0+100x100
			test_metric1{foo2="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
			test_metric2{foo="boo", xyz="qwerty"} 1+0x100
	`)
	testutil.Ok(t, err)
	defer suite.Close()
	testutil.Ok(t, suite.Run())

	api := &API{
		Queryable: suite.Storage(),
	}
	request := func(m string) (*http.Request, error) {
		if m == http.MethodPost {
			r, err := http.NewRequest(m, "http://example.com", nil)
			r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
			return r, err
		}
		return http.NewRequest(m, "http://example.com", nil)
	}
	for _, method := range []string{http.MethodGet, http.MethodPost} {
		ctx := context.Background()
		req, err := request(method)
		testutil.Ok(t, err)
		res := api.labelNames(req.WithContext(ctx))
		assertAPIError(t, res.err, "")
		assertAPIResponse(t, res.data, []string{"__name__", "baz", "foo", "foo1", "foo2", "xyz"})
	}
}

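// setupTestTargetRetriever builds a retriever with one healthy target, one
// failing blackbox target, and one dropped blackbox target.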
func setupTestTargetRetriever(t *testing.T) *testTargetRetriever {
	t.Helper()

	targets := []*testTargetParams{
		{
			Identifier: "test",
			Labels: labels.FromMap(map[string]string{
				model.SchemeLabel:      "http",
				model.AddressLabel:     "example.com:8080",
				model.MetricsPathLabel: "/metrics",
				model.JobLabel:         "test",
			}),
			DiscoveredLabels: nil,
			Params:           url.Values{},
			Reports:          []*testReport{{scrapeStart, 70 * time.Millisecond, nil}},
			Active:           true,
		},
		{
			Identifier: "blackbox",
			Labels: labels.FromMap(map[string]string{
				model.SchemeLabel:      "http",
				model.AddressLabel:     "localhost:9115",
				model.MetricsPathLabel: "/probe",
				model.JobLabel:         "blackbox",
			}),
			DiscoveredLabels: nil,
			Params:           url.Values{"target": []string{"example.com"}},
			Reports:          []*testReport{{scrapeStart, 100 * time.Millisecond, errors.New("failed")}},
			Active:           true,
		},
		{
			Identifier: "blackbox",
			Labels:     nil,
			DiscoveredLabels: labels.FromMap(map[string]string{
				model.SchemeLabel:      "http",
				model.AddressLabel:     "http://dropped.example.com:9115",
				model.MetricsPathLabel: "/probe",
				model.JobLabel:         "blackbox",
			}),
			Params: url.Values{},
			Active: false,
		},
	}

	return newTestTargetRetriever(targets)
}

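// setupRemote starts an HTTP test server that answers remote read requests
// straight from the given storage.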
func setupRemote(s storage.Storage) *httptest.Server {
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		req, err := remote.DecodeReadRequest(r)
		if err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		resp := prompb.ReadResponse{
			Results: make([]*prompb.QueryResult, len(req.Queries)),
		}
		for i, query := range req.Queries {
			matchers, err := remote.FromLabelMatchers(query.Matchers)
			if err != nil {
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}

			var selectParams *storage.SelectParams
			if query.Hints != nil {
				selectParams = &storage.SelectParams{
					Start: query.Hints.StartMs,
					End:   query.Hints.EndMs,
					Step:  query.Hints.StepMs,
					Func:  query.Hints.Func,
				}
			}

			querier, err := s.Querier(r.Context(), query.StartTimestampMs, query.EndTimestampMs)
			if err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
			defer querier.Close()

			set, _, err := querier.Select(selectParams, matchers...)
			if err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
			resp.Results[i], err = remote.ToQueryResult(set, 1e6)
			if err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}

		if err := remote.EncodeReadResponse(&resp, w); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	})

	return httptest.NewServer(handler)
}

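// testEndpoints drives each API handler defined in the test table below and
// compares the result against the expected response or error type.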
func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, testLabelAPI bool) {
	start := time.Unix(0, 0)

	type targetMetadata struct {
		identifier string
		metadata   []scrape.MetricMetadata
	}

	type test struct {
		endpoint    apiFunc
		params      map[string]string
		query       url.Values
		response    interface{}
		responseLen int
		errType     errorType
		sorter      func(interface{})
		metadata    []targetMetadata
	}

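	// Each case exercises one endpoint with fixed query parameters; errType
	// is checked first, then either the response length or the full response.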
	var tests = []test{
		{
			endpoint: api.query,
			query: url.Values{
				"query": []string{"2"},
				"time":  []string{"123.4"},
			},
			response: &queryData{
				ResultType: parser.ValueTypeScalar,
				Result: promql.Scalar{
					V: 2,
					T: timestamp.FromTime(start.Add(123*time.Second + 400*time.Millisecond)),
				},
			},
		},
		{
			endpoint: api.query,
			query: url.Values{
				"query": []string{"0.333"},
				"time":  []string{"1970-01-01T00:02:03Z"},
			},
			response: &queryData{
				ResultType: parser.ValueTypeScalar,
				Result: promql.Scalar{
					V: 0.333,
					T: timestamp.FromTime(start.Add(123 * time.Second)),
				},
			},
		},
		{
			endpoint: api.query,
			query: url.Values{
				"query": []string{"0.333"},
				"time":  []string{"1970-01-01T01:02:03+01:00"},
			},
			response: &queryData{
				ResultType: parser.ValueTypeScalar,
				Result: promql.Scalar{
					V: 0.333,
					T: timestamp.FromTime(start.Add(123 * time.Second)),
				},
			},
		},
		{
			endpoint: api.query,
			query: url.Values{
				"query": []string{"0.333"},
			},
			response: &queryData{
				ResultType: parser.ValueTypeScalar,
				Result: promql.Scalar{
					V: 0.333,
					T: timestamp.FromTime(api.now()),
				},
			},
		},
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"0"},
				"end":   []string{"2"},
				"step":  []string{"1"},
			},
			response: &queryData{
				ResultType: parser.ValueTypeMatrix,
				Result: promql.Matrix{
					promql.Series{
						Points: []promql.Point{
							{V: 0, T: timestamp.FromTime(start)},
							{V: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
							{V: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
						},
						Metric: nil,
					},
				},
			},
		},
		// Missing query params in range queries.
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"end":   []string{"2"},
				"step":  []string{"1"},
			},
			errType: errorBadData,
		},
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"0"},
				"step":  []string{"1"},
			},
			errType: errorBadData,
		},
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"0"},
				"end":   []string{"2"},
			},
			errType: errorBadData,
		},
		// Bad query expression.
		{
			endpoint: api.query,
			query: url.Values{
				"query": []string{"invalid][query"},
				"time":  []string{"1970-01-01T01:02:03+01:00"},
			},
			errType: errorBadData,
		},
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"invalid][query"},
				"start": []string{"0"},
				"end":   []string{"100"},
				"step":  []string{"1"},
			},
			errType: errorBadData,
		},
		// Invalid step.
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"1"},
				"end":   []string{"2"},
				"step":  []string{"0"},
			},
			errType: errorBadData,
		},
		// Start after end.
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"2"},
				"end":   []string{"1"},
				"step":  []string{"1"},
			},
			errType: errorBadData,
		},
		// Start overflows int64 internally.
		{
			endpoint: api.queryRange,
			query: url.Values{
				"query": []string{"time()"},
				"start": []string{"148966367200.372"},
				"end":   []string{"1489667272.372"},
				"step":  []string{"1"},
			},
			errType: errorBadData,
		},
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
			},
		},
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric1{foo=~".+o"}`},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
			},
		},
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric1{foo=~".+o$"}`, `test_metric1{foo=~".+o"}`},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
			},
		},
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric1{foo=~".+o"}`, `none`},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric1", "foo", "boo"),
			},
		},
		// Start and end before series starts.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"-2"},
				"end":     []string{"-1"},
			},
			response: []labels.Labels{},
		},
		// Start and end after series ends.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"100000"},
				"end":     []string{"100001"},
			},
			response: []labels.Labels{},
		},
		// Start before series starts, end after series ends.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"-1"},
				"end":     []string{"100000"},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
			},
		},
		// Start and end within series.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"1"},
				"end":     []string{"100"},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
			},
		},
		// Start within series, end after.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"1"},
				"end":     []string{"100000"},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
			},
		},
		// Start before series, end within series.
		{
			endpoint: api.series,
			query: url.Values{
				"match[]": []string{`test_metric2`},
				"start":   []string{"-1"},
				"end":     []string{"1"},
			},
			response: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
			},
		},
		// Missing match[] query params in series requests.
		{
			endpoint: api.series,
			errType:  errorBadData,
		},
		{
			endpoint: api.dropSeries,
			errType:  errorInternal,
		},
		{
			endpoint: api.targets,
			response: &TargetDiscovery{
				ActiveTargets: []*Target{
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "blackbox",
						},
						ScrapePool:         "blackbox",
						ScrapeURL:          "http://localhost:9115/probe?target=example.com",
						GlobalURL:          "http://localhost:9115/probe?target=example.com",
						Health:             "down",
						LastError:          "failed: missing port in address",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.1,
					},
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "test",
						},
						ScrapePool:         "test",
						ScrapeURL:          "http://example.com:8080/metrics",
						GlobalURL:          "http://example.com:8080/metrics",
						Health:             "up",
						LastError:          "",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.07,
					},
				},
				DroppedTargets: []*DroppedTarget{
					{
						DiscoveredLabels: map[string]string{
							"__address__":      "http://dropped.example.com:9115",
							"__metrics_path__": "/probe",
							"__scheme__":       "http",
							"job":              "blackbox",
						},
					},
				},
			},
		},
		{
			endpoint: api.targets,
			query: url.Values{
				"state": []string{"any"},
			},
			response: &TargetDiscovery{
				ActiveTargets: []*Target{
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "blackbox",
						},
						ScrapePool:         "blackbox",
						ScrapeURL:          "http://localhost:9115/probe?target=example.com",
						GlobalURL:          "http://localhost:9115/probe?target=example.com",
						Health:             "down",
						LastError:          "failed: missing port in address",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.1,
					},
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "test",
						},
						ScrapePool:         "test",
						ScrapeURL:          "http://example.com:8080/metrics",
						GlobalURL:          "http://example.com:8080/metrics",
						Health:             "up",
						LastError:          "",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.07,
					},
				},
				DroppedTargets: []*DroppedTarget{
					{
						DiscoveredLabels: map[string]string{
							"__address__":      "http://dropped.example.com:9115",
							"__metrics_path__": "/probe",
							"__scheme__":       "http",
							"job":              "blackbox",
						},
					},
				},
			},
		},
		{
			endpoint: api.targets,
			query: url.Values{
				"state": []string{"active"},
			},
			response: &TargetDiscovery{
				ActiveTargets: []*Target{
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "blackbox",
						},
						ScrapePool:         "blackbox",
						ScrapeURL:          "http://localhost:9115/probe?target=example.com",
						GlobalURL:          "http://localhost:9115/probe?target=example.com",
						Health:             "down",
						LastError:          "failed: missing port in address",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.1,
					},
					{
						DiscoveredLabels: map[string]string{},
						Labels: map[string]string{
							"job": "test",
						},
						ScrapePool:         "test",
						ScrapeURL:          "http://example.com:8080/metrics",
						GlobalURL:          "http://example.com:8080/metrics",
						Health:             "up",
						LastError:          "",
						LastScrape:         scrapeStart,
						LastScrapeDuration: 0.07,
					},
				},
				DroppedTargets: []*DroppedTarget{},
			},
		},
		{
			endpoint: api.targets,
			query: url.Values{
				"state": []string{"Dropped"},
			},
			response: &TargetDiscovery{
				ActiveTargets: []*Target{},
				DroppedTargets: []*DroppedTarget{
					{
						DiscoveredLabels: map[string]string{
							"__address__":      "http://dropped.example.com:9115",
							"__metrics_path__": "/probe",
							"__scheme__":       "http",
							"job":              "blackbox",
						},
					},
				},
			},
		},
		// With a matching metric.
		{
			endpoint: api.targetMetadata,
			query: url.Values{
				"metric": []string{"go_threads"},
			},
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created.",
							Unit:   "",
						},
					},
				},
			},
			response: []metricMetadata{
				{
					Target: labels.FromMap(map[string]string{
						"job": "test",
					}),
					Help: "Number of OS threads created.",
					Type: textparse.MetricTypeGauge,
					Unit: "",
				},
			},
		},
		// With a matching target.
		{
			endpoint: api.targetMetadata,
			query: url.Values{
				"match_target": []string{"{job=\"blackbox\"}"},
			},
			metadata: []targetMetadata{
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "prometheus_tsdb_storage_blocks_bytes",
							Type:   textparse.MetricTypeGauge,
							Help:   "The number of bytes that are currently used for local storage by all blocks.",
							Unit:   "",
						},
					},
				},
			},
			response: []metricMetadata{
				{
					Target: labels.FromMap(map[string]string{
						"job": "blackbox",
					}),
					Metric: "prometheus_tsdb_storage_blocks_bytes",
					Help:   "The number of bytes that are currently used for local storage by all blocks.",
					Type:   textparse.MetricTypeGauge,
					Unit:   "",
				},
			},
		},
		// Without a target or metric.
		{
			endpoint: api.targetMetadata,
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created.",
							Unit:   "",
						},
					},
				},
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "prometheus_tsdb_storage_blocks_bytes",
							Type:   textparse.MetricTypeGauge,
							Help:   "The number of bytes that are currently used for local storage by all blocks.",
							Unit:   "",
						},
					},
				},
			},
			response: []metricMetadata{
				{
					Target: labels.FromMap(map[string]string{
						"job": "test",
					}),
					Metric: "go_threads",
					Help:   "Number of OS threads created.",
					Type:   textparse.MetricTypeGauge,
					Unit:   "",
				},
				{
					Target: labels.FromMap(map[string]string{
						"job": "blackbox",
					}),
					Metric: "prometheus_tsdb_storage_blocks_bytes",
					Help:   "The number of bytes that are currently used for local storage by all blocks.",
					Type:   textparse.MetricTypeGauge,
					Unit:   "",
				},
			},
			sorter: func(m interface{}) {
				sort.Slice(m.([]metricMetadata), func(i, j int) bool {
					s := m.([]metricMetadata)
					return s[i].Metric < s[j].Metric
				})
			},
		},
		// Without a matching metric.
		{
			endpoint: api.targetMetadata,
			query: url.Values{
				"match_target": []string{"{job=\"non-existentblackbox\"}"},
			},
			response: []metricMetadata{},
		},
		{
			endpoint: api.alertmanagers,
			response: &AlertmanagerDiscovery{
				ActiveAlertmanagers: []*AlertmanagerTarget{
					{
						URL: "http://alertmanager.example.com:8080/api/v1/alerts",
					},
				},
				DroppedAlertmanagers: []*AlertmanagerTarget{
					{
						URL: "http://dropped.alertmanager.example.com:8080/api/v1/alerts",
					},
				},
			},
		},
		// With metadata available.
		{
			endpoint: api.metricMetadata,
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "prometheus_engine_query_duration_seconds",
							Type:   textparse.MetricTypeSummary,
							Help:   "Query timings",
							Unit:   "",
						},
						{
							Metric: "go_info",
							Type:   textparse.MetricTypeGauge,
							Help:   "Information about the Go environment.",
							Unit:   "",
						},
					},
				},
			},
			response: map[string][]metadata{
				"prometheus_engine_query_duration_seconds": {{textparse.MetricTypeSummary, "Query timings", ""}},
				"go_info": {{textparse.MetricTypeGauge, "Information about the Go environment.", ""}},
			},
		},
		// With duplicate metadata for a metric that comes from different targets.
		{
			endpoint: api.metricMetadata,
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
					},
				},
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
					},
				},
			},
			response: map[string][]metadata{
				"go_threads": {{textparse.MetricTypeGauge, "Number of OS threads created", ""}},
			},
		},
		// With non-duplicate metadata for the same metric from different targets.
		{
			endpoint: api.metricMetadata,
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
					},
				},
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads that were created.",
							Unit:   "",
						},
					},
				},
			},
			response: map[string][]metadata{
				"go_threads": {
					{textparse.MetricTypeGauge, "Number of OS threads created", ""},
					{textparse.MetricTypeGauge, "Number of OS threads that were created.", ""},
				},
			},
			sorter: func(m interface{}) {
				v := m.(map[string][]metadata)["go_threads"]

				sort.Slice(v, func(i, j int) bool {
					return v[i].Help < v[j].Help
				})
			},
		},
		// With a limit for the number of metrics returned.
		{
			endpoint: api.metricMetadata,
			query: url.Values{
				"limit": []string{"2"},
			},
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
						{
							Metric: "prometheus_engine_query_duration_seconds",
							Type:   textparse.MetricTypeSummary,
							Help:   "Query timings.",
							Unit:   "",
						},
					},
				},
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_gc_duration_seconds",
							Type:   textparse.MetricTypeSummary,
							Help:   "A summary of the GC invocation durations.",
							Unit:   "",
						},
					},
				},
			},
			responseLen: 2,
		},
		// When requesting a specific metric that is present.
		{
			endpoint: api.metricMetadata,
			query:    url.Values{"metric": []string{"go_threads"}},
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
					},
				},
				{
					identifier: "blackbox",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_gc_duration_seconds",
							Type:   textparse.MetricTypeSummary,
							Help:   "A summary of the GC invocation durations.",
							Unit:   "",
						},
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads that were created.",
							Unit:   "",
						},
					},
				},
			},
			response: map[string][]metadata{
				"go_threads": {
					{textparse.MetricTypeGauge, "Number of OS threads created", ""},
					{textparse.MetricTypeGauge, "Number of OS threads that were created.", ""},
				},
			},
			sorter: func(m interface{}) {
				v := m.(map[string][]metadata)["go_threads"]

				sort.Slice(v, func(i, j int) bool {
					return v[i].Help < v[j].Help
				})
			},
		},
		// With a specific metric that is not present.
		{
			endpoint: api.metricMetadata,
			query:    url.Values{"metric": []string{"go_gc_duration_seconds"}},
			metadata: []targetMetadata{
				{
					identifier: "test",
					metadata: []scrape.MetricMetadata{
						{
							Metric: "go_threads",
							Type:   textparse.MetricTypeGauge,
							Help:   "Number of OS threads created",
							Unit:   "",
						},
					},
				},
			},
			response: map[string][]metadata{},
		},
		// With no available metadata.
		{
			endpoint: api.metricMetadata,
			response: map[string][]metadata{},
		},
		{
			endpoint: api.serveConfig,
			response: &prometheusConfig{
				YAML: samplePrometheusCfg.String(),
			},
		},
		{
			endpoint: api.serveFlags,
			response: sampleFlagMap,
		},
		{
			endpoint: api.alerts,
			response: &AlertDiscovery{
				Alerts: []*Alert{},
			},
		},
		{
			endpoint: api.rules,
			response: &RuleDiscovery{
				RuleGroups: []*RuleGroup{
					{
						Name:     "grp",
						File:     "/path/to/file",
						Interval: 1,
						Rules: []rule{
							alertingRule{
								State:       "inactive",
								Name:        "test_metric3",
								Query:       "absent(test_metric3) != 1",
								Duration:    1,
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
								Alerts:      []*Alert{},
								Health:      "unknown",
								Type:        "alerting",
							},
							alertingRule{
								State:       "inactive",
								Name:        "test_metric4",
								Query:       "up == 1",
								Duration:    1,
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
								Alerts:      []*Alert{},
								Health:      "unknown",
								Type:        "alerting",
							},
							recordingRule{
								Name:   "recording-rule-1",
								Query:  "vector(1)",
								Labels: labels.Labels{},
								Health: "unknown",
								Type:   "recording",
							},
						},
					},
				},
			},
		},
		{
			endpoint: api.rules,
			query: url.Values{
				"type": []string{"alert"},
			},
			response: &RuleDiscovery{
				RuleGroups: []*RuleGroup{
					{
						Name:     "grp",
						File:     "/path/to/file",
						Interval: 1,
						Rules: []rule{
							alertingRule{
								State:       "inactive",
								Name:        "test_metric3",
								Query:       "absent(test_metric3) != 1",
								Duration:    1,
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
								Alerts:      []*Alert{},
								Health:      "unknown",
								Type:        "alerting",
							},
							alertingRule{
								State:       "inactive",
								Name:        "test_metric4",
								Query:       "up == 1",
								Duration:    1,
								Labels:      labels.Labels{},
								Annotations: labels.Labels{},
								Alerts:      []*Alert{},
								Health:      "unknown",
								Type:        "alerting",
							},
						},
					},
				},
			},
		},
		{
			endpoint: api.rules,
			query: url.Values{
				"type": []string{"record"},
			},
			response: &RuleDiscovery{
				RuleGroups: []*RuleGroup{
					{
						Name:     "grp",
						File:     "/path/to/file",
						Interval: 1,
						Rules: []rule{
							recordingRule{
								Name:   "recording-rule-1",
								Query:  "vector(1)",
								Labels: labels.Labels{},
								Health: "unknown",
								Type:   "recording",
							},
						},
					},
				},
			},
		},
	}

	if testLabelAPI {
		tests = append(tests, []test{
			{
				endpoint: api.labelValues,
				params: map[string]string{
					"name": "__name__",
				},
				response: []string{
					"test_metric1",
					"test_metric2",
				},
			},
			{
				endpoint: api.labelValues,
				params: map[string]string{
					"name": "foo",
				},
				response: []string{
					"bar",
					"boo",
				},
			},
			// Bad name parameter.
			{
				endpoint: api.labelValues,
				params: map[string]string{
					"name": "not!!!allowed",
				},
				errType: errorBadData,
			},
			// Label names.
			{
				endpoint: api.labelNames,
				response: []string{"__name__", "foo"},
			},
		}...)
	}

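	// methods reports which HTTP methods to exercise for an endpoint; only
	// query, queryRange, and series accept POST in addition to GET.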
	methods := func(f apiFunc) []string {
		fp := reflect.ValueOf(f).Pointer()
		if fp == reflect.ValueOf(api.query).Pointer() || fp == reflect.ValueOf(api.queryRange).Pointer() || fp == reflect.ValueOf(api.series).Pointer() {
			return []string{http.MethodGet, http.MethodPost}
		}
		return []string{http.MethodGet}
	}

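	// request builds a GET or form-encoded POST request carrying the query
	// values, with a fixed client address.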
	request := func(m string, q url.Values) (*http.Request, error) {
		if m == http.MethodPost {
			r, err := http.NewRequest(m, "http://example.com", strings.NewReader(q.Encode()))
			r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
			r.RemoteAddr = "127.0.0.1:20201"
			return r, err
		}
		r, err := http.NewRequest(m, fmt.Sprintf("http://example.com?%s", q.Encode()), nil)
		r.RemoteAddr = "127.0.0.1:20201"
		return r, err
	}

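	// Run every test case with every allowed method, injecting the per-case
	// target metadata before each call.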
	for i, test := range tests {
		for _, method := range methods(test.endpoint) {
			// Build a context with the correct request params.
			ctx := context.Background()
			for p, v := range test.params {
				ctx = route.WithParam(ctx, p, v)
			}
			t.Logf("run %d\t%s\t%q", i, method, test.query.Encode())

			req, err := request(method, test.query)
			if err != nil {
				t.Fatal(err)
			}

			tr.ResetMetadataStore()
			for _, tm := range test.metadata {
				tr.SetMetadataStoreForTargets(tm.identifier, &testMetaStore{Metadata: tm.metadata})
			}

			res := test.endpoint(req.WithContext(ctx))
			assertAPIError(t, res.err, test.errType)

			if test.sorter != nil {
				test.sorter(res.data)
			}

			if test.responseLen != 0 {
				assertAPIResponseLength(t, res.data, test.responseLen)
			} else {
				assertAPIResponse(t, res.data, test.response)
			}
		}
	}
}

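// assertAPIError fails the test unless got matches the expected error type,
// where errorNone means no error at all.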
func assertAPIError(t *testing.T, got *apiError, exp errorType) {
	t.Helper()

	if got != nil {
		if exp == errorNone {
			t.Fatalf("Unexpected error: %s", got)
		}
		if exp != got.typ {
			t.Fatalf("Expected error of type %q but got type %q (%q)", exp, got.typ, got)
		}
		return
	}
	if exp != errorNone {
		t.Fatalf("Expected error of type %q but got none", exp)
	}
}

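// assertAPIResponse compares got and exp via reflect.DeepEqual and reports a
// JSON rendering of both on mismatch.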
func assertAPIResponse(t *testing.T, got interface{}, exp interface{}) {
|
2019-12-09 21:36:38 +00:00
|
|
|
t.Helper()
|
|
|
|
|
2018-11-15 13:22:16 +00:00
|
|
|
if !reflect.DeepEqual(exp, got) {
|
|
|
|
respJSON, err := json.Marshal(got)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("failed to marshal response as JSON: %v", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
expectedRespJSON, err := json.Marshal(exp)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("failed to marshal expected response as JSON: %v", err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
t.Fatalf(
|
|
|
|
"Response does not match, expected:\n%+v\ngot:\n%+v",
|
|
|
|
string(expectedRespJSON),
|
|
|
|
string(respJSON),
|
|
|
|
)
|
2015-06-04 16:07:57 +00:00
|
|
|
}
|
|
|
|
}

func assertAPIResponseLength(t *testing.T, got interface{}, expLen int) {
	t.Helper()

	gotLen := reflect.ValueOf(got).Len()
	if gotLen != expLen {
		t.Fatalf(
			"Response length does not match, expected:\n%d\ngot:\n%d",
			expLen,
			gotLen,
		)
	}
}
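
// Note: assertAPIResponseLength measures length via reflection, so it is only
// meaningful for responses backed by a slice, array, map, string, or channel;
// reflect.Value.Len panics for any other kind.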

func TestSampledReadEndpoint(t *testing.T) {
	suite, err := promql.NewTest(t, `
		load 1m
			test_metric1{foo="bar",baz="qux"} 1
	`)
	testutil.Ok(t, err)

	defer suite.Close()

	err = suite.Run()
	testutil.Ok(t, err)

	api := &API{
		Queryable:   suite.Storage(),
		QueryEngine: suite.QueryEngine(),
		config: func() config.Config {
			return config.Config{
				GlobalConfig: config.GlobalConfig{
					ExternalLabels: labels.Labels{
						// We expect external labels to be added, with the source labels honored.
						{Name: "baz", Value: "a"},
						{Name: "b", Value: "c"},
						{Name: "d", Value: "e"},
					},
				},
			}
		},
		remoteReadSampleLimit: 1e6,
		remoteReadGate:        gate.New(1),
	}

	// Encode the request.
	matcher1, err := labels.NewMatcher(labels.MatchEqual, "__name__", "test_metric1")
	testutil.Ok(t, err)

	matcher2, err := labels.NewMatcher(labels.MatchEqual, "d", "e")
	testutil.Ok(t, err)

	query, err := remote.ToQuery(0, 1, []*labels.Matcher{matcher1, matcher2}, &storage.SelectParams{Step: 0, Func: "avg"})
	testutil.Ok(t, err)

	req := &prompb.ReadRequest{Queries: []*prompb.Query{query}}
	data, err := proto.Marshal(req)
	testutil.Ok(t, err)

	compressed := snappy.Encode(nil, data)
	request, err := http.NewRequest("POST", "", bytes.NewBuffer(compressed))
	testutil.Ok(t, err)

	recorder := httptest.NewRecorder()
	api.remoteRead(recorder, request)

	if recorder.Code/100 != 2 {
		t.Fatal(recorder.Code)
	}

	testutil.Equals(t, "application/x-protobuf", recorder.Result().Header.Get("Content-Type"))
	testutil.Equals(t, "snappy", recorder.Result().Header.Get("Content-Encoding"))

	// Decode the response.
	compressed, err = ioutil.ReadAll(recorder.Result().Body)
	testutil.Ok(t, err)

	uncompressed, err := snappy.Decode(nil, compressed)
	testutil.Ok(t, err)

	var resp prompb.ReadResponse
	err = proto.Unmarshal(uncompressed, &resp)
	testutil.Ok(t, err)

	if len(resp.Results) != 1 {
		t.Fatalf("Expected 1 result, got %d", len(resp.Results))
	}

	testutil.Equals(t, &prompb.QueryResult{
		Timeseries: []*prompb.TimeSeries{
			{
				Labels: []prompb.Label{
					{Name: "__name__", Value: "test_metric1"},
					{Name: "b", Value: "c"},
					{Name: "baz", Value: "qux"},
					{Name: "d", Value: "e"},
					{Name: "foo", Value: "bar"},
				},
				Samples: []prompb.Sample{{Value: 1, Timestamp: 0}},
			},
		},
	}, resp.Results[0])
}
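
// The body of a sampled remote-read request is always built the same way:
// marshal the ReadRequest protobuf, then snappy-compress the raw bytes. The
// helper below is a minimal sketch of that framing (a hypothetical function,
// not used by the tests in this file, which build the body in-line).
func encodeRemoteReadRequest(queries ...*prompb.Query) ([]byte, error) {
	data, err := proto.Marshal(&prompb.ReadRequest{Queries: queries})
	if err != nil {
		return nil, err
	}
	// Snappy block format, matching the "snappy" Content-Encoding asserted above.
	return snappy.Encode(nil, data), nil
}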

func TestStreamReadEndpoint(t *testing.T) {
	// First with 120 samples. We expect 1 frame with 1 chunk.
	// Second with 121 samples. We expect 1 frame with 2 chunks.
	// Third with 241 samples. We expect 1 frame with 2 chunks, and 1 frame with 1 chunk for the same series due to the bytes limit.
	suite, err := promql.NewTest(t, `
		load 1m
			test_metric1{foo="bar1",baz="qux"} 0+100x119
			test_metric1{foo="bar2",baz="qux"} 0+100x120
			test_metric1{foo="bar3",baz="qux"} 0+100x240
	`)
	testutil.Ok(t, err)

	defer suite.Close()

	testutil.Ok(t, suite.Run())

	api := &API{
		Queryable:   suite.Storage(),
		QueryEngine: suite.QueryEngine(),
		config: func() config.Config {
			return config.Config{
				GlobalConfig: config.GlobalConfig{
					ExternalLabels: labels.Labels{
						// We expect external labels to be added, with the source labels honored.
						{Name: "baz", Value: "a"},
						{Name: "b", Value: "c"},
						{Name: "d", Value: "e"},
					},
				},
			}
		},
		remoteReadSampleLimit: 1e6,
		remoteReadGate:        gate.New(1),
		// The labelset has 57 bytes and a full chunk in the test data has roughly 240 bytes, which allows at most 2 chunks per frame in this test.
		remoteReadMaxBytesInFrame: 57 + 480,
	}
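
	// Frame budget: 57 bytes of labels plus 2*240 bytes of chunk data is 537
	// bytes, which exactly fills remoteReadMaxBytesInFrame, so a third chunk
	// for the same series has to start a new frame. That is why the bar3
	// series (241 samples, 3 chunks) shows up below as one frame with two
	// chunks followed by one frame with a single chunk.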

	// Encode the request.
	matcher1, err := labels.NewMatcher(labels.MatchEqual, "__name__", "test_metric1")
	testutil.Ok(t, err)

	matcher2, err := labels.NewMatcher(labels.MatchEqual, "d", "e")
	testutil.Ok(t, err)

	matcher3, err := labels.NewMatcher(labels.MatchEqual, "foo", "bar1")
	testutil.Ok(t, err)

	query1, err := remote.ToQuery(0, 14400001, []*labels.Matcher{matcher1, matcher2}, &storage.SelectParams{
		Step:  1,
		Func:  "avg",
		Start: 0,
		End:   14400001,
	})
	testutil.Ok(t, err)

	query2, err := remote.ToQuery(0, 14400001, []*labels.Matcher{matcher1, matcher3}, &storage.SelectParams{
		Step:  1,
		Func:  "avg",
		Start: 0,
		End:   14400001,
	})
	testutil.Ok(t, err)

	req := &prompb.ReadRequest{
		Queries:               []*prompb.Query{query1, query2},
		AcceptedResponseTypes: []prompb.ReadRequest_ResponseType{prompb.ReadRequest_STREAMED_XOR_CHUNKS},
	}
	data, err := proto.Marshal(req)
	testutil.Ok(t, err)

	compressed := snappy.Encode(nil, data)
	request, err := http.NewRequest("POST", "", bytes.NewBuffer(compressed))
	testutil.Ok(t, err)

	recorder := httptest.NewRecorder()
	api.remoteRead(recorder, request)

	if recorder.Code/100 != 2 {
		t.Fatal(recorder.Code)
	}

	testutil.Equals(t, "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse", recorder.Result().Header.Get("Content-Type"))
	testutil.Equals(t, "", recorder.Result().Header.Get("Content-Encoding"))

	var results []*prompb.ChunkedReadResponse
	stream := remote.NewChunkedReader(recorder.Result().Body, remote.DefaultChunkedReadLimit, nil)
	for {
		res := &prompb.ChunkedReadResponse{}
		err := stream.NextProto(res)
		if err == io.EOF {
			break
		}
		testutil.Ok(t, err)
		results = append(results, res)
	}

	if len(results) != 5 {
		t.Fatalf("Expected 5 results, got %d", len(results))
	}

	testutil.Equals(t, []*prompb.ChunkedReadResponse{
		{
			ChunkedSeries: []*prompb.ChunkedSeries{
				{
					Labels: []prompb.Label{
						{Name: "__name__", Value: "test_metric1"},
						{Name: "b", Value: "c"},
						{Name: "baz", Value: "qux"},
						{Name: "d", Value: "e"},
						{Name: "foo", Value: "bar1"},
					},
					Chunks: []prompb.Chunk{
						{
							Type:      prompb.Chunk_XOR,
							MaxTimeMs: 7140000,
							Data:      []byte("\000x\000\000\000\000\000\000\000\000\000\340\324\003\302|\005\224\000\301\254}\351z2\320O\355\264n[\007\316\224\243md\371\320\375\032Pm\nS\235\016Q\255\006P\275\250\277\312\201Z\003(3\240R\207\332\005(\017\240\322\201\332=(\023\2402\203Z\007(w\2402\201Z\017(\023\265\227\364P\033@\245\007\364\nP\033C\245\002t\036P+@e\036\364\016Pk@e\002t:P;A\245\001\364\nS\373@\245\006t\006P+C\345\002\364\006Pk@\345\036t\nP\033A\245\003\364:P\033@\245\006t\016ZJ\377\\\205\313\210\327\270\017\345+F[\310\347E)\355\024\241\366\342}(v\215(N\203)\326\207(\336\203(V\332W\362\202t4\240m\005(\377AJ\006\320\322\202t\374\240\255\003(oA\312:\3202"),
						},
					},
				},
			},
		},
		{
			ChunkedSeries: []*prompb.ChunkedSeries{
				{
					Labels: []prompb.Label{
						{Name: "__name__", Value: "test_metric1"},
						{Name: "b", Value: "c"},
						{Name: "baz", Value: "qux"},
						{Name: "d", Value: "e"},
						{Name: "foo", Value: "bar2"},
					},
					Chunks: []prompb.Chunk{
						{
							Type:      prompb.Chunk_XOR,
							MaxTimeMs: 7140000,
							Data:      []byte("\000x\000\000\000\000\000\000\000\000\000\340\324\003\302|\005\224\000\301\254}\351z2\320O\355\264n[\007\316\224\243md\371\320\375\032Pm\nS\235\016Q\255\006P\275\250\277\312\201Z\003(3\240R\207\332\005(\017\240\322\201\332=(\023\2402\203Z\007(w\2402\201Z\017(\023\265\227\364P\033@\245\007\364\nP\033C\245\002t\036P+@e\036\364\016Pk@e\002t:P;A\245\001\364\nS\373@\245\006t\006P+C\345\002\364\006Pk@\345\036t\nP\033A\245\003\364:P\033@\245\006t\016ZJ\377\\\205\313\210\327\270\017\345+F[\310\347E)\355\024\241\366\342}(v\215(N\203)\326\207(\336\203(V\332W\362\202t4\240m\005(\377AJ\006\320\322\202t\374\240\255\003(oA\312:\3202"),
						},
						{
							Type:      prompb.Chunk_XOR,
							MinTimeMs: 7200000,
							MaxTimeMs: 7200000,
							Data:      []byte("\000\001\200\364\356\006@\307p\000\000\000\000\000\000"),
						},
					},
				},
			},
		},
		{
			ChunkedSeries: []*prompb.ChunkedSeries{
				{
					Labels: []prompb.Label{
						{Name: "__name__", Value: "test_metric1"},
						{Name: "b", Value: "c"},
						{Name: "baz", Value: "qux"},
						{Name: "d", Value: "e"},
						{Name: "foo", Value: "bar3"},
					},
					Chunks: []prompb.Chunk{
						{
							Type:      prompb.Chunk_XOR,
							MaxTimeMs: 7140000,
							Data:      []byte("\000x\000\000\000\000\000\000\000\000\000\340\324\003\302|\005\224\000\301\254}\351z2\320O\355\264n[\007\316\224\243md\371\320\375\032Pm\nS\235\016Q\255\006P\275\250\277\312\201Z\003(3\240R\207\332\005(\017\240\322\201\332=(\023\2402\203Z\007(w\2402\201Z\017(\023\265\227\364P\033@\245\007\364\nP\033C\245\002t\036P+@e\036\364\016Pk@e\002t:P;A\245\001\364\nS\373@\245\006t\006P+C\345\002\364\006Pk@\345\036t\nP\033A\245\003\364:P\033@\245\006t\016ZJ\377\\\205\313\210\327\270\017\345+F[\310\347E)\355\024\241\366\342}(v\215(N\203)\326\207(\336\203(V\332W\362\202t4\240m\005(\377AJ\006\320\322\202t\374\240\255\003(oA\312:\3202"),
						},
						{
							Type:      prompb.Chunk_XOR,
							MinTimeMs: 7200000,
							MaxTimeMs: 14340000,
							Data:      []byte("\000x\200\364\356\006@\307p\000\000\000\000\000\340\324\003\340>\224\355\260\277\322\200\372\005(=\240R\207:\003(\025\240\362\201z\003(\365\240r\203:\005(\r\241\322\201\372\r(\r\240R\237:\007(5\2402\201z\037(\025\2402\203:\005(\375\240R\200\372\r(\035\241\322\201:\003(5\240r\326g\364\271\213\227!\253q\037\312N\340GJ\033E)\375\024\241\266\362}(N\217(V\203)\336\207(\326\203(N\334W\322\203\2644\240}\005(\373AJ\031\3202\202\264\374\240\275\003(kA\3129\320R\201\2644\240\375\264\277\322\200\332\005(3\240r\207Z\003(\027\240\362\201Z\003(\363\240R\203\332\005(\017\241\322\201\332\r(\023\2402\237Z\007(7\2402\201Z\037(\023\240\322\200\332\005(\377\240R\200\332\r "),
						},
					},
				},
			},
		},
		{
			ChunkedSeries: []*prompb.ChunkedSeries{
				{
					Labels: []prompb.Label{
						{Name: "__name__", Value: "test_metric1"},
						{Name: "b", Value: "c"},
						{Name: "baz", Value: "qux"},
						{Name: "d", Value: "e"},
						{Name: "foo", Value: "bar3"},
					},
					Chunks: []prompb.Chunk{
						{
							Type:      prompb.Chunk_XOR,
							MinTimeMs: 14400000,
							MaxTimeMs: 14400000,
							Data:      []byte("\000\001\200\350\335\r@\327p\000\000\000\000\000\000"),
						},
					},
				},
			},
		},
		{
			ChunkedSeries: []*prompb.ChunkedSeries{
				{
					Labels: []prompb.Label{
						{Name: "__name__", Value: "test_metric1"},
						{Name: "b", Value: "c"},
						{Name: "baz", Value: "qux"},
						{Name: "d", Value: "e"},
						{Name: "foo", Value: "bar1"},
					},
					Chunks: []prompb.Chunk{
						{
							Type:      prompb.Chunk_XOR,
							MaxTimeMs: 7140000,
							Data:      []byte("\000x\000\000\000\000\000\000\000\000\000\340\324\003\302|\005\224\000\301\254}\351z2\320O\355\264n[\007\316\224\243md\371\320\375\032Pm\nS\235\016Q\255\006P\275\250\277\312\201Z\003(3\240R\207\332\005(\017\240\322\201\332=(\023\2402\203Z\007(w\2402\201Z\017(\023\265\227\364P\033@\245\007\364\nP\033C\245\002t\036P+@e\036\364\016Pk@e\002t:P;A\245\001\364\nS\373@\245\006t\006P+C\345\002\364\006Pk@\345\036t\nP\033A\245\003\364:P\033@\245\006t\016ZJ\377\\\205\313\210\327\270\017\345+F[\310\347E)\355\024\241\366\342}(v\215(N\203)\326\207(\336\203(V\332W\362\202t4\240m\005(\377AJ\006\320\322\202t\374\240\255\003(oA\312:\3202"),
						},
					},
				},
			},
			QueryIndex: 1,
		},
	}, results)
}
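
// The drain loop above is the canonical way to consume a streamed read
// response. As a standalone sketch (a hypothetical helper, not used by the
// test), assuming remote.ChunkedReader's NextProto contract of returning
// io.EOF at end of stream:
func readChunkedResponses(r *remote.ChunkedReader) ([]*prompb.ChunkedReadResponse, error) {
	var results []*prompb.ChunkedReadResponse
	for {
		// One ChunkedReadResponse per frame on the wire.
		res := &prompb.ChunkedReadResponse{}
		if err := r.NextProto(res); err == io.EOF {
			return results, nil
		} else if err != nil {
			return nil, err
		}
		results = append(results, res)
	}
}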

type fakeDB struct {
	err    error
	closer func()
}

func (f *fakeDB) CleanTombstones() error                               { return f.err }
func (f *fakeDB) Delete(mint, maxt int64, ms ...*labels.Matcher) error { return f.err }
func (f *fakeDB) Dir() string {
	dir, _ := ioutil.TempDir("", "fakeDB")
	f.closer = func() {
		os.RemoveAll(dir)
	}
	return dir
}
func (f *fakeDB) Snapshot(dir string, withHead bool) error { return f.err }
func (f *fakeDB) Head() *tsdb.Head {
	h, _ := tsdb.NewHead(nil, nil, nil, 1000, tsdb.DefaultStripeSize)
	return h
}
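
// The fakeDB stub above satisfies the TSDBAdmin interface: every mutating
// method simply returns the injected err, so the admin-endpoint tests below
// can exercise both the success and the internal-error paths without a real
// TSDB on disk.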

func TestAdminEndpoints(t *testing.T) {
	tsdb, tsdbWithError := &fakeDB{}, &fakeDB{err: errors.New("some error")}
	snapshotAPI := func(api *API) apiFunc { return api.snapshot }
	cleanAPI := func(api *API) apiFunc { return api.cleanTombstones }
	deleteAPI := func(api *API) apiFunc { return api.deleteSeries }

	for i, tc := range []struct {
		db          *fakeDB
		enableAdmin bool
		endpoint    func(api *API) apiFunc
		method      string
		values      url.Values

		errType errorType
	}{
		// Tests for the snapshot endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    snapshotAPI,

			errType: errorUnavailable,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,

			errType: errorNone,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			values:      map[string][]string{"skip_head": {"true"}},

			errType: errorNone,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			values:      map[string][]string{"skip_head": {"xxx"}},

			errType: errorBadData,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    snapshotAPI,

			errType: errorInternal,
		},
		{
			db:          nil,
			enableAdmin: true,
			endpoint:    snapshotAPI,

			errType: errorUnavailable,
		},
		// Tests for the cleanTombstones endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    cleanAPI,

			errType: errorUnavailable,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    cleanAPI,

			errType: errorNone,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    cleanAPI,

			errType: errorInternal,
		},
		{
			db:          nil,
			enableAdmin: true,
			endpoint:    cleanAPI,

			errType: errorUnavailable,
		},
		// Tests for the deleteSeries endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    deleteAPI,

			errType: errorUnavailable,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,

			errType: errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"123"}},

			errType: errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}, "start": {"xxx"}},

			errType: errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}, "end": {"xxx"}},

			errType: errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}},

			errType: errorNone,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up{job!=\"foo\"}", "{job=~\"bar.+\"}", "up{instance!~\"fred.+\"}"}},

			errType: errorNone,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}},

			errType: errorInternal,
		},
		{
			db:          nil,
			enableAdmin: true,
			endpoint:    deleteAPI,

			errType: errorUnavailable,
		},
	} {
		tc := tc
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			api := &API{
				db: func() TSDBAdmin {
					if tc.db != nil {
						return tc.db
					}
					return nil
				},
				ready:       func(f http.HandlerFunc) http.HandlerFunc { return f },
				enableAdmin: tc.enableAdmin,
			}
			defer func() {
				if tc.db != nil && tc.db.closer != nil {
					tc.db.closer()
				}
			}()

			endpoint := tc.endpoint(api)
			req, err := http.NewRequest(tc.method, fmt.Sprintf("?%s", tc.values.Encode()), nil)
			if err != nil {
				t.Fatalf("Error when creating test request: %s", err)
			}
			res := endpoint(req)
			assertAPIError(t, res.err, tc.errType)
		})
	}
}
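
// The enableAdmin toggle mirrors Prometheus's --web.enable-admin-api flag;
// with it unset, all three admin endpoints answer errorUnavailable regardless
// of the request. For reference, against a live server a snapshot is
// triggered with something like:
//
//	curl -XPOST http://localhost:9090/api/v1/admin/tsdb/snapshot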

func TestRespondSuccess(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		api := API{}
		api.respond(w, "test", nil)
	}))
	defer s.Close()

	resp, err := http.Get(s.URL)
	if err != nil {
		t.Fatalf("Error on test request: %s", err)
	}
	body, err := ioutil.ReadAll(resp.Body)
	defer resp.Body.Close()
	if err != nil {
		t.Fatalf("Error reading response body: %s", err)
	}

	if resp.StatusCode != 200 {
		t.Fatalf("Return code %d expected in success response but got %d", 200, resp.StatusCode)
	}
	if h := resp.Header.Get("Content-Type"); h != "application/json" {
		t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
	}

	var res response
	if err = json.Unmarshal([]byte(body), &res); err != nil {
		t.Fatalf("Error unmarshaling JSON body: %s", err)
	}

	exp := &response{
		Status: statusSuccess,
		Data:   "test",
	}
	if !reflect.DeepEqual(&res, exp) {
		t.Fatalf("Expected response \n%v\n but got \n%v\n", exp, &res)
	}
}

func TestRespondError(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		api := API{}
		api.respondError(w, &apiError{errorTimeout, errors.New("message")}, "test")
	}))
	defer s.Close()

	resp, err := http.Get(s.URL)
	if err != nil {
		t.Fatalf("Error on test request: %s", err)
	}
	body, err := ioutil.ReadAll(resp.Body)
	defer resp.Body.Close()
	if err != nil {
		t.Fatalf("Error reading response body: %s", err)
	}

	if want, have := http.StatusServiceUnavailable, resp.StatusCode; want != have {
		t.Fatalf("Return code %d expected in error response but got %d", want, have)
	}
	if h := resp.Header.Get("Content-Type"); h != "application/json" {
		t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
	}

	var res response
	if err = json.Unmarshal([]byte(body), &res); err != nil {
		t.Fatalf("Error unmarshaling JSON body: %s", err)
	}

	exp := &response{
		Status:    statusError,
		Data:      "test",
		ErrorType: errorTimeout,
		Error:     "message",
	}
	if !reflect.DeepEqual(&res, exp) {
		t.Fatalf("Expected response \n%v\n but got \n%v\n", exp, &res)
	}
}

func TestParseTime(t *testing.T) {
	ts, err := time.Parse(time.RFC3339Nano, "2015-06-03T13:21:58.555Z")
	if err != nil {
		panic(err)
	}

	var tests = []struct {
		input  string
		fail   bool
		result time.Time
	}{
		{
			input: "",
			fail:  true,
		}, {
			input: "abc",
			fail:  true,
		}, {
			input: "30s",
			fail:  true,
		}, {
			input:  "123",
			result: time.Unix(123, 0),
		}, {
			input:  "123.123",
			result: time.Unix(123, 123000000),
		}, {
			input:  "2015-06-03T13:21:58.555Z",
			result: ts,
		}, {
			input:  "2015-06-03T14:21:58.555+01:00",
			result: ts,
		}, {
			// Test float rounding.
			input:  "1543578564.705",
			result: time.Unix(1543578564, 705*1e6),
		},
		{
			input:  minTime.Format(time.RFC3339Nano),
			result: minTime,
		},
		{
			input:  maxTime.Format(time.RFC3339Nano),
			result: maxTime,
		},
	}

	for _, test := range tests {
		ts, err := parseTime(test.input)
		if err != nil && !test.fail {
			t.Errorf("Unexpected error for %q: %s", test.input, err)
			continue
		}
		if err == nil && test.fail {
			t.Errorf("Expected error for %q but got none", test.input)
			continue
		}
		if !test.fail && !ts.Equal(test.result) {
			t.Errorf("Expected time %v for input %q but got %v", test.result, test.input, ts)
		}
	}
}
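
// parseTime (defined in api.go) accepts either an RFC 3339 string or a float
// Unix timestamp. Below is a minimal sketch of the float branch, assuming the
// millisecond rounding that the "float rounding" case above relies on; the
// helper name is hypothetical and the function is not used by the tests.
func parseUnixFloatSketch(s float64) time.Time {
	sec, frac := math.Modf(s)
	// Round the fractional part to milliseconds so that inputs such as
	// "1543578564.705" survive float64 representation error.
	frac = math.Round(frac*1000) / 1000
	return time.Unix(int64(sec), int64(frac*float64(time.Second))).UTC()
}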

func TestParseDuration(t *testing.T) {
	var tests = []struct {
		input  string
		fail   bool
		result time.Duration
	}{
		{
			input: "",
			fail:  true,
		}, {
			input: "abc",
			fail:  true,
		}, {
			input: "2015-06-03T13:21:58.555Z",
			fail:  true,
		}, {
			// Internal int64 overflow.
			input: "-148966367200.372",
			fail:  true,
		}, {
			// Internal int64 overflow.
			input: "148966367200.372",
			fail:  true,
		}, {
			input:  "123",
			result: 123 * time.Second,
		}, {
			input:  "123.333",
			result: 123*time.Second + 333*time.Millisecond,
		}, {
			input:  "15s",
			result: 15 * time.Second,
		}, {
			input:  "5m",
			result: 5 * time.Minute,
		},
	}

	for _, test := range tests {
		d, err := parseDuration(test.input)
		if err != nil && !test.fail {
			t.Errorf("Unexpected error for %q: %s", test.input, err)
			continue
		}
		if err == nil && test.fail {
			t.Errorf("Expected error for %q but got none", test.input)
			continue
		}
		if !test.fail && d != test.result {
			t.Errorf("Expected duration %v for input %q but got %v", test.result, test.input, d)
		}
	}
}
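
// The two "internal int64 overflow" cases above sit just outside the range of
// time.Duration: 148966367200.372 s is about 1.49e20 ns, while int64 tops out
// at roughly 9.22e18 ns (about 292 years).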

func TestOptionsMethod(t *testing.T) {
	r := route.New()
	api := &API{ready: func(f http.HandlerFunc) http.HandlerFunc { return f }}
	api.Register(r)

	s := httptest.NewServer(r)
	defer s.Close()

	req, err := http.NewRequest("OPTIONS", s.URL+"/any_path", nil)
	if err != nil {
		t.Fatalf("Error creating OPTIONS request: %s", err)
	}
	client := &http.Client{}
	resp, err := client.Do(req)
	if err != nil {
		t.Fatalf("Error executing OPTIONS request: %s", err)
	}

	if resp.StatusCode != http.StatusNoContent {
		t.Fatalf("Expected status %d, got %d", http.StatusNoContent, resp.StatusCode)
	}
}

func TestRespond(t *testing.T) {
	cases := []struct {
		response interface{}
		expected string
	}{
		{
			response: &queryData{
				ResultType: parser.ValueTypeMatrix,
				Result: promql.Matrix{
					promql.Series{
						Points: []promql.Point{{V: 1, T: 1000}},
						Metric: labels.FromStrings("__name__", "foo"),
					},
				},
			},
			expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"values":[[1,"1"]]}]}}`,
		},
		{
			response: promql.Point{V: 0, T: 0},
			expected: `{"status":"success","data":[0,"0"]}`,
		},
		{
			response: promql.Point{V: 20, T: 1},
			expected: `{"status":"success","data":[0.001,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 10},
			expected: `{"status":"success","data":[0.010,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 100},
			expected: `{"status":"success","data":[0.100,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 1001},
			expected: `{"status":"success","data":[1.001,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 1010},
			expected: `{"status":"success","data":[1.010,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 1100},
			expected: `{"status":"success","data":[1.100,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: 12345678123456555},
			expected: `{"status":"success","data":[12345678123456.555,"20"]}`,
		},
		{
			response: promql.Point{V: 20, T: -1},
			expected: `{"status":"success","data":[-0.001,"20"]}`,
		},
		{
			response: promql.Point{V: math.NaN(), T: 0},
			expected: `{"status":"success","data":[0,"NaN"]}`,
		},
		{
			response: promql.Point{V: math.Inf(1), T: 0},
			expected: `{"status":"success","data":[0,"+Inf"]}`,
		},
		{
			response: promql.Point{V: math.Inf(-1), T: 0},
			expected: `{"status":"success","data":[0,"-Inf"]}`,
		},
		{
			response: promql.Point{V: 1.2345678e6, T: 0},
			expected: `{"status":"success","data":[0,"1234567.8"]}`,
		},
		{
			response: promql.Point{V: 1.2345678e-6, T: 0},
			expected: `{"status":"success","data":[0,"0.0000012345678"]}`,
		},
		{
			response: promql.Point{V: 1.2345678e-67, T: 0},
			expected: `{"status":"success","data":[0,"1.2345678e-67"]}`,
		},
	}

	for _, c := range cases {
		s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			api := API{}
			api.respond(w, c.response, nil)
		}))
		defer s.Close()

		resp, err := http.Get(s.URL)
		if err != nil {
			t.Fatalf("Error on test request: %s", err)
		}
		body, err := ioutil.ReadAll(resp.Body)
		defer resp.Body.Close()
		if err != nil {
			t.Fatalf("Error reading response body: %s", err)
		}

		if string(body) != c.expected {
			t.Fatalf("Expected response \n%v\n but got \n%v\n", c.expected, string(body))
		}
	}
}
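
// The cases above pin down two encoding decisions in api.respond: the
// millisecond timestamp is rendered as seconds with millisecond precision
// (T=1010 becomes 1.010), and the sample value is rendered as a string, which
// keeps NaN, +Inf, and -Inf representable since none of them is a valid JSON
// number.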

func TestTSDBStatus(t *testing.T) {
	tsdb := &fakeDB{}
	tsdbStatusAPI := func(api *API) apiFunc { return api.serveTSDBStatus }

	for i, tc := range []struct {
		db       *fakeDB
		endpoint func(api *API) apiFunc
		method   string
		values   url.Values

		errType errorType
	}{
		// Tests for the TSDB Status endpoint.
		{
			db:       tsdb,
			endpoint: tsdbStatusAPI,

			errType: errorNone,
		},
	} {
		tc := tc
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			api := &API{
				db: func() TSDBAdmin {
					if tc.db != nil {
						return tc.db
					}
					return nil
				},
			}
			endpoint := tc.endpoint(api)
			req, err := http.NewRequest(tc.method, fmt.Sprintf("?%s", tc.values.Encode()), nil)
			if err != nil {
				t.Fatalf("Error when creating test request: %s", err)
			}
			res := endpoint(req)
			assertAPIError(t, res.err, tc.errType)
		})
	}
}

// This is a global to avoid the benchmark being optimized away.
var testResponseWriter = httptest.ResponseRecorder{}

func BenchmarkRespond(b *testing.B) {
	b.ReportAllocs()
	points := []promql.Point{}
	for i := 0; i < 10000; i++ {
		points = append(points, promql.Point{V: float64(i * 1000000), T: int64(i)})
	}
	response := &queryData{
		ResultType: parser.ValueTypeMatrix,
		Result: promql.Matrix{
			promql.Series{
				Points: points,
				Metric: nil,
			},
		},
	}
	b.ResetTimer()
	api := API{}
	for n := 0; n < b.N; n++ {
		api.respond(&testResponseWriter, response, nil)
	}
}
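
// A typical invocation, assuming this package lives at web/api/v1 as in the
// Prometheus repository:
//
//	go test -run=NONE -bench=BenchmarkRespond -benchmem ./web/api/v1/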
|