2016-04-13 14:08:22 +00:00
// Copyright 2016 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
2015-06-04 16:07:57 +00:00
package v1
import (
2017-10-25 04:21:42 +00:00
"context"
2015-06-04 16:07:57 +00:00
"encoding/json"
2023-11-08 03:49:39 +00:00
"errors"
2015-06-04 16:07:57 +00:00
"fmt"
2022-04-27 09:24:36 +00:00
"io"
2015-06-04 16:07:57 +00:00
"net/http"
"net/http/httptest"
"net/url"
2018-11-15 13:22:16 +00:00
"os"
2015-06-04 16:07:57 +00:00
"reflect"
2020-07-31 15:03:02 +00:00
"runtime"
2019-12-09 21:36:38 +00:00
"sort"
2017-11-11 00:53:48 +00:00
"strings"
2015-06-04 16:07:57 +00:00
"testing"
"time"
2022-10-20 09:17:00 +00:00
"github.com/prometheus/prometheus/prompb"
2022-02-10 14:17:05 +00:00
"github.com/prometheus/prometheus/util/stats"
2021-06-11 16:17:59 +00:00
"github.com/go-kit/log"
2018-09-07 21:26:04 +00:00
"github.com/prometheus/client_golang/prometheus"
2018-06-16 17:26:37 +00:00
config_util "github.com/prometheus/common/config"
2015-08-20 15:18:46 +00:00
"github.com/prometheus/common/model"
2018-06-16 17:26:37 +00:00
"github.com/prometheus/common/promlog"
2015-09-24 15:07:11 +00:00
"github.com/prometheus/common/route"
2020-10-29 09:43:23 +00:00
"github.com/stretchr/testify/require"
2015-06-04 16:07:57 +00:00
2017-05-11 15:09:24 +00:00
"github.com/prometheus/prometheus/config"
2021-11-08 14:23:17 +00:00
"github.com/prometheus/prometheus/model/exemplar"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/model/textparse"
"github.com/prometheus/prometheus/model/timestamp"
2015-06-04 16:07:57 +00:00
"github.com/prometheus/prometheus/promql"
2020-02-03 18:23:07 +00:00
"github.com/prometheus/prometheus/promql/parser"
2018-03-25 16:50:34 +00:00
"github.com/prometheus/prometheus/rules"
2018-02-01 09:55:07 +00:00
"github.com/prometheus/prometheus/scrape"
2018-05-08 08:48:13 +00:00
"github.com/prometheus/prometheus/storage"
2017-10-23 20:28:17 +00:00
"github.com/prometheus/prometheus/storage/remote"
2019-11-18 19:53:33 +00:00
"github.com/prometheus/prometheus/tsdb"
2019-08-09 01:35:39 +00:00
"github.com/prometheus/prometheus/util/teststorage"
2015-06-04 16:07:57 +00:00
)
// testEngine is a shared PromQL engine for the API tests, configured with
// generous limits and all optional engine features enabled.
var testEngine = promql.NewEngine(promql.EngineOpts{
	Logger:                   nil,
	Reg:                      nil,
	MaxSamples:               10000,
	Timeout:                  100 * time.Second,
	NoStepSubqueryIntervalFn: func(int64) int64 { return 60 * 1000 }, // 1m default subquery step, in ms
	EnableAtModifier:         true,
	EnableNegativeOffset:     true,
	EnablePerStepStats:       true,
})
// testMetaStore satisfies the scrape.MetricMetadataStore interface.
// It is used to inject specific metadata as part of a test case.
type testMetaStore struct {
	Metadata []scrape.MetricMetadata
}

// ListMetadata returns every metadata entry held by the store.
func (s *testMetaStore) ListMetadata() []scrape.MetricMetadata {
	return s.Metadata
}

// GetMetadata returns the metadata entry whose Metric matches the given
// name, and whether such an entry was found.
func (s *testMetaStore) GetMetadata(metric string) (scrape.MetricMetadata, bool) {
	for _, m := range s.Metadata {
		if metric == m.Metric {
			return m, true
		}
	}
	return scrape.MetricMetadata{}, false
}

// SizeMetadata and LengthMetadata are not exercised by these tests; both
// report zero.
func (s *testMetaStore) SizeMetadata() int   { return 0 }
func (s *testMetaStore) LengthMetadata() int { return 0 }
// testTargetRetriever represents a list of targets to scrape.
// It is used to represent targets as part of test cases.
type testTargetRetriever struct {
	activeTargets  map[string][]*scrape.Target // targets currently scraped, keyed by identifier
	droppedTargets map[string][]*scrape.Target // targets dropped by relabeling, keyed by identifier
}

// testTargetParams describes one target used to build a testTargetRetriever.
type testTargetParams struct {
	Identifier       string
	Labels           labels.Labels
	DiscoveredLabels labels.Labels
	Params           url.Values
	Reports          []*testReport // scrape outcomes replayed onto the target
	Active           bool          // true => active set, false => dropped set
}

// testReport is a single scrape outcome to replay on a test target.
type testReport struct {
	Start    time.Time
	Duration time.Duration
	Error    error
}
// newTestTargetRetriever builds a testTargetRetriever from the given target
// descriptions. Each target's scrape reports are replayed so its health and
// last-scrape state are populated, and the target is filed into either the
// active or the dropped set depending on its Active flag.
func newTestTargetRetriever(targetsInfo []*testTargetParams) *testTargetRetriever {
	// Use := with make directly instead of the split var/assign form.
	activeTargets := make(map[string][]*scrape.Target, len(targetsInfo))
	droppedTargets := make(map[string][]*scrape.Target)

	for _, t := range targetsInfo {
		nt := scrape.NewTarget(t.Labels, t.DiscoveredLabels, t.Params)
		// Replay recorded scrape outcomes onto the fresh target.
		for _, r := range t.Reports {
			nt.Report(r.Start, r.Duration, r.Error)
		}
		if t.Active {
			activeTargets[t.Identifier] = []*scrape.Target{nt}
		} else {
			droppedTargets[t.Identifier] = []*scrape.Target{nt}
		}
	}

	return &testTargetRetriever{
		activeTargets:  activeTargets,
		droppedTargets: droppedTargets,
	}
}
// scrapeStart is a fixed reference time slightly in the past, used as the
// start time of scrape reports on test targets.
var scrapeStart = time.Now().Add(-11 * time.Second)
// TargetsActive returns the active targets, keyed by identifier.
func (t testTargetRetriever) TargetsActive() map[string][]*scrape.Target {
	return t.activeTargets
}

// TargetsDropped returns the dropped targets, keyed by identifier.
func (t testTargetRetriever) TargetsDropped() map[string][]*scrape.Target {
	return t.droppedTargets
}
2023-08-14 14:39:25 +00:00
func ( t testTargetRetriever ) TargetsDroppedCounts ( ) map [ string ] int {
r := make ( map [ string ] int )
for k , v := range t . droppedTargets {
r [ k ] = len ( v )
}
return r
}
2019-12-10 14:56:16 +00:00
func ( t * testTargetRetriever ) SetMetadataStoreForTargets ( identifier string , metadata scrape . MetricMetadataStore ) error {
2019-12-04 19:33:01 +00:00
targets , ok := t . activeTargets [ identifier ]
if ! ok {
return errors . New ( "targets not found" )
}
for _ , at := range targets {
at . SetMetadataStore ( metadata )
}
return nil
}
2019-12-10 14:56:16 +00:00
func ( t * testTargetRetriever ) ResetMetadataStore ( ) {
for _ , at := range t . activeTargets {
for _ , tt := range at {
tt . SetMetadataStore ( & testMetaStore { } )
}
}
}
// toFactory wraps the retriever in a context-ignoring factory function, the
// shape the API struct expects for its targetRetriever field.
func (t *testTargetRetriever) toFactory() func(context.Context) TargetRetriever {
	return func(context.Context) TargetRetriever { return t }
}
2018-02-21 09:00:07 +00:00
type testAlertmanagerRetriever struct { }
2017-01-13 09:20:11 +00:00
2018-02-21 09:00:07 +00:00
func ( t testAlertmanagerRetriever ) Alertmanagers ( ) [ ] * url . URL {
return [ ] * url . URL {
{
Scheme : "http" ,
Host : "alertmanager.example.com:8080" ,
Path : "/api/v1/alerts" ,
} ,
}
}
func ( t testAlertmanagerRetriever ) DroppedAlertmanagers ( ) [ ] * url . URL {
return [ ] * url . URL {
{
Scheme : "http" ,
Host : "dropped.alertmanager.example.com:8080" ,
Path : "/api/v1/alerts" ,
} ,
}
2016-12-02 12:31:43 +00:00
}
2020-05-18 18:02:32 +00:00
func ( t testAlertmanagerRetriever ) toFactory ( ) func ( context . Context ) AlertmanagerRetriever {
return func ( context . Context ) AlertmanagerRetriever { return t }
}
// rulesRetrieverMock is a stub RulesRetriever serving pre-built alerting
// rules and rule groups for the API tests. CreateAlertingRules and
// CreateRuleGroups populate it.
type rulesRetrieverMock struct {
	alertingRules []*rules.AlertingRule
	ruleGroups    []*rules.Group
	testing       *testing.T
}
// CreateAlertingRules populates the mock with three alerting rules: two
// (test_metric3, test_metric4) with restore-for-state enabled, and one
// (test_metric5) with labels/annotations set and restore disabled.
func (m *rulesRetrieverMock) CreateAlertingRules() {
	expr1, err := parser.ParseExpr(`absent(test_metric3) != 1`)
	if err != nil {
		m.testing.Fatalf("unable to parse alert expression: %s", err)
	}
	expr2, err := parser.ParseExpr(`up == 1`)
	if err != nil {
		m.testing.Fatalf("Unable to parse alert expression: %s", err)
	}
	expr3, err := parser.ParseExpr(`vector(1)`)
	if err != nil {
		m.testing.Fatalf("Unable to parse alert expression: %s", err)
	}

	// Positional arguments: name, expr, holdDuration, keepFiringFor, labels,
	// annotations, externalLabels, externalURL, restored, logger.
	rule1 := rules.NewAlertingRule(
		"test_metric3",
		expr1,
		time.Second,
		0,
		labels.Labels{},
		labels.Labels{},
		labels.Labels{},
		"",
		true,
		log.NewNopLogger(),
	)
	rule2 := rules.NewAlertingRule(
		"test_metric4",
		expr2,
		time.Second,
		0,
		labels.Labels{},
		labels.Labels{},
		labels.Labels{},
		"",
		true,
		log.NewNopLogger(),
	)
	rule3 := rules.NewAlertingRule(
		"test_metric5",
		expr3,
		time.Second,
		0,
		labels.FromStrings("name", "tm5"),
		labels.Labels{},
		labels.FromStrings("name", "tm5"),
		"",
		false,
		log.NewNopLogger(),
	)

	var r []*rules.AlertingRule
	r = append(r, rule1)
	r = append(r, rule2)
	r = append(r, rule3)
	m.alertingRules = r
}
2023-10-18 02:02:03 +00:00
func ( m * rulesRetrieverMock ) CreateRuleGroups ( ) {
m . CreateAlertingRules ( )
arules := m . AlertingRules ( )
2019-08-09 01:35:39 +00:00
storage := teststorage . New ( m . testing )
2018-03-25 16:50:34 +00:00
defer storage . Close ( )
2018-10-02 11:59:19 +00:00
engineOpts := promql . EngineOpts {
2020-01-28 20:38:49 +00:00
Logger : nil ,
Reg : nil ,
MaxSamples : 10 ,
Timeout : 100 * time . Second ,
2018-10-02 11:59:19 +00:00
}
engine := promql . NewEngine ( engineOpts )
2018-03-25 16:50:34 +00:00
opts := & rules . ManagerOptions {
QueryFunc : rules . EngineQueryFunc ( engine , storage ) ,
Appendable : storage ,
Context : context . Background ( ) ,
Logger : log . NewNopLogger ( ) ,
2023-10-18 02:02:03 +00:00
NotifyFunc : func ( ctx context . Context , expr string , alerts ... * rules . Alert ) { } ,
2018-03-25 16:50:34 +00:00
}
var r [ ] rules . Rule
for _ , alertrule := range arules {
r = append ( r , alertrule )
}
2020-02-03 18:23:07 +00:00
recordingExpr , err := parser . ParseExpr ( ` vector(1) ` )
2018-06-27 07:15:17 +00:00
if err != nil {
m . testing . Fatalf ( "unable to parse alert expression: %s" , err )
}
recordingRule := rules . NewRecordingRule ( "recording-rule-1" , recordingExpr , labels . Labels { } )
r = append ( r , recordingRule )
2020-02-12 15:22:18 +00:00
group := rules . NewGroup ( rules . GroupOptions {
Name : "grp" ,
File : "/path/to/file" ,
Interval : time . Second ,
Rules : r ,
ShouldRestore : false ,
Opts : opts ,
} )
2023-10-18 02:02:03 +00:00
m . ruleGroups = [ ] * rules . Group { group }
}
// AlertingRules returns the pre-built alerting rules.
func (m *rulesRetrieverMock) AlertingRules() []*rules.AlertingRule {
	return m.alertingRules
}

// RuleGroups returns the pre-built rule groups.
func (m *rulesRetrieverMock) RuleGroups() []*rules.Group {
	return m.ruleGroups
}

// toFactory wraps the mock in a context-ignoring factory function.
func (m *rulesRetrieverMock) toFactory() func(context.Context) RulesRetriever {
	return func(context.Context) RulesRetriever { return m }
}
// samplePrometheusCfg is a minimal, all-empty Prometheus configuration
// served by the API's config endpoint in tests.
var samplePrometheusCfg = config.Config{
	GlobalConfig:       config.GlobalConfig{},
	AlertingConfig:     config.AlertingConfig{},
	RuleFiles:          []string{},
	ScrapeConfigs:      []*config.ScrapeConfig{},
	RemoteWriteConfigs: []*config.RemoteWriteConfig{},
	RemoteReadConfigs:  []*config.RemoteReadConfig{},
}
api: Added v1/status/flags endpoint. (#3864)
Endpoint URL: /api/v1/status/flags
Example Output:
```json
{
"status": "success",
"data": {
"alertmanager.notification-queue-capacity": "10000",
"alertmanager.timeout": "10s",
"completion-bash": "false",
"completion-script-bash": "false",
"completion-script-zsh": "false",
"config.file": "my_cool_prometheus.yaml",
"help": "false",
"help-long": "false",
"help-man": "false",
"log.level": "info",
"query.lookback-delta": "5m",
"query.max-concurrency": "20",
"query.timeout": "2m",
"storage.tsdb.max-block-duration": "36h",
"storage.tsdb.min-block-duration": "2h",
"storage.tsdb.no-lockfile": "false",
"storage.tsdb.path": "data/",
"storage.tsdb.retention": "15d",
"version": "false",
"web.console.libraries": "console_libraries",
"web.console.templates": "consoles",
"web.enable-admin-api": "false",
"web.enable-lifecycle": "false",
"web.external-url": "",
"web.listen-address": "0.0.0.0:9090",
"web.max-connections": "512",
"web.read-timeout": "5m",
"web.route-prefix": "/",
"web.user-assets": ""
}
}
```
Signed-off-by: Bartek Plotka <bwplotka@gmail.com>
// sampleFlagMap is the flag set served by the /status/flags endpoint in tests.
var sampleFlagMap = map[string]string{
	"flag1": "value1",
	"flag2": "value2",
}
// TestEndpoints runs the API test suite twice: once with the API wired
// directly to local test storage, and once with queries forwarded through a
// remote-read client to a test server backed by the same data.
func TestEndpoints(t *testing.T) {
	storage := promql.LoadedStorage(t, `
		load 1m
			test_metric1{foo="bar"} 0+100x100
			test_metric1{foo="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
			test_metric3{foo="bar", dup="1"} 1+0x100
			test_metric3{foo="boo", dup="1"} 1+0x100
			test_metric4{foo="bar", dup="1"} 1+0x100
			test_metric4{foo="boo", dup="1"} 1+0x100
			test_metric4{foo="boo"} 1+0x100
	`)
	t.Cleanup(func() { storage.Close() })

	start := time.Unix(0, 0)
	// Exemplars attached to the loaded series; appended to storage below.
	exemplars := []exemplar.QueryResult{
		{
			SeriesLabels: labels.FromStrings("__name__", "test_metric3", "foo", "boo", "dup", "1"),
			Exemplars: []exemplar.Exemplar{
				{
					Labels: labels.FromStrings("id", "abc"),
					Value:  10,
					Ts:     timestamp.FromTime(start.Add(2 * time.Second)),
				},
			},
		},
		{
			SeriesLabels: labels.FromStrings("__name__", "test_metric4", "foo", "bar", "dup", "1"),
			Exemplars: []exemplar.Exemplar{
				{
					Labels: labels.FromStrings("id", "lul"),
					Value:  10,
					Ts:     timestamp.FromTime(start.Add(4 * time.Second)),
				},
			},
		},
		{
			SeriesLabels: labels.FromStrings("__name__", "test_metric3", "foo", "boo", "dup", "1"),
			Exemplars: []exemplar.Exemplar{
				{
					Labels: labels.FromStrings("id", "abc2"),
					Value:  10,
					Ts:     timestamp.FromTime(start.Add(4053 * time.Millisecond)),
				},
			},
		},
		{
			SeriesLabels: labels.FromStrings("__name__", "test_metric4", "foo", "bar", "dup", "1"),
			Exemplars: []exemplar.Exemplar{
				{
					Labels: labels.FromStrings("id", "lul2"),
					Value:  10,
					Ts:     timestamp.FromTime(start.Add(4153 * time.Millisecond)),
				},
			},
		},
	}
	for _, ed := range exemplars {
		_, err := storage.AppendExemplar(0, ed.SeriesLabels, ed.Exemplars[0])
		require.NoError(t, err, "failed to add exemplar: %+v", ed.Exemplars[0])
	}

	// Fixed "now" shared by both sub-tests so time-dependent endpoints agree.
	now := time.Now()

	t.Run("local", func(t *testing.T) {
		algr := rulesRetrieverMock{}
		algr.testing = t

		algr.CreateAlertingRules()
		algr.CreateRuleGroups()

		// Evaluate once so rule state (health, last evaluation) is populated.
		g := algr.RuleGroups()
		g[0].Eval(context.Background(), time.Now())

		testTargetRetriever := setupTestTargetRetriever(t)

		api := &API{
			Queryable:             storage,
			QueryEngine:           testEngine,
			ExemplarQueryable:     storage.ExemplarQueryable(),
			targetRetriever:       testTargetRetriever.toFactory(),
			alertmanagerRetriever: testAlertmanagerRetriever{}.toFactory(),
			flagsMap:              sampleFlagMap,
			now:                   func() time.Time { return now },
			config:                func() config.Config { return samplePrometheusCfg },
			ready:                 func(f http.HandlerFunc) http.HandlerFunc { return f },
			rulesRetriever:        algr.toFactory(),
		}
		testEndpoints(t, api, testTargetRetriever, storage, true)
	})

	// Run all the API tests against an API that is wired to forward queries via
	// the remote read client to a test server, which in turn sends them to the
	// data from the test storage.
	t.Run("remote", func(t *testing.T) {
		server := setupRemote(storage)
		defer server.Close()

		u, err := url.Parse(server.URL)
		require.NoError(t, err)

		al := promlog.AllowedLevel{}
		require.NoError(t, al.Set("debug"))

		af := promlog.AllowedFormat{}
		require.NoError(t, af.Set("logfmt"))

		promlogConfig := promlog.Config{
			Level:  &al,
			Format: &af,
		}

		dbDir := t.TempDir()

		remote := remote.NewStorage(promlog.New(&promlogConfig), prometheus.DefaultRegisterer, func() (int64, error) {
			return 0, nil
		}, dbDir, 1*time.Second, nil)

		err = remote.ApplyConfig(&config.Config{
			RemoteReadConfigs: []*config.RemoteReadConfig{
				{
					URL:           &config_util.URL{URL: u},
					RemoteTimeout: model.Duration(1 * time.Second),
					ReadRecent:    true,
				},
			},
		})
		require.NoError(t, err)

		algr := rulesRetrieverMock{}
		algr.testing = t

		algr.CreateAlertingRules()
		algr.CreateRuleGroups()

		g := algr.RuleGroups()
		g[0].Eval(context.Background(), time.Now())

		testTargetRetriever := setupTestTargetRetriever(t)

		api := &API{
			Queryable:             remote,
			QueryEngine:           testEngine,
			ExemplarQueryable:     storage.ExemplarQueryable(),
			targetRetriever:       testTargetRetriever.toFactory(),
			alertmanagerRetriever: testAlertmanagerRetriever{}.toFactory(),
			flagsMap:              sampleFlagMap,
			now:                   func() time.Time { return now },
			config:                func() config.Config { return samplePrometheusCfg },
			ready:                 func(f http.HandlerFunc) http.HandlerFunc { return f },
			rulesRetriever:        algr.toFactory(),
		}

		testEndpoints(t, api, testTargetRetriever, storage, false)
	})
}
// byLabels implements sort.Interface to order label sets via labels.Compare.
type byLabels []labels.Labels

func (b byLabels) Len() int           { return len(b) }
func (b byLabels) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
func (b byLabels) Less(i, j int) bool { return labels.Compare(b[i], b[j]) < 0 }
// TestGetSeries exercises the /series endpoint with various matcher sets,
// plus queryables that inject generic and storage-level errors.
func TestGetSeries(t *testing.T) {
	// TestEndpoints doesn't have enough label names to test api.labelNames
	// endpoint properly. Hence we test it separately.
	storage := promql.LoadedStorage(t, `
		load 1m
			test_metric1{foo1="bar", baz="abc"} 0+100x100
			test_metric1{foo2="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
			test_metric2{foo="boo", xyz="qwerty"} 1+0x100
			test_metric2{foo="baz", abc="qwerty"} 1+0x100
	`)
	t.Cleanup(func() { storage.Close() })

	api := &API{
		Queryable: storage,
	}

	// request builds a /series request carrying the given match[] parameters.
	request := func(method string, matchers ...string) (*http.Request, error) {
		u, err := url.Parse("http://example.com")
		require.NoError(t, err)
		q := u.Query()
		for _, matcher := range matchers {
			q.Add("match[]", matcher)
		}
		u.RawQuery = q.Encode()

		r, err := http.NewRequest(method, u.String(), nil)
		if method == http.MethodPost {
			r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
		}
		return r, err
	}

	for _, tc := range []struct {
		name              string
		api               *API
		matchers          []string
		expected          []labels.Labels
		expectedErrorType errorType
	}{
		{
			name:              "no matchers",
			expectedErrorType: errorBadData,
			api:               api,
		},
		{
			name:     "non empty label matcher",
			matchers: []string{`{foo=~".+"}`},
			expected: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
				labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
			},
			api: api,
		},
		{
			name:     "exact label matcher",
			matchers: []string{`{foo="boo"}`},
			expected: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
				labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
			},
			api: api,
		},
		{
			name:     "two matchers",
			matchers: []string{`{foo="boo"}`, `{foo="baz"}`},
			expected: []labels.Labels{
				labels.FromStrings("__name__", "test_metric2", "abc", "qwerty", "foo", "baz"),
				labels.FromStrings("__name__", "test_metric2", "foo", "boo"),
				labels.FromStrings("__name__", "test_metric2", "foo", "boo", "xyz", "qwerty"),
			},
			api: api,
		},
		{
			name:              "exec error type",
			matchers:          []string{`{foo="boo"}`, `{foo="baz"}`},
			expectedErrorType: errorExec,
			api: &API{
				Queryable: errorTestQueryable{err: fmt.Errorf("generic")},
			},
		},
		{
			name:              "storage error type",
			matchers:          []string{`{foo="boo"}`, `{foo="baz"}`},
			expectedErrorType: errorInternal,
			api: &API{
				Queryable: errorTestQueryable{err: promql.ErrStorage{Err: fmt.Errorf("generic")}},
			},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			ctx := context.Background()
			req, err := request(http.MethodGet, tc.matchers...)
			require.NoError(t, err)
			res := tc.api.series(req.WithContext(ctx))
			assertAPIError(t, res.err, tc.expectedErrorType)
			if tc.expectedErrorType == errorNone {
				// Sort both sides before comparing: series order is not guaranteed.
				r := res.data.([]labels.Labels)
				sort.Sort(byLabels(tc.expected))
				sort.Sort(byLabels(r))
				require.Equal(t, tc.expected, r)
			}
		})
	}
}
// TestQueryExemplars exercises the /query_exemplars endpoint, including the
// error types produced for generic failures and promql.ErrStorage.
func TestQueryExemplars(t *testing.T) {
	start := time.Unix(0, 0)
	storage := promql.LoadedStorage(t, `
		load 1m
			test_metric1{foo="bar"} 0+100x100
			test_metric1{foo="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
			test_metric3{foo="bar", dup="1"} 1+0x100
			test_metric3{foo="boo", dup="1"} 1+0x100
			test_metric4{foo="bar", dup="1"} 1+0x100
			test_metric4{foo="boo", dup="1"} 1+0x100
			test_metric4{foo="boo"} 1+0x100
	`)
	t.Cleanup(func() { storage.Close() })

	api := &API{
		Queryable:         storage,
		QueryEngine:       testEngine,
		ExemplarQueryable: storage.ExemplarQueryable(),
	}

	// request builds a /query_exemplars request with the given query values.
	request := func(method string, qs url.Values) (*http.Request, error) {
		u, err := url.Parse("http://example.com")
		require.NoError(t, err)
		u.RawQuery = qs.Encode()

		r, err := http.NewRequest(method, u.String(), nil)
		if method == http.MethodPost {
			r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
		}
		return r, err
	}

	for _, tc := range []struct {
		name              string
		query             url.Values
		exemplars         []exemplar.QueryResult
		api               *API
		expectedErrorType errorType
	}{
		{
			name: "no error",
			api:  api,
			query: url.Values{
				"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
				"start": []string{"0"},
				"end":   []string{"4"},
			},
			exemplars: []exemplar.QueryResult{
				{
					SeriesLabels: labels.FromStrings("__name__", "test_metric3", "foo", "boo", "dup", "1"),
					Exemplars: []exemplar.Exemplar{
						{
							Labels: labels.FromStrings("id", "abc"),
							Value:  10,
							Ts:     timestamp.FromTime(start.Add(0 * time.Second)),
						},
					},
				},
				{
					SeriesLabels: labels.FromStrings("__name__", "test_metric4", "foo", "bar", "dup", "1"),
					Exemplars: []exemplar.Exemplar{
						{
							Labels: labels.FromStrings("id", "lul"),
							Value:  10,
							Ts:     timestamp.FromTime(start.Add(3 * time.Second)),
						},
					},
				},
			},
		},
		{
			name:              "should return errorExec upon genetic error",
			expectedErrorType: errorExec,
			api: &API{
				ExemplarQueryable: errorTestQueryable{err: fmt.Errorf("generic")},
			},
			query: url.Values{
				"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
				"start": []string{"0"},
				"end":   []string{"4"},
			},
		},
		{
			name:              "should return errorInternal err type is ErrStorage",
			expectedErrorType: errorInternal,
			api: &API{
				ExemplarQueryable: errorTestQueryable{err: promql.ErrStorage{Err: fmt.Errorf("generic")}},
			},
			query: url.Values{
				"query": []string{`test_metric3{foo="boo"} - test_metric4{foo="bar"}`},
				"start": []string{"0"},
				"end":   []string{"4"},
			},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			es := storage
			ctx := context.Background()

			// Append the case's expected exemplars before querying them back.
			for _, te := range tc.exemplars {
				for _, e := range te.Exemplars {
					_, err := es.AppendExemplar(0, te.SeriesLabels, e)
					if err != nil {
						t.Fatal(err)
					}
				}
			}

			req, err := request(http.MethodGet, tc.query)
			require.NoError(t, err)
			res := tc.api.queryExemplars(req.WithContext(ctx))
			assertAPIError(t, res.err, tc.expectedErrorType)

			if tc.expectedErrorType == errorNone {
				assertAPIResponse(t, res.data, tc.exemplars)
			}
		})
	}
}
// TestLabelNames exercises the /labels endpoint with matcher combinations
// and error-injecting queryables, over both GET and POST.
func TestLabelNames(t *testing.T) {
	// TestEndpoints doesn't have enough label names to test api.labelNames
	// endpoint properly. Hence we test it separately.
	storage := promql.LoadedStorage(t, `
		load 1m
			test_metric1{foo1="bar", baz="abc"} 0+100x100
			test_metric1{foo2="boo"} 1+0x100
			test_metric2{foo="boo"} 1+0x100
			test_metric2{foo="boo", xyz="qwerty"} 1+0x100
			test_metric2{foo="baz", abc="qwerty"} 1+0x100
	`)
	t.Cleanup(func() { storage.Close() })

	api := &API{
		Queryable: storage,
	}

	// request builds a /labels request carrying the given match[] parameters.
	request := func(method string, matchers ...string) (*http.Request, error) {
		u, err := url.Parse("http://example.com")
		require.NoError(t, err)
		q := u.Query()
		for _, matcher := range matchers {
			q.Add("match[]", matcher)
		}
		u.RawQuery = q.Encode()

		r, err := http.NewRequest(method, u.String(), nil)
		if method == http.MethodPost {
			r.Header.Set("Content-Type", "application/x-www-form-urlencoded")
		}
		return r, err
	}

	for _, tc := range []struct {
		name              string
		api               *API
		matchers          []string
		expected          []string
		expectedErrorType errorType
	}{
		{
			name:     "no matchers",
			expected: []string{"__name__", "abc", "baz", "foo", "foo1", "foo2", "xyz"},
			api:      api,
		},
		{
			name:     "non empty label matcher",
			matchers: []string{`{foo=~".+"}`},
			expected: []string{"__name__", "abc", "foo", "xyz"},
			api:      api,
		},
		{
			name:     "exact label matcher",
			matchers: []string{`{foo="boo"}`},
			expected: []string{"__name__", "foo", "xyz"},
			api:      api,
		},
		{
			name:     "two matchers",
			matchers: []string{`{foo="boo"}`, `{foo="baz"}`},
			expected: []string{"__name__", "abc", "foo", "xyz"},
			api:      api,
		},
		{
			name:              "exec error type",
			matchers:          []string{`{foo="boo"}`, `{foo="baz"}`},
			expectedErrorType: errorExec,
			api: &API{
				Queryable: errorTestQueryable{err: fmt.Errorf("generic")},
			},
		},
		{
			name:              "storage error type",
			matchers:          []string{`{foo="boo"}`, `{foo="baz"}`},
			expectedErrorType: errorInternal,
			api: &API{
				Queryable: errorTestQueryable{err: promql.ErrStorage{Err: fmt.Errorf("generic")}},
			},
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			// The endpoint must behave identically for GET and POST.
			for _, method := range []string{http.MethodGet, http.MethodPost} {
				ctx := context.Background()
				req, err := request(method, tc.matchers...)
				require.NoError(t, err)
				res := tc.api.labelNames(req.WithContext(ctx))
				assertAPIError(t, res.err, tc.expectedErrorType)
				if tc.expectedErrorType == errorNone {
					assertAPIResponse(t, res.data, tc.expected)
				}
			}
		})
	}
}
2022-02-10 14:17:05 +00:00
type testStats struct {
Custom string ` json:"custom" `
}
func ( testStats ) Builtin ( ) ( _ stats . BuiltinStats ) {
return
}
2022-02-02 02:07:23 +00:00
func TestStats ( t * testing . T ) {
2023-08-18 18:48:59 +00:00
storage := teststorage . New ( t )
t . Cleanup ( func ( ) { storage . Close ( ) } )
2022-02-02 02:07:23 +00:00
api := & API {
2023-08-18 18:48:59 +00:00
Queryable : storage ,
QueryEngine : testEngine ,
2022-02-02 02:07:23 +00:00
now : func ( ) time . Time {
return time . Unix ( 123 , 0 )
} ,
}
2022-02-10 14:17:05 +00:00
request := func ( method , param string ) ( * http . Request , error ) {
2022-02-02 02:07:23 +00:00
u , err := url . Parse ( "http://example.com" )
require . NoError ( t , err )
q := u . Query ( )
q . Add ( "stats" , param )
q . Add ( "query" , "up" )
q . Add ( "start" , "0" )
q . Add ( "end" , "100" )
q . Add ( "step" , "10" )
u . RawQuery = q . Encode ( )
r , err := http . NewRequest ( method , u . String ( ) , nil )
if method == http . MethodPost {
r . Header . Set ( "Content-Type" , "application/x-www-form-urlencoded" )
}
return r , err
}
for _ , tc := range [ ] struct {
name string
2022-02-10 14:17:05 +00:00
renderer StatsRenderer
2022-02-02 02:07:23 +00:00
param string
expected func ( * testing . T , interface { } )
} {
{
name : "stats is blank" ,
param : "" ,
expected : func ( t * testing . T , i interface { } ) {
2023-12-07 11:35:01 +00:00
require . IsType ( t , & QueryData { } , i )
2023-02-02 04:29:13 +00:00
qd := i . ( * QueryData )
2022-02-02 02:07:23 +00:00
require . Nil ( t , qd . Stats )
} ,
} ,
{
name : "stats is true" ,
param : "true" ,
expected : func ( t * testing . T , i interface { } ) {
2023-12-07 11:35:01 +00:00
require . IsType ( t , & QueryData { } , i )
2023-02-02 04:29:13 +00:00
qd := i . ( * QueryData )
2022-02-02 02:07:23 +00:00
require . NotNil ( t , qd . Stats )
2022-02-10 14:17:05 +00:00
qs := qd . Stats . Builtin ( )
2022-02-02 02:07:23 +00:00
require . NotNil ( t , qs . Timings )
require . Greater ( t , qs . Timings . EvalTotalTime , float64 ( 0 ) )
require . NotNil ( t , qs . Samples )
require . NotNil ( t , qs . Samples . TotalQueryableSamples )
require . Nil ( t , qs . Samples . TotalQueryableSamplesPerStep )
} ,
} ,
{
name : "stats is all" ,
param : "all" ,
expected : func ( t * testing . T , i interface { } ) {
2023-12-07 11:35:01 +00:00
require . IsType ( t , & QueryData { } , i )
2023-02-02 04:29:13 +00:00
qd := i . ( * QueryData )
2022-02-02 02:07:23 +00:00
require . NotNil ( t , qd . Stats )
2022-02-10 14:17:05 +00:00
qs := qd . Stats . Builtin ( )
2022-02-02 02:07:23 +00:00
require . NotNil ( t , qs . Timings )
require . Greater ( t , qs . Timings . EvalTotalTime , float64 ( 0 ) )
require . NotNil ( t , qs . Samples )
require . NotNil ( t , qs . Samples . TotalQueryableSamples )
require . NotNil ( t , qs . Samples . TotalQueryableSamplesPerStep )
} ,
} ,
2022-02-10 14:17:05 +00:00
{
name : "custom handler with known value" ,
renderer : func ( ctx context . Context , s * stats . Statistics , p string ) stats . QueryStats {
if p == "known" {
return testStats { "Custom Value" }
}
return nil
} ,
param : "known" ,
expected : func ( t * testing . T , i interface { } ) {
2023-12-07 11:35:01 +00:00
require . IsType ( t , & QueryData { } , i )
2023-02-02 04:29:13 +00:00
qd := i . ( * QueryData )
2022-02-10 14:17:05 +00:00
require . NotNil ( t , qd . Stats )
j , err := json . Marshal ( qd . Stats )
require . NoError ( t , err )
2023-12-07 11:35:01 +00:00
require . JSONEq ( t , ` { "custom":"Custom Value"} ` , string ( j ) )
2022-02-10 14:17:05 +00:00
} ,
} ,
2022-02-02 02:07:23 +00:00
} {
t . Run ( tc . name , func ( t * testing . T ) {
2022-02-10 14:17:05 +00:00
before := api . statsRenderer
defer func ( ) { api . statsRenderer = before } ( )
api . statsRenderer = tc . renderer
2022-02-02 02:07:23 +00:00
for _ , method := range [ ] string { http . MethodGet , http . MethodPost } {
ctx := context . Background ( )
req , err := request ( method , tc . param )
require . NoError ( t , err )
res := api . query ( req . WithContext ( ctx ) )
assertAPIError ( t , res . err , "" )
tc . expected ( t , res . data )
res = api . queryRange ( req . WithContext ( ctx ) )
assertAPIError ( t , res . err , "" )
tc . expected ( t , res . data )
}
} )
}
}
2019-12-04 19:33:01 +00:00
func setupTestTargetRetriever ( t * testing . T ) * testTargetRetriever {
t . Helper ( )
targets := [ ] * testTargetParams {
{
Identifier : "test" ,
Labels : labels . FromMap ( map [ string ] string {
2021-08-31 15:37:32 +00:00
model . SchemeLabel : "http" ,
model . AddressLabel : "example.com:8080" ,
model . MetricsPathLabel : "/metrics" ,
model . JobLabel : "test" ,
model . ScrapeIntervalLabel : "15s" ,
model . ScrapeTimeoutLabel : "5s" ,
2019-12-04 19:33:01 +00:00
} ) ,
2022-02-27 14:19:21 +00:00
DiscoveredLabels : labels . EmptyLabels ( ) ,
2019-12-04 19:33:01 +00:00
Params : url . Values { } ,
Reports : [ ] * testReport { { scrapeStart , 70 * time . Millisecond , nil } } ,
Active : true ,
} ,
{
Identifier : "blackbox" ,
Labels : labels . FromMap ( map [ string ] string {
2021-08-31 15:37:32 +00:00
model . SchemeLabel : "http" ,
model . AddressLabel : "localhost:9115" ,
model . MetricsPathLabel : "/probe" ,
model . JobLabel : "blackbox" ,
model . ScrapeIntervalLabel : "20s" ,
model . ScrapeTimeoutLabel : "10s" ,
2019-12-04 19:33:01 +00:00
} ) ,
2022-02-27 14:19:21 +00:00
DiscoveredLabels : labels . EmptyLabels ( ) ,
2019-12-04 19:33:01 +00:00
Params : url . Values { "target" : [ ] string { "example.com" } } ,
Reports : [ ] * testReport { { scrapeStart , 100 * time . Millisecond , errors . New ( "failed" ) } } ,
Active : true ,
} ,
{
Identifier : "blackbox" ,
2022-02-27 14:19:21 +00:00
Labels : labels . EmptyLabels ( ) ,
2019-12-04 19:33:01 +00:00
DiscoveredLabels : labels . FromMap ( map [ string ] string {
2021-08-31 15:37:32 +00:00
model . SchemeLabel : "http" ,
model . AddressLabel : "http://dropped.example.com:9115" ,
model . MetricsPathLabel : "/probe" ,
model . JobLabel : "blackbox" ,
model . ScrapeIntervalLabel : "30s" ,
model . ScrapeTimeoutLabel : "15s" ,
2019-12-04 19:33:01 +00:00
} ) ,
Params : url . Values { } ,
Active : false ,
} ,
}
2019-12-10 14:56:16 +00:00
return newTestTargetRetriever ( targets )
2019-12-04 19:33:01 +00:00
}
2018-06-16 17:26:37 +00:00
func setupRemote ( s storage . Storage ) * httptest . Server {
handler := http . HandlerFunc ( func ( w http . ResponseWriter , r * http . Request ) {
req , err := remote . DecodeReadRequest ( r )
if err != nil {
http . Error ( w , err . Error ( ) , http . StatusBadRequest )
return
}
resp := prompb . ReadResponse {
Results : make ( [ ] * prompb . QueryResult , len ( req . Queries ) ) ,
}
for i , query := range req . Queries {
2019-08-19 20:16:10 +00:00
matchers , err := remote . FromLabelMatchers ( query . Matchers )
2018-06-16 17:26:37 +00:00
if err != nil {
http . Error ( w , err . Error ( ) , http . StatusBadRequest )
return
}
2020-03-12 09:36:09 +00:00
var hints * storage . SelectHints
2019-08-19 20:16:10 +00:00
if query . Hints != nil {
2020-03-12 09:36:09 +00:00
hints = & storage . SelectHints {
2019-08-19 20:16:10 +00:00
Start : query . Hints . StartMs ,
End : query . Hints . EndMs ,
Step : query . Hints . StepMs ,
Func : query . Hints . Func ,
}
}
2023-09-12 10:37:38 +00:00
querier , err := s . Querier ( query . StartTimestampMs , query . EndTimestampMs )
2018-06-16 17:26:37 +00:00
if err != nil {
http . Error ( w , err . Error ( ) , http . StatusInternalServerError )
return
}
defer querier . Close ( )
2023-09-12 10:37:38 +00:00
set := querier . Select ( r . Context ( ) , false , hints , matchers ... )
2020-06-09 16:57:31 +00:00
resp . Results [ i ] , _ , err = remote . ToQueryResult ( set , 1e6 )
2018-06-16 17:26:37 +00:00
if err != nil {
http . Error ( w , err . Error ( ) , http . StatusInternalServerError )
return
}
}
if err := remote . EncodeReadResponse ( & resp , w ) ; err != nil {
http . Error ( w , err . Error ( ) , http . StatusInternalServerError )
return
}
} )
return httptest . NewServer ( handler )
}
2021-03-16 09:47:45 +00:00
func testEndpoints ( t * testing . T , api * API , tr * testTargetRetriever , es storage . ExemplarStorage , testLabelAPI bool ) {
2016-12-30 09:43:44 +00:00
start := time . Unix ( 0 , 0 )
2019-12-10 14:56:16 +00:00
type targetMetadata struct {
identifier string
metadata [ ] scrape . MetricMetadata
}
2018-06-16 17:26:37 +00:00
type test struct {
2023-06-12 15:17:20 +00:00
endpoint apiFunc
params map [ string ] string
query url . Values
response interface { }
responseLen int
responseMetadataTotal int
errType errorType
sorter func ( interface { } )
metadata [ ] targetMetadata
exemplars [ ] exemplar . QueryResult
2023-10-18 02:02:03 +00:00
zeroFunc func ( interface { } )
}
rulesZeroFunc := func ( i interface { } ) {
if i != nil {
v := i . ( * RuleDiscovery )
for _ , ruleGroup := range v . RuleGroups {
ruleGroup . EvaluationTime = float64 ( 0 )
ruleGroup . LastEvaluation = time . Time { }
for k , rule := range ruleGroup . Rules {
switch r := rule . ( type ) {
case AlertingRule :
r . LastEvaluation = time . Time { }
r . EvaluationTime = float64 ( 0 )
r . LastError = ""
r . Health = "ok"
for _ , alert := range r . Alerts {
alert . ActiveAt = nil
}
ruleGroup . Rules [ k ] = r
case RecordingRule :
r . LastEvaluation = time . Time { }
r . EvaluationTime = float64 ( 0 )
r . LastError = ""
r . Health = "ok"
ruleGroup . Rules [ k ] = r
}
}
}
}
2018-06-16 17:26:37 +00:00
}
2021-10-22 08:06:44 +00:00
tests := [ ] test {
2015-06-04 16:07:57 +00:00
{
endpoint : api . query ,
query : url . Values {
"query" : [ ] string { "2" } ,
2016-12-30 09:43:44 +00:00
"time" : [ ] string { "123.4" } ,
2015-06-04 16:07:57 +00:00
} ,
2023-02-02 04:29:13 +00:00
response : & QueryData {
2020-02-03 18:23:07 +00:00
ResultType : parser . ValueTypeScalar ,
2016-12-30 09:43:44 +00:00
Result : promql . Scalar {
V : 2 ,
T : timestamp . FromTime ( start . Add ( 123 * time . Second + 400 * time . Millisecond ) ) ,
2015-06-04 16:07:57 +00:00
} ,
} ,
} ,
{
endpoint : api . query ,
query : url . Values {
"query" : [ ] string { "0.333" } ,
"time" : [ ] string { "1970-01-01T00:02:03Z" } ,
} ,
2023-02-02 04:29:13 +00:00
response : & QueryData {
2020-02-03 18:23:07 +00:00
ResultType : parser . ValueTypeScalar ,
2016-12-30 09:43:44 +00:00
Result : promql . Scalar {
V : 0.333 ,
T : timestamp . FromTime ( start . Add ( 123 * time . Second ) ) ,
2015-06-04 16:07:57 +00:00
} ,
} ,
} ,
{
endpoint : api . query ,
query : url . Values {
"query" : [ ] string { "0.333" } ,
"time" : [ ] string { "1970-01-01T01:02:03+01:00" } ,
} ,
2023-02-02 04:29:13 +00:00
response : & QueryData {
2020-02-03 18:23:07 +00:00
ResultType : parser . ValueTypeScalar ,
2016-12-30 09:43:44 +00:00
Result : promql . Scalar {
V : 0.333 ,
T : timestamp . FromTime ( start . Add ( 123 * time . Second ) ) ,
2015-06-04 16:07:57 +00:00
} ,
} ,
} ,
2015-11-11 19:46:57 +00:00
{
endpoint : api . query ,
query : url . Values {
"query" : [ ] string { "0.333" } ,
} ,
2023-02-02 04:29:13 +00:00
response : & QueryData {
2020-02-03 18:23:07 +00:00
ResultType : parser . ValueTypeScalar ,
2016-12-30 09:43:44 +00:00
Result : promql . Scalar {
V : 0.333 ,
2018-06-16 17:26:37 +00:00
T : timestamp . FromTime ( api . now ( ) ) ,
2015-11-11 19:46:57 +00:00
} ,
} ,
} ,
2015-06-09 11:44:49 +00:00
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "0" } ,
"end" : [ ] string { "2" } ,
"step" : [ ] string { "1" } ,
} ,
2023-02-02 04:29:13 +00:00
response : & QueryData {
2020-02-03 18:23:07 +00:00
ResultType : parser . ValueTypeMatrix ,
2016-12-30 09:43:44 +00:00
Result : promql . Matrix {
promql . Series {
promql: Separate `Point` into `FPoint` and `HPoint`
In other words: Instead of having a “polymorphous” `Point` that can
either contain a float value or a histogram value, use an `FPoint` for
floats and an `HPoint` for histograms.
This seemingly small change has a _lot_ of repercussions throughout
the codebase.
The idea here is to avoid the increase in size of `Point` arrays that
happened after native histograms had been added.
The higher-level data structures (`Sample`, `Series`, etc.) are still
“polymorphous”. The same idea could be applied to them, but at each
step the trade-offs needed to be evaluated.
The idea with this change is to do the minimum necessary to get back
to pre-histogram performance for functions that do not touch
histograms. Here are comparisons for the `changes` function. The test
data doesn't include histograms yet. Ideally, there would be no change
in the benchmark result at all.
First runtime v2.39 compared to directly prior to this commit:
```
name old time/op new time/op delta
RangeQuery/expr=changes(a_one[1d]),steps=1-16 391µs ± 2% 542µs ± 1% +38.58% (p=0.000 n=9+8)
RangeQuery/expr=changes(a_one[1d]),steps=10-16 452µs ± 2% 617µs ± 2% +36.48% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_one[1d]),steps=100-16 1.12ms ± 1% 1.36ms ± 2% +21.58% (p=0.000 n=8+10)
RangeQuery/expr=changes(a_one[1d]),steps=1000-16 7.83ms ± 1% 8.94ms ± 1% +14.21% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_ten[1d]),steps=1-16 2.98ms ± 0% 3.30ms ± 1% +10.67% (p=0.000 n=9+10)
RangeQuery/expr=changes(a_ten[1d]),steps=10-16 3.66ms ± 1% 4.10ms ± 1% +11.82% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_ten[1d]),steps=100-16 10.5ms ± 0% 11.8ms ± 1% +12.50% (p=0.000 n=8+10)
RangeQuery/expr=changes(a_ten[1d]),steps=1000-16 77.6ms ± 1% 87.4ms ± 1% +12.63% (p=0.000 n=9+9)
RangeQuery/expr=changes(a_hundred[1d]),steps=1-16 30.4ms ± 2% 32.8ms ± 1% +8.01% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=10-16 37.1ms ± 2% 40.6ms ± 2% +9.64% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=100-16 105ms ± 1% 117ms ± 1% +11.69% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=1000-16 783ms ± 3% 876ms ± 1% +11.83% (p=0.000 n=9+10)
```
And then runtime v2.39 compared to after this commit:
```
name old time/op new time/op delta
RangeQuery/expr=changes(a_one[1d]),steps=1-16 391µs ± 2% 547µs ± 1% +39.84% (p=0.000 n=9+8)
RangeQuery/expr=changes(a_one[1d]),steps=10-16 452µs ± 2% 616µs ± 2% +36.15% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_one[1d]),steps=100-16 1.12ms ± 1% 1.26ms ± 1% +12.20% (p=0.000 n=8+10)
RangeQuery/expr=changes(a_one[1d]),steps=1000-16 7.83ms ± 1% 7.95ms ± 1% +1.59% (p=0.000 n=10+8)
RangeQuery/expr=changes(a_ten[1d]),steps=1-16 2.98ms ± 0% 3.38ms ± 2% +13.49% (p=0.000 n=9+10)
RangeQuery/expr=changes(a_ten[1d]),steps=10-16 3.66ms ± 1% 4.02ms ± 1% +9.80% (p=0.000 n=10+9)
RangeQuery/expr=changes(a_ten[1d]),steps=100-16 10.5ms ± 0% 10.8ms ± 1% +3.08% (p=0.000 n=8+10)
RangeQuery/expr=changes(a_ten[1d]),steps=1000-16 77.6ms ± 1% 78.1ms ± 1% +0.58% (p=0.035 n=9+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=1-16 30.4ms ± 2% 33.5ms ± 4% +10.18% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=10-16 37.1ms ± 2% 40.0ms ± 1% +7.98% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=100-16 105ms ± 1% 107ms ± 1% +1.92% (p=0.000 n=10+10)
RangeQuery/expr=changes(a_hundred[1d]),steps=1000-16 783ms ± 3% 775ms ± 1% -1.02% (p=0.019 n=9+9)
```
In summary, the runtime doesn't really improve with this change for
queries with just a few steps. For queries with many steps, this
commit essentially reinstates the old performance. This is good
because the many-step queries are the one that matter most (longest
absolute runtime).
In terms of allocations, though, this commit doesn't make a dent at
all (numbers not shown). The reason is that most of the allocations
happen in the sampleRingIterator (in the storage package), which has
to be addressed in a separate commit.
Signed-off-by: beorn7 <beorn@grafana.com>
2022-10-28 14:58:40 +00:00
Floats : [ ] promql . FPoint {
{ F : 0 , T : timestamp . FromTime ( start ) } ,
{ F : 1 , T : timestamp . FromTime ( start . Add ( 1 * time . Second ) ) } ,
{ F : 2 , T : timestamp . FromTime ( start . Add ( 2 * time . Second ) ) } ,
2015-06-09 11:44:49 +00:00
} ,
2022-02-27 14:19:21 +00:00
// No Metric returned - use zero value for comparison.
2015-06-09 11:44:49 +00:00
} ,
} ,
} ,
} ,
// Missing query params in range queries.
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"end" : [ ] string { "2" } ,
"step" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "0" } ,
"step" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "0" } ,
"end" : [ ] string { "2" } ,
} ,
errType : errorBadData ,
} ,
// Bad query expression.
{
endpoint : api . query ,
query : url . Values {
"query" : [ ] string { "invalid][query" } ,
"time" : [ ] string { "1970-01-01T01:02:03+01:00" } ,
} ,
errType : errorBadData ,
} ,
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "invalid][query" } ,
"start" : [ ] string { "0" } ,
"end" : [ ] string { "100" } ,
"step" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
2017-03-16 14:16:20 +00:00
// Invalid step.
2016-08-16 13:10:02 +00:00
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "1" } ,
"end" : [ ] string { "2" } ,
"step" : [ ] string { "0" } ,
} ,
errType : errorBadData ,
} ,
2017-03-16 14:16:20 +00:00
// Start after end.
2016-11-01 13:25:34 +00:00
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "2" } ,
"end" : [ ] string { "1" } ,
"step" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
2017-03-16 14:16:20 +00:00
// Start overflows int64 internally.
{
endpoint : api . queryRange ,
query : url . Values {
"query" : [ ] string { "time()" } ,
"start" : [ ] string { "148966367200.372" } ,
"end" : [ ] string { "1489667272.372" } ,
"step" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
2022-07-20 12:55:09 +00:00
{
endpoint : api . formatQuery ,
query : url . Values {
"query" : [ ] string { "foo+bar" } ,
} ,
response : "foo + bar" ,
} ,
{
endpoint : api . formatQuery ,
query : url . Values {
"query" : [ ] string { "invalid_expression/" } ,
} ,
errType : errorBadData ,
} ,
2015-06-09 14:09:31 +00:00
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric2" , "foo" , "boo" ) ,
2015-06-09 14:09:31 +00:00
} ,
} ,
2020-12-15 17:24:57 +00:00
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` { foo=""} ` } ,
} ,
errType : errorBadData ,
} ,
2015-06-09 14:09:31 +00:00
{
endpoint : api . series ,
query : url . Values {
2015-11-05 10:23:43 +00:00
"match[]" : [ ] string { ` test_metric1 { foo=~".+o"} ` } ,
2015-06-09 14:09:31 +00:00
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric1" , "foo" , "boo" ) ,
2015-06-09 14:09:31 +00:00
} ,
} ,
{
endpoint : api . series ,
query : url . Values {
2016-12-30 09:43:44 +00:00
"match[]" : [ ] string { ` test_metric1 { foo=~".+o$"} ` , ` test_metric1 { foo=~".+o"} ` } ,
2015-06-09 14:09:31 +00:00
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric1" , "foo" , "boo" ) ,
2015-06-09 14:09:31 +00:00
} ,
} ,
2020-08-28 23:21:39 +00:00
// Try to overlap the selected series set as much as possible to test the result de-duplication works well.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric4 { foo=~".+o$"} ` , ` test_metric4 { dup=~"^1"} ` } ,
} ,
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric4" , "dup" , "1" , "foo" , "bar" ) ,
labels . FromStrings ( "__name__" , "test_metric4" , "dup" , "1" , "foo" , "boo" ) ,
labels . FromStrings ( "__name__" , "test_metric4" , "foo" , "boo" ) ,
} ,
} ,
2015-06-09 14:09:31 +00:00
{
endpoint : api . series ,
query : url . Values {
2015-11-05 10:23:43 +00:00
"match[]" : [ ] string { ` test_metric1 { foo=~".+o"} ` , ` none ` } ,
2015-06-09 14:09:31 +00:00
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric1" , "foo" , "boo" ) ,
2015-06-09 14:09:31 +00:00
} ,
} ,
2016-05-11 21:59:52 +00:00
// Start and end before series starts.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "-2" } ,
"end" : [ ] string { "-1" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels { } ,
2016-05-11 21:59:52 +00:00
} ,
// Start and end after series ends.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "100000" } ,
"end" : [ ] string { "100001" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels { } ,
2016-05-11 21:59:52 +00:00
} ,
// Start before series starts, end after series ends.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "-1" } ,
"end" : [ ] string { "100000" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric2" , "foo" , "boo" ) ,
2016-05-11 21:59:52 +00:00
} ,
} ,
// Start and end within series.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric2" , "foo" , "boo" ) ,
2016-05-11 21:59:52 +00:00
} ,
} ,
// Start within series, end after.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100000" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric2" , "foo" , "boo" ) ,
2016-05-11 21:59:52 +00:00
} ,
} ,
// Start before series, end within series.
{
endpoint : api . series ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "-1" } ,
"end" : [ ] string { "1" } ,
} ,
2016-12-30 09:43:44 +00:00
response : [ ] labels . Labels {
labels . FromStrings ( "__name__" , "test_metric2" , "foo" , "boo" ) ,
2016-05-11 21:59:52 +00:00
} ,
} ,
2015-06-09 14:09:31 +00:00
// Missing match[] query params in series requests.
{
endpoint : api . series ,
errType : errorBadData ,
} ,
{
endpoint : api . dropSeries ,
2017-07-06 12:38:40 +00:00
errType : errorInternal ,
2015-06-09 14:09:31 +00:00
} ,
2017-05-11 15:09:24 +00:00
{
2016-12-02 12:31:43 +00:00
endpoint : api . targets ,
2017-01-13 16:15:04 +00:00
response : & TargetDiscovery {
2018-10-25 08:19:20 +00:00
ActiveTargets : [ ] * Target {
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "blackbox" ,
2018-09-26 09:20:56 +00:00
} ,
2019-11-11 21:42:24 +00:00
ScrapePool : "blackbox" ,
ScrapeURL : "http://localhost:9115/probe?target=example.com" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://localhost:9115/probe?target=example.com" ,
2019-11-11 21:42:24 +00:00
Health : "down" ,
2020-02-17 17:19:15 +00:00
LastError : "failed: missing port in address" ,
2019-11-11 21:42:24 +00:00
LastScrape : scrapeStart ,
LastScrapeDuration : 0.1 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "20s" ,
ScrapeTimeout : "10s" ,
2018-10-25 08:19:20 +00:00
} ,
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "test" ,
} ,
2019-11-11 21:42:24 +00:00
ScrapePool : "test" ,
ScrapeURL : "http://example.com:8080/metrics" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://example.com:8080/metrics" ,
2019-11-11 21:42:24 +00:00
Health : "up" ,
LastError : "" ,
LastScrape : scrapeStart ,
LastScrapeDuration : 0.07 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "15s" ,
ScrapeTimeout : "5s" ,
2019-11-11 21:42:24 +00:00
} ,
} ,
DroppedTargets : [ ] * DroppedTarget {
{
DiscoveredLabels : map [ string ] string {
2021-08-31 15:37:32 +00:00
"__address__" : "http://dropped.example.com:9115" ,
"__metrics_path__" : "/probe" ,
"__scheme__" : "http" ,
"job" : "blackbox" ,
"__scrape_interval__" : "30s" ,
"__scrape_timeout__" : "15s" ,
2019-11-11 21:42:24 +00:00
} ,
2017-01-13 16:15:04 +00:00
} ,
2016-12-02 12:31:43 +00:00
} ,
2023-08-14 14:39:25 +00:00
DroppedTargetCounts : map [ string ] int { "blackbox" : 1 } ,
2019-11-11 21:42:24 +00:00
} ,
} ,
{
endpoint : api . targets ,
query : url . Values {
"state" : [ ] string { "any" } ,
} ,
response : & TargetDiscovery {
ActiveTargets : [ ] * Target {
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "blackbox" ,
} ,
ScrapePool : "blackbox" ,
ScrapeURL : "http://localhost:9115/probe?target=example.com" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://localhost:9115/probe?target=example.com" ,
2019-11-11 21:42:24 +00:00
Health : "down" ,
2020-02-17 17:19:15 +00:00
LastError : "failed: missing port in address" ,
2019-11-11 21:42:24 +00:00
LastScrape : scrapeStart ,
LastScrapeDuration : 0.1 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "20s" ,
ScrapeTimeout : "10s" ,
2019-11-11 21:42:24 +00:00
} ,
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "test" ,
} ,
ScrapePool : "test" ,
ScrapeURL : "http://example.com:8080/metrics" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://example.com:8080/metrics" ,
2019-11-11 21:42:24 +00:00
Health : "up" ,
LastError : "" ,
LastScrape : scrapeStart ,
LastScrapeDuration : 0.07 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "15s" ,
ScrapeTimeout : "5s" ,
2019-11-11 21:42:24 +00:00
} ,
} ,
DroppedTargets : [ ] * DroppedTarget {
{
DiscoveredLabels : map [ string ] string {
2021-08-31 15:37:32 +00:00
"__address__" : "http://dropped.example.com:9115" ,
"__metrics_path__" : "/probe" ,
"__scheme__" : "http" ,
"job" : "blackbox" ,
"__scrape_interval__" : "30s" ,
"__scrape_timeout__" : "15s" ,
2019-11-11 21:42:24 +00:00
} ,
} ,
} ,
2023-08-14 14:39:25 +00:00
DroppedTargetCounts : map [ string ] int { "blackbox" : 1 } ,
2019-11-11 21:42:24 +00:00
} ,
} ,
{
endpoint : api . targets ,
query : url . Values {
"state" : [ ] string { "active" } ,
} ,
response : & TargetDiscovery {
ActiveTargets : [ ] * Target {
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "blackbox" ,
} ,
ScrapePool : "blackbox" ,
ScrapeURL : "http://localhost:9115/probe?target=example.com" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://localhost:9115/probe?target=example.com" ,
2019-11-11 21:42:24 +00:00
Health : "down" ,
2020-02-17 17:19:15 +00:00
LastError : "failed: missing port in address" ,
2019-11-11 21:42:24 +00:00
LastScrape : scrapeStart ,
LastScrapeDuration : 0.1 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "20s" ,
ScrapeTimeout : "10s" ,
2019-11-11 21:42:24 +00:00
} ,
{
DiscoveredLabels : map [ string ] string { } ,
Labels : map [ string ] string {
"job" : "test" ,
} ,
ScrapePool : "test" ,
ScrapeURL : "http://example.com:8080/metrics" ,
2020-02-17 17:19:15 +00:00
GlobalURL : "http://example.com:8080/metrics" ,
2019-11-11 21:42:24 +00:00
Health : "up" ,
LastError : "" ,
LastScrape : scrapeStart ,
LastScrapeDuration : 0.07 ,
2021-08-31 15:37:32 +00:00
ScrapeInterval : "15s" ,
ScrapeTimeout : "5s" ,
2019-11-11 21:42:24 +00:00
} ,
} ,
DroppedTargets : [ ] * DroppedTarget { } ,
} ,
} ,
{
endpoint : api . targets ,
query : url . Values {
"state" : [ ] string { "Dropped" } ,
} ,
response : & TargetDiscovery {
ActiveTargets : [ ] * Target { } ,
2018-10-25 08:19:20 +00:00
DroppedTargets : [ ] * DroppedTarget {
{
DiscoveredLabels : map [ string ] string {
2021-08-31 15:37:32 +00:00
"__address__" : "http://dropped.example.com:9115" ,
"__metrics_path__" : "/probe" ,
"__scheme__" : "http" ,
"job" : "blackbox" ,
"__scrape_interval__" : "30s" ,
"__scrape_timeout__" : "15s" ,
2018-02-21 17:26:18 +00:00
} ,
} ,
} ,
2023-08-14 14:39:25 +00:00
DroppedTargetCounts : map [ string ] int { "blackbox" : 1 } ,
2016-12-02 12:31:43 +00:00
} ,
2017-05-11 15:09:24 +00:00
} ,
2019-12-04 19:33:01 +00:00
// With a matching metric.
{
endpoint : api . targetMetadata ,
query : url . Values {
"metric" : [ ] string { "go_threads" } ,
} ,
2019-12-10 14:56:16 +00:00
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created." ,
Unit : "" ,
} ,
} ,
} ,
} ,
2019-12-04 19:33:01 +00:00
response : [ ] metricMetadata {
{
Target : labels . FromMap ( map [ string ] string {
"job" : "test" ,
} ) ,
Help : "Number of OS threads created." ,
Type : textparse . MetricTypeGauge ,
Unit : "" ,
} ,
} ,
} ,
// With a matching target.
{
endpoint : api . targetMetadata ,
query : url . Values {
"match_target" : [ ] string { "{job=\"blackbox\"}" } ,
} ,
2019-12-10 14:56:16 +00:00
metadata : [ ] targetMetadata {
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "prometheus_tsdb_storage_blocks_bytes" ,
Type : textparse . MetricTypeGauge ,
Help : "The number of bytes that are currently used for local storage by all blocks." ,
Unit : "" ,
} ,
} ,
} ,
} ,
2019-12-04 19:33:01 +00:00
response : [ ] metricMetadata {
{
Target : labels . FromMap ( map [ string ] string {
"job" : "blackbox" ,
} ) ,
Metric : "prometheus_tsdb_storage_blocks_bytes" ,
Help : "The number of bytes that are currently used for local storage by all blocks." ,
Type : textparse . MetricTypeGauge ,
Unit : "" ,
} ,
} ,
} ,
// Without a target or metric.
{
endpoint : api . targetMetadata ,
2019-12-10 14:56:16 +00:00
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created." ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "prometheus_tsdb_storage_blocks_bytes" ,
Type : textparse . MetricTypeGauge ,
Help : "The number of bytes that are currently used for local storage by all blocks." ,
Unit : "" ,
} ,
} ,
} ,
} ,
2019-12-04 19:33:01 +00:00
response : [ ] metricMetadata {
{
Target : labels . FromMap ( map [ string ] string {
"job" : "test" ,
} ) ,
Metric : "go_threads" ,
Help : "Number of OS threads created." ,
Type : textparse . MetricTypeGauge ,
Unit : "" ,
} ,
{
Target : labels . FromMap ( map [ string ] string {
"job" : "blackbox" ,
} ) ,
Metric : "prometheus_tsdb_storage_blocks_bytes" ,
Help : "The number of bytes that are currently used for local storage by all blocks." ,
Type : textparse . MetricTypeGauge ,
Unit : "" ,
} ,
} ,
2019-12-09 21:36:38 +00:00
sorter : func ( m interface { } ) {
sort . Slice ( m . ( [ ] metricMetadata ) , func ( i , j int ) bool {
s := m . ( [ ] metricMetadata )
return s [ i ] . Metric < s [ j ] . Metric
} )
} ,
2019-12-04 19:33:01 +00:00
} ,
// Without a matching metric.
{
endpoint : api . targetMetadata ,
query : url . Values {
"match_target" : [ ] string { "{job=\"non-existentblackbox\"}" } ,
} ,
2019-12-10 14:56:16 +00:00
response : [ ] metricMetadata { } ,
2019-12-04 19:33:01 +00:00
} ,
2015-06-09 14:09:31 +00:00
{
2017-01-13 09:20:11 +00:00
endpoint : api . alertmanagers ,
response : & AlertmanagerDiscovery {
ActiveAlertmanagers : [ ] * AlertmanagerTarget {
2017-04-05 13:24:22 +00:00
{
2017-01-13 09:20:11 +00:00
URL : "http://alertmanager.example.com:8080/api/v1/alerts" ,
} ,
} ,
2018-02-21 09:00:07 +00:00
DroppedAlertmanagers : [ ] * AlertmanagerTarget {
{
URL : "http://dropped.alertmanager.example.com:8080/api/v1/alerts" ,
} ,
} ,
2017-01-13 09:20:11 +00:00
} ,
2015-06-09 14:09:31 +00:00
} ,
2019-12-10 14:56:16 +00:00
// With metadata available.
{
endpoint : api . metricMetadata ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "prometheus_engine_query_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "Query timings" ,
Unit : "" ,
} ,
{
Metric : "go_info" ,
Type : textparse . MetricTypeGauge ,
Help : "Information about the Go environment." ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata {
"prometheus_engine_query_duration_seconds" : { { textparse . MetricTypeSummary , "Query timings" , "" } } ,
"go_info" : { { textparse . MetricTypeGauge , "Information about the Go environment." , "" } } ,
} ,
} ,
// With duplicate metadata for a metric that comes from different targets.
{
endpoint : api . metricMetadata ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata {
"go_threads" : { { textparse . MetricTypeGauge , "Number of OS threads created" , "" } } ,
} ,
} ,
// With non-duplicate metadata for the same metric from different targets.
{
endpoint : api . metricMetadata ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads that were created." ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata {
2020-06-22 14:29:35 +00:00
"go_threads" : {
2019-12-10 14:56:16 +00:00
{ textparse . MetricTypeGauge , "Number of OS threads created" , "" } ,
{ textparse . MetricTypeGauge , "Number of OS threads that were created." , "" } ,
} ,
} ,
sorter : func ( m interface { } ) {
v := m . ( map [ string ] [ ] metadata ) [ "go_threads" ]
sort . Slice ( v , func ( i , j int ) bool {
return v [ i ] . Help < v [ j ] . Help
} )
} ,
} ,
2019-12-10 15:15:13 +00:00
// With a limit for the number of metrics returned.
2019-12-10 14:56:16 +00:00
{
endpoint : api . metricMetadata ,
query : url . Values {
"limit" : [ ] string { "2" } ,
} ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
{
Metric : "prometheus_engine_query_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "Query Timmings." ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations." ,
Unit : "" ,
} ,
} ,
} ,
} ,
responseLen : 2 ,
} ,
2023-06-12 15:17:20 +00:00
// With a limit for the number of metadata per metric.
{
endpoint : api . metricMetadata ,
query : url . Values { "limit_per_metric" : [ ] string { "1" } } ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Repeated metadata" ,
Unit : "" ,
} ,
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations." ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata {
"go_threads" : {
{ textparse . MetricTypeGauge , "Number of OS threads created" , "" } ,
} ,
"go_gc_duration_seconds" : {
{ textparse . MetricTypeSummary , "A summary of the GC invocation durations." , "" } ,
} ,
} ,
} ,
// With a limit for the number of metadata per metric and per metric.
{
endpoint : api . metricMetadata ,
query : url . Values { "limit_per_metric" : [ ] string { "1" } , "limit" : [ ] string { "1" } } ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Repeated metadata" ,
Unit : "" ,
} ,
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations." ,
Unit : "" ,
} ,
} ,
} ,
} ,
responseLen : 1 ,
responseMetadataTotal : 1 ,
} ,
// With a limit for the number of metadata per metric and per metric, while having multiple targets.
{
endpoint : api . metricMetadata ,
query : url . Values { "limit_per_metric" : [ ] string { "1" } , "limit" : [ ] string { "1" } } ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Repeated metadata" ,
Unit : "" ,
} ,
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations." ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "secondTarget" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created, but from a different target" ,
Unit : "" ,
} ,
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations, but from a different target." ,
Unit : "" ,
} ,
} ,
} ,
} ,
responseLen : 1 ,
responseMetadataTotal : 1 ,
} ,
2019-12-10 15:22:10 +00:00
// When requesting a specific metric that is present.
{
endpoint : api . metricMetadata ,
query : url . Values { "metric" : [ ] string { "go_threads" } } ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
} ,
} ,
{
identifier : "blackbox" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_gc_duration_seconds" ,
Type : textparse . MetricTypeSummary ,
Help : "A summary of the GC invocation durations." ,
Unit : "" ,
} ,
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads that were created." ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata {
2020-06-22 14:29:35 +00:00
"go_threads" : {
2019-12-10 15:22:10 +00:00
{ textparse . MetricTypeGauge , "Number of OS threads created" , "" } ,
{ textparse . MetricTypeGauge , "Number of OS threads that were created." , "" } ,
} ,
} ,
sorter : func ( m interface { } ) {
v := m . ( map [ string ] [ ] metadata ) [ "go_threads" ]
sort . Slice ( v , func ( i , j int ) bool {
return v [ i ] . Help < v [ j ] . Help
} )
} ,
} ,
// With a specific metric that is not present.
{
endpoint : api . metricMetadata ,
query : url . Values { "metric" : [ ] string { "go_gc_duration_seconds" } } ,
metadata : [ ] targetMetadata {
{
identifier : "test" ,
metadata : [ ] scrape . MetricMetadata {
{
Metric : "go_threads" ,
Type : textparse . MetricTypeGauge ,
Help : "Number of OS threads created" ,
Unit : "" ,
} ,
} ,
} ,
} ,
response : map [ string ] [ ] metadata { } ,
} ,
2019-12-10 15:15:13 +00:00
// With no available metadata.
2019-12-10 14:56:16 +00:00
{
endpoint : api . metricMetadata ,
response : map [ string ] [ ] metadata { } ,
} ,
2017-05-11 15:09:24 +00:00
{
endpoint : api . serveConfig ,
response : & prometheusConfig {
YAML : samplePrometheusCfg . String ( ) ,
} ,
} ,
api: Added v1/status/flags endpoint. (#3864)
Endpoint URL: /api/v1/status/flags
Example Output:
```json
{
"status": "success",
"data": {
"alertmanager.notification-queue-capacity": "10000",
"alertmanager.timeout": "10s",
"completion-bash": "false",
"completion-script-bash": "false",
"completion-script-zsh": "false",
"config.file": "my_cool_prometheus.yaml",
"help": "false",
"help-long": "false",
"help-man": "false",
"log.level": "info",
"query.lookback-delta": "5m",
"query.max-concurrency": "20",
"query.timeout": "2m",
"storage.tsdb.max-block-duration": "36h",
"storage.tsdb.min-block-duration": "2h",
"storage.tsdb.no-lockfile": "false",
"storage.tsdb.path": "data/",
"storage.tsdb.retention": "15d",
"version": "false",
"web.console.libraries": "console_libraries",
"web.console.templates": "consoles",
"web.enable-admin-api": "false",
"web.enable-lifecycle": "false",
"web.external-url": "",
"web.listen-address": "0.0.0.0:9090",
"web.max-connections": "512",
"web.read-timeout": "5m",
"web.route-prefix": "/",
"web.user-assets": ""
}
}
```
Signed-off-by: Bartek Plotka <bwplotka@gmail.com>
2018-02-21 08:49:02 +00:00
{
endpoint : api . serveFlags ,
response : sampleFlagMap ,
} ,
2018-03-25 16:50:34 +00:00
{
endpoint : api . alerts ,
response : & AlertDiscovery {
2023-10-18 02:02:03 +00:00
Alerts : [ ] * Alert {
{
Labels : labels . FromStrings ( "alertname" , "test_metric5" , "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
State : "pending" ,
Value : "1e+00" ,
} ,
} ,
} ,
zeroFunc : func ( i interface { } ) {
if i != nil {
v := i . ( * AlertDiscovery )
for _ , alert := range v . Alerts {
alert . ActiveAt = nil
}
}
2018-03-25 16:50:34 +00:00
} ,
} ,
{
endpoint : api . rules ,
2018-06-27 07:15:17 +00:00
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
2018-03-25 16:50:34 +00:00
{
2018-06-27 07:15:17 +00:00
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
2022-01-12 03:44:22 +00:00
Limit : 0 ,
2021-11-21 17:00:27 +00:00
Rules : [ ] Rule {
AlertingRule {
2019-12-09 22:42:59 +00:00
State : "inactive" ,
2018-06-27 07:15:17 +00:00
Name : "test_metric3" ,
Query : "absent(test_metric3) != 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2018-06-27 07:15:17 +00:00
Type : "alerting" ,
} ,
2021-11-21 17:00:27 +00:00
AlertingRule {
2019-12-09 22:42:59 +00:00
State : "inactive" ,
2018-06-27 07:15:17 +00:00
Name : "test_metric4" ,
Query : "up == 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
Type : "alerting" ,
} ,
AlertingRule {
State : "pending" ,
Name : "test_metric5" ,
Query : "vector(1)" ,
Duration : 1 ,
Labels : labels . FromStrings ( "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert {
{
Labels : labels . FromStrings ( "alertname" , "test_metric5" , "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
State : "pending" ,
Value : "1e+00" ,
} ,
} ,
Health : "ok" ,
Type : "alerting" ,
} ,
RecordingRule {
Name : "recording-rule-1" ,
Query : "vector(1)" ,
Labels : labels . Labels { } ,
Health : "ok" ,
Type : "recording" ,
} ,
} ,
} ,
} ,
} ,
zeroFunc : rulesZeroFunc ,
} ,
{
endpoint : api . rules ,
query : url . Values {
"exclude_alerts" : [ ] string { "true" } ,
} ,
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
{
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
Limit : 0 ,
Rules : [ ] Rule {
AlertingRule {
State : "inactive" ,
Name : "test_metric3" ,
Query : "absent(test_metric3) != 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : nil ,
Health : "ok" ,
Type : "alerting" ,
} ,
AlertingRule {
State : "inactive" ,
Name : "test_metric4" ,
Query : "up == 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : nil ,
Health : "ok" ,
Type : "alerting" ,
} ,
AlertingRule {
State : "pending" ,
Name : "test_metric5" ,
Query : "vector(1)" ,
Duration : 1 ,
Labels : labels . FromStrings ( "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
Alerts : nil ,
Health : "ok" ,
2018-06-27 07:15:17 +00:00
Type : "alerting" ,
2018-03-25 16:50:34 +00:00
} ,
2021-11-21 17:00:27 +00:00
RecordingRule {
2018-06-27 07:15:17 +00:00
Name : "recording-rule-1" ,
Query : "vector(1)" ,
Labels : labels . Labels { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2018-06-27 07:15:17 +00:00
Type : "recording" ,
2018-03-25 16:50:34 +00:00
} ,
} ,
} ,
} ,
} ,
2023-10-18 02:02:03 +00:00
zeroFunc : rulesZeroFunc ,
2018-03-25 16:50:34 +00:00
} ,
2019-12-09 22:42:59 +00:00
{
endpoint : api . rules ,
query : url . Values {
"type" : [ ] string { "alert" } ,
} ,
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
{
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
2022-01-12 03:44:22 +00:00
Limit : 0 ,
2021-11-21 17:00:27 +00:00
Rules : [ ] Rule {
AlertingRule {
2019-12-09 22:42:59 +00:00
State : "inactive" ,
Name : "test_metric3" ,
Query : "absent(test_metric3) != 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2019-12-09 22:42:59 +00:00
Type : "alerting" ,
} ,
2021-11-21 17:00:27 +00:00
AlertingRule {
2019-12-09 22:42:59 +00:00
State : "inactive" ,
Name : "test_metric4" ,
Query : "up == 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2019-12-09 22:42:59 +00:00
Type : "alerting" ,
} ,
2023-10-18 02:02:03 +00:00
AlertingRule {
State : "pending" ,
Name : "test_metric5" ,
Query : "vector(1)" ,
Duration : 1 ,
Labels : labels . FromStrings ( "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert {
{
Labels : labels . FromStrings ( "alertname" , "test_metric5" , "name" , "tm5" ) ,
Annotations : labels . Labels { } ,
State : "pending" ,
Value : "1e+00" ,
} ,
} ,
Health : "ok" ,
Type : "alerting" ,
} ,
2019-12-09 22:42:59 +00:00
} ,
} ,
} ,
} ,
2023-10-18 02:02:03 +00:00
zeroFunc : rulesZeroFunc ,
2019-12-09 22:42:59 +00:00
} ,
{
endpoint : api . rules ,
query : url . Values {
"type" : [ ] string { "record" } ,
} ,
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
{
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
2022-01-12 03:44:22 +00:00
Limit : 0 ,
2021-11-21 17:00:27 +00:00
Rules : [ ] Rule {
RecordingRule {
2019-12-09 22:42:59 +00:00
Name : "recording-rule-1" ,
Query : "vector(1)" ,
Labels : labels . Labels { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2019-12-09 22:42:59 +00:00
Type : "recording" ,
} ,
} ,
} ,
} ,
} ,
2023-10-18 02:02:03 +00:00
zeroFunc : rulesZeroFunc ,
2019-12-09 22:42:59 +00:00
} ,
2023-04-18 09:07:32 +00:00
{
endpoint : api . rules ,
2023-04-18 15:26:21 +00:00
query : url . Values { "rule_name[]" : [ ] string { "test_metric4" } } ,
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
{
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
Limit : 0 ,
Rules : [ ] Rule {
AlertingRule {
State : "inactive" ,
Name : "test_metric4" ,
Query : "up == 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2023-04-18 15:26:21 +00:00
Type : "alerting" ,
} ,
} ,
} ,
} ,
} ,
2023-10-18 02:02:03 +00:00
zeroFunc : rulesZeroFunc ,
2023-04-18 15:26:21 +00:00
} ,
{
endpoint : api . rules ,
query : url . Values { "rule_group[]" : [ ] string { "respond-with-nothing" } } ,
2023-04-20 10:20:10 +00:00
response : & RuleDiscovery { RuleGroups : [ ] * RuleGroup { } } ,
2023-04-18 15:26:21 +00:00
} ,
{
endpoint : api . rules ,
query : url . Values { "file[]" : [ ] string { "/path/to/file" } , "rule_name[]" : [ ] string { "test_metric4" } } ,
2023-04-18 09:07:32 +00:00
response : & RuleDiscovery {
RuleGroups : [ ] * RuleGroup {
{
Name : "grp" ,
File : "/path/to/file" ,
Interval : 1 ,
Limit : 0 ,
Rules : [ ] Rule {
AlertingRule {
State : "inactive" ,
Name : "test_metric4" ,
Query : "up == 1" ,
Duration : 1 ,
Labels : labels . Labels { } ,
Annotations : labels . Labels { } ,
Alerts : [ ] * Alert { } ,
2023-10-18 02:02:03 +00:00
Health : "ok" ,
2023-04-18 09:07:32 +00:00
Type : "alerting" ,
} ,
} ,
2019-12-09 22:42:59 +00:00
} ,
} ,
} ,
2023-10-18 02:02:03 +00:00
zeroFunc : rulesZeroFunc ,
2019-12-09 22:42:59 +00:00
} ,
2021-03-16 09:47:45 +00:00
{
endpoint : api . queryExemplars ,
query : url . Values {
"query" : [ ] string { ` test_metric3 { foo="boo"} - test_metric4 { foo="bar"} ` } ,
"start" : [ ] string { "0" } ,
"end" : [ ] string { "4" } ,
} ,
// Note extra integer length of timestamps for exemplars because of millisecond preservation
// of timestamps within Prometheus (see timestamp package).
response : [ ] exemplar . QueryResult {
{
SeriesLabels : labels . FromStrings ( "__name__" , "test_metric3" , "foo" , "boo" , "dup" , "1" ) ,
Exemplars : [ ] exemplar . Exemplar {
{
Labels : labels . FromStrings ( "id" , "abc" ) ,
Value : 10 ,
Ts : timestamp . FromTime ( start . Add ( 2 * time . Second ) ) ,
} ,
} ,
} ,
{
SeriesLabels : labels . FromStrings ( "__name__" , "test_metric4" , "foo" , "bar" , "dup" , "1" ) ,
Exemplars : [ ] exemplar . Exemplar {
{
Labels : labels . FromStrings ( "id" , "lul" ) ,
Value : 10 ,
Ts : timestamp . FromTime ( start . Add ( 4 * time . Second ) ) ,
} ,
} ,
} ,
} ,
} ,
{
endpoint : api . queryExemplars ,
query : url . Values {
"query" : [ ] string { ` { foo="boo"} ` } ,
"start" : [ ] string { "4" } ,
"end" : [ ] string { "4.1" } ,
} ,
response : [ ] exemplar . QueryResult {
{
SeriesLabels : labels . FromStrings ( "__name__" , "test_metric3" , "foo" , "boo" , "dup" , "1" ) ,
Exemplars : [ ] exemplar . Exemplar {
{
Labels : labels . FromStrings ( "id" , "abc2" ) ,
Value : 10 ,
Ts : 4053 ,
} ,
} ,
} ,
} ,
} ,
{
endpoint : api . queryExemplars ,
query : url . Values {
"query" : [ ] string { ` { foo="boo"} ` } ,
} ,
response : [ ] exemplar . QueryResult {
{
SeriesLabels : labels . FromStrings ( "__name__" , "test_metric3" , "foo" , "boo" , "dup" , "1" ) ,
Exemplars : [ ] exemplar . Exemplar {
{
Labels : labels . FromStrings ( "id" , "abc" ) ,
Value : 10 ,
Ts : 2000 ,
} ,
{
Labels : labels . FromStrings ( "id" , "abc2" ) ,
Value : 10 ,
Ts : 4053 ,
} ,
} ,
} ,
} ,
} ,
{
endpoint : api . queryExemplars ,
query : url . Values {
"query" : [ ] string { ` { __name__="test_metric5"} ` } ,
} ,
response : [ ] exemplar . QueryResult { } ,
} ,
2015-06-04 16:07:57 +00:00
}
2018-06-16 17:26:37 +00:00
if testLabelAPI {
tests = append ( tests , [ ] test {
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "__name__" ,
} ,
response : [ ] string {
"test_metric1" ,
"test_metric2" ,
2020-08-28 23:21:39 +00:00
"test_metric3" ,
"test_metric4" ,
2018-06-16 17:26:37 +00:00
} ,
} ,
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Bad name parameter.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "not!!!allowed" ,
} ,
errType : errorBadData ,
} ,
2020-05-30 12:50:09 +00:00
// Start and end before LabelValues starts.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "-2" } ,
"end" : [ ] string { "-1" } ,
} ,
response : [ ] string { } ,
} ,
// Start and end within LabelValues.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Start before LabelValues, end within LabelValues.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "-1" } ,
"end" : [ ] string { "3" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Start before LabelValues starts, end after LabelValues ends.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "1969-12-31T00:00:00Z" } ,
"end" : [ ] string { "1970-02-01T00:02:03Z" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Start with bad data, end within LabelValues.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "boop" } ,
"end" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
// Start within LabelValues, end after.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100000000" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Start and end after LabelValues ends.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "148966367200.372" } ,
"end" : [ ] string { "148966367200.972" } ,
} ,
response : [ ] string { } ,
} ,
// Only provide Start within LabelValues, don't provide an end time.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"start" : [ ] string { "2" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Only provide end within LabelValues, don't provide a start time.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"end" : [ ] string { "100" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
2020-12-22 11:02:19 +00:00
// Label values with bad matchers.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` { foo="" ` , ` test_metric2 ` } ,
} ,
errType : errorBadData ,
} ,
// Label values with empty matchers.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` { foo=""} ` } ,
} ,
errType : errorBadData ,
} ,
// Label values with matcher.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
} ,
response : [ ] string {
"boo" ,
} ,
} ,
// Label values with matcher.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` test_metric1 ` } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
// Label values with matcher using label filter.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` test_metric1 { foo="bar"} ` } ,
} ,
response : [ ] string {
"bar" ,
} ,
} ,
// Label values with matcher and time range.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` test_metric1 ` } ,
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100000000" } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
2021-02-09 17:38:35 +00:00
// Try to overlap the selected series set as much as possible to test that the value de-duplication works.
{
endpoint : api . labelValues ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` test_metric4 { dup=~"^1"} ` , ` test_metric4 { foo=~".+o$"} ` } ,
} ,
response : [ ] string {
"bar" ,
"boo" ,
} ,
} ,
2018-11-19 10:21:14 +00:00
// Label names.
{
endpoint : api . labelNames ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2018-11-19 10:21:14 +00:00
} ,
2020-05-30 12:50:09 +00:00
// Start and end before Label names starts.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "-2" } ,
"end" : [ ] string { "-1" } ,
} ,
response : [ ] string { } ,
} ,
// Start and end within Label names.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
// Start before Label names, end within Label names.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "-1" } ,
"end" : [ ] string { "10" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
// Start before Label names starts, end after Label names ends.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "-1" } ,
"end" : [ ] string { "100000" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
// Start with bad data for Label names, end within Label names.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "boop" } ,
"end" : [ ] string { "1" } ,
} ,
errType : errorBadData ,
} ,
// Start within Label names, end after.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "1" } ,
"end" : [ ] string { "1000000006" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
// Start and end after Label names ends.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "148966367200.372" } ,
"end" : [ ] string { "148966367200.972" } ,
} ,
response : [ ] string { } ,
} ,
// Only provide Start within Label names, don't provide an end time.
{
endpoint : api . labelNames ,
query : url . Values {
"start" : [ ] string { "4" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
// Only provide End within Label names, don't provide a start time.
{
endpoint : api . labelNames ,
query : url . Values {
"end" : [ ] string { "20" } ,
} ,
2020-08-28 23:21:39 +00:00
response : [ ] string { "__name__" , "dup" , "foo" } ,
2020-05-30 12:50:09 +00:00
} ,
2020-12-22 11:02:19 +00:00
// Label names with bad matchers.
{
endpoint : api . labelNames ,
query : url . Values {
"match[]" : [ ] string { ` { foo="" ` , ` test_metric2 ` } ,
} ,
errType : errorBadData ,
} ,
// Label values with empty matchers.
{
endpoint : api . labelNames ,
params : map [ string ] string {
"name" : "foo" ,
} ,
query : url . Values {
"match[]" : [ ] string { ` { foo=""} ` } ,
} ,
errType : errorBadData ,
} ,
// Label names with matcher.
{
endpoint : api . labelNames ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
} ,
response : [ ] string { "__name__" , "foo" } ,
} ,
// Label names with matcher.
{
endpoint : api . labelNames ,
query : url . Values {
"match[]" : [ ] string { ` test_metric3 ` } ,
} ,
response : [ ] string { "__name__" , "dup" , "foo" } ,
} ,
// Label names with matcher using label filter.
// There is no matching series.
{
endpoint : api . labelNames ,
query : url . Values {
"match[]" : [ ] string { ` test_metric1 { foo="test"} ` } ,
} ,
response : [ ] string { } ,
} ,
// Label names with matcher and time range.
{
endpoint : api . labelNames ,
query : url . Values {
"match[]" : [ ] string { ` test_metric2 ` } ,
"start" : [ ] string { "1" } ,
"end" : [ ] string { "100000000" } ,
} ,
response : [ ] string { "__name__" , "foo" } ,
} ,
2018-06-16 17:26:37 +00:00
} ... )
}
2017-11-11 00:53:48 +00:00
methods := func ( f apiFunc ) [ ] string {
fp := reflect . ValueOf ( f ) . Pointer ( )
2019-04-02 17:00:29 +00:00
if fp == reflect . ValueOf ( api . query ) . Pointer ( ) || fp == reflect . ValueOf ( api . queryRange ) . Pointer ( ) || fp == reflect . ValueOf ( api . series ) . Pointer ( ) {
2017-11-11 00:53:48 +00:00
return [ ] string { http . MethodGet , http . MethodPost }
2015-06-08 19:19:52 +00:00
}
2017-11-11 00:53:48 +00:00
return [ ] string { http . MethodGet }
}
2015-06-08 19:19:52 +00:00
2017-11-11 00:53:48 +00:00
request := func ( m string , q url . Values ) ( * http . Request , error ) {
if m == http . MethodPost {
r , err := http . NewRequest ( m , "http://example.com" , strings . NewReader ( q . Encode ( ) ) )
r . Header . Set ( "Content-Type" , "application/x-www-form-urlencoded" )
2020-01-08 13:28:43 +00:00
r . RemoteAddr = "127.0.0.1:20201"
2017-11-11 00:53:48 +00:00
return r , err
2015-06-04 16:07:57 +00:00
}
2020-01-08 13:28:43 +00:00
r , err := http . NewRequest ( m , fmt . Sprintf ( "http://example.com?%s" , q . Encode ( ) ) , nil )
r . RemoteAddr = "127.0.0.1:20201"
return r , err
2017-11-11 00:53:48 +00:00
}
2018-06-16 17:26:37 +00:00
for i , test := range tests {
2020-07-31 15:03:02 +00:00
t . Run ( fmt . Sprintf ( "run %d %s %q" , i , describeAPIFunc ( test . endpoint ) , test . query . Encode ( ) ) , func ( t * testing . T ) {
for _ , method := range methods ( test . endpoint ) {
t . Run ( method , func ( t * testing . T ) {
// Build a context with the correct request params.
ctx := context . Background ( )
for p , v := range test . params {
ctx = route . WithParam ( ctx , p , v )
}
req , err := request ( method , test . query )
if err != nil {
t . Fatal ( err )
}
tr . ResetMetadataStore ( )
for _ , tm := range test . metadata {
tr . SetMetadataStoreForTargets ( tm . identifier , & testMetaStore { Metadata : tm . metadata } )
}
2021-03-16 09:47:45 +00:00
for _ , te := range test . exemplars {
for _ , e := range te . Exemplars {
_ , err := es . AppendExemplar ( 0 , te . SeriesLabels , e )
if err != nil {
t . Fatal ( err )
}
}
}
2020-07-31 15:03:02 +00:00
res := test . endpoint ( req . WithContext ( ctx ) )
assertAPIError ( t , res . err , test . errType )
if test . sorter != nil {
test . sorter ( res . data )
}
if test . responseLen != 0 {
assertAPIResponseLength ( t , res . data , test . responseLen )
2023-06-12 15:17:20 +00:00
if test . responseMetadataTotal != 0 {
assertAPIResponseMetadataLen ( t , res . data , test . responseMetadataTotal )
}
2020-07-31 15:03:02 +00:00
} else {
2023-10-18 02:02:03 +00:00
if test . zeroFunc != nil {
test . zeroFunc ( res . data )
}
2020-07-31 15:03:02 +00:00
assertAPIResponse ( t , res . data , test . response )
}
} )
2019-12-10 14:56:16 +00:00
}
2020-07-31 15:03:02 +00:00
} )
2018-11-15 13:22:16 +00:00
}
}
2018-06-27 07:15:17 +00:00
2020-07-31 15:03:02 +00:00
// describeAPIFunc derives a short human-readable name for an apiFunc from
// its runtime symbol name, stripping the package path and any closure
// suffix (the part after the first "-").
func describeAPIFunc(f apiFunc) string {
	full := runtime.FuncForPC(reflect.ValueOf(f).Pointer()).Name()
	short := full[strings.LastIndex(full, ".")+1:]
	head, _, _ := strings.Cut(short, "-")
	return head
}
2018-11-15 13:22:16 +00:00
func assertAPIError ( t * testing . T , got * apiError , exp errorType ) {
t . Helper ( )
2018-06-27 07:15:17 +00:00
2018-11-15 13:22:16 +00:00
if got != nil {
if exp == errorNone {
t . Fatalf ( "Unexpected error: %s" , got )
}
if exp != got . typ {
t . Fatalf ( "Expected error of type %q but got type %q (%q)" , exp , got . typ , got )
2015-06-04 16:07:57 +00:00
}
2018-11-15 13:22:16 +00:00
return
}
2019-05-03 13:11:28 +00:00
if exp != errorNone {
2018-11-15 13:22:16 +00:00
t . Fatalf ( "Expected error of type %q but got none" , exp )
}
}
2021-10-22 08:06:44 +00:00
// assertAPIResponse fails the test unless the endpoint's response payload
// is deeply equal to the expected value (via testify's require.Equal).
func assertAPIResponse(t *testing.T, got, exp interface{}) {
	t.Helper()

	require.Equal(t, exp, got)
}
2019-12-10 14:56:16 +00:00
// assertAPIResponseLength fails the test unless the response — any value
// supporting reflect's Len (slice, map, string, ...) — contains exactly
// expLen elements.
func assertAPIResponseLength(t *testing.T, got interface{}, expLen int) {
	t.Helper()

	if gotLen := reflect.ValueOf(got).Len(); gotLen != expLen {
		t.Fatalf(
			"Response length does not match, expected:\n%d\ngot:\n%d",
			expLen,
			gotLen,
		)
	}
}
2023-06-12 15:17:20 +00:00
// assertAPIResponseMetadataLen fails the test unless the total number of
// metadata entries, summed across every metric in the response map,
// equals expLen.
func assertAPIResponseMetadataLen(t *testing.T, got interface{}, expLen int) {
	t.Helper()

	gotLen := 0
	for _, metrics := range got.(map[string][]metadata) {
		gotLen += len(metrics)
	}
	if gotLen != expLen {
		t.Fatalf(
			"Amount of metadata in the response does not match, expected:\n%d\ngot:\n%d",
			expLen,
			gotLen,
		)
	}
}
2018-11-15 13:22:16 +00:00
// fakeDB is a stub of the TSDB admin interface used by the admin-endpoint
// tests. Every faked operation returns err, so a zero fakeDB behaves as a
// healthy database.
type fakeDB struct {
	err error
}
2023-09-13 13:43:06 +00:00
// CleanTombstones, Delete and Snapshot ignore their arguments and simply
// return the configured error (nil for a healthy fakeDB).
func (f *fakeDB) CleanTombstones() error { return f.err }
func (f *fakeDB) Delete(context.Context, int64, int64, ...*labels.Matcher) error { return f.err }
func (f *fakeDB) Snapshot(string, bool) error { return f.err }
2023-05-22 12:37:07 +00:00
func ( f * fakeDB ) Stats ( statsByLabelName string , limit int ) ( _ * tsdb . Stats , retErr error ) {
2022-04-27 09:24:36 +00:00
dbDir , err := os . MkdirTemp ( "" , "tsdb-api-ready" )
2020-05-06 15:30:00 +00:00
if err != nil {
return nil , err
}
defer func ( ) {
err := os . RemoveAll ( dbDir )
if retErr != nil {
retErr = err
}
} ( )
2021-02-09 14:12:48 +00:00
opts := tsdb . DefaultHeadOptions ( )
opts . ChunkRange = 1000
2022-09-20 17:05:50 +00:00
h , _ := tsdb . NewHead ( nil , nil , nil , nil , opts , nil )
2023-05-22 12:37:07 +00:00
return h . Stats ( statsByLabelName , limit ) , nil
2019-11-12 10:15:20 +00:00
}
2021-10-22 08:06:44 +00:00
2021-06-05 14:29:32 +00:00
// WALReplayStatus reports a zero replay status; the fake DB never replays a WAL.
func (f *fakeDB) WALReplayStatus() (tsdb.WALReplayStatus, error) {
	var status tsdb.WALReplayStatus
	return status, nil
}
2018-11-15 13:22:16 +00:00
// TestAdminEndpoints exercises the snapshot, clean-tombstones and
// delete-series admin APIs against a fake TSDB in three states (healthy,
// erroring, and not ready), with the admin APIs both enabled and disabled.
func TestAdminEndpoints(t *testing.T) {
	// NOTE: inside the composite literal the identifier tsdb still refers to
	// the tsdb package (the short variable declaration is not yet in scope),
	// so tsdb.ErrNotReady is the package-level sentinel.
	tsdb, tsdbWithError, tsdbNotReady := &fakeDB{}, &fakeDB{err: errors.New("some error")}, &fakeDB{err: fmt.Errorf("wrap: %w", tsdb.ErrNotReady)}
	snapshotAPI := func(api *API) apiFunc { return api.snapshot }
	cleanAPI := func(api *API) apiFunc { return api.cleanTombstones }
	deleteAPI := func(api *API) apiFunc { return api.deleteSeries }

	for _, tc := range []struct {
		db          *fakeDB
		enableAdmin bool
		endpoint    func(api *API) apiFunc
		method      string
		values      url.Values
		errType     errorType
	}{
		// Tests for the snapshot endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    snapshotAPI,
			errType:     errorUnavailable,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			errType:     errorNone,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			values:      map[string][]string{"skip_head": {"true"}},
			errType:     errorNone,
		},
		{
			// Non-boolean skip_head is rejected.
			db:          tsdb,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			values:      map[string][]string{"skip_head": {"xxx"}},
			errType:     errorBadData,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			errType:     errorInternal,
		},
		{
			db:          tsdbNotReady,
			enableAdmin: true,
			endpoint:    snapshotAPI,
			errType:     errorUnavailable,
		},
		// Tests for the cleanTombstones endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    cleanAPI,
			errType:     errorUnavailable,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    cleanAPI,
			errType:     errorNone,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    cleanAPI,
			errType:     errorInternal,
		},
		{
			db:          tsdbNotReady,
			enableAdmin: true,
			endpoint:    cleanAPI,
			errType:     errorUnavailable,
		},
		// Tests for the deleteSeries endpoint.
		{
			db:          tsdb,
			enableAdmin: false,
			endpoint:    deleteAPI,
			errType:     errorUnavailable,
		},
		{
			// Missing match[] parameter.
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			errType:     errorBadData,
		},
		{
			// "123" is not a valid series selector.
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"123"}},
			errType:     errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}, "start": {"xxx"}},
			errType:     errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}, "end": {"xxx"}},
			errType:     errorBadData,
		},
		{
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}},
			errType:     errorNone,
		},
		{
			// Multiple selectors with label matchers are accepted.
			db:          tsdb,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up{job!=\"foo\"}", "{job=~\"bar.+\"}", "up{instance!~\"fred.+\"}"}},
			errType:     errorNone,
		},
		{
			db:          tsdbWithError,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}},
			errType:     errorInternal,
		},
		{
			db:          tsdbNotReady,
			enableAdmin: true,
			endpoint:    deleteAPI,
			values:      map[string][]string{"match[]": {"up"}},
			errType:     errorUnavailable,
		},
	} {
		tc := tc
		t.Run("", func(t *testing.T) {
			dir := t.TempDir()

			api := &API{
				db:          tc.db,
				dbDir:       dir,
				ready:       func(f http.HandlerFunc) http.HandlerFunc { return f },
				enableAdmin: tc.enableAdmin,
			}
			endpoint := tc.endpoint(api)
			req, err := http.NewRequest(tc.method, fmt.Sprintf("?%s", tc.values.Encode()), nil)
			require.NoError(t, err)

			// Errors wrapping tsdb.ErrNotReady are translated to 503.
			res := setUnavailStatusOnTSDBNotReady(endpoint(req))
			assertAPIError(t, res.err, tc.errType)
		})
	}
}
2015-06-04 16:07:57 +00:00
// TestRespondSuccess verifies content negotiation in API.respond: the Accept
// header selects among the installed codecs, falling back to JSON when no
// listed content type is both registered and able to encode the payload.
func TestRespondSuccess(t *testing.T) {
	api := API{
		logger: log.NewNopLogger(),
	}
	// Install a known set of codecs: JSON plus three fake "test/*" codecs,
	// one of which refuses to encode the payload.
	api.ClearCodecs()
	api.InstallCodec(JSONCodec{})
	api.InstallCodec(&testCodec{contentType: MIMEType{"test", "cannot-encode"}, canEncode: false})
	api.InstallCodec(&testCodec{contentType: MIMEType{"test", "can-encode"}, canEncode: true})
	api.InstallCodec(&testCodec{contentType: MIMEType{"test", "can-encode-2"}, canEncode: true})

	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		api.respond(w, r, "test", nil, "")
	}))
	defer s.Close()

	for _, tc := range []struct {
		name                string
		acceptHeader        string
		expectedContentType string
		expectedBody        string
	}{
		{
			name:                "no Accept header",
			expectedContentType: "application/json",
			expectedBody:        `{"status":"success","data":"test"}`,
		},
		{
			name:                "Accept header with single content type which is suitable",
			acceptHeader:        "test/can-encode",
			expectedContentType: "test/can-encode",
			expectedBody:        `response from test/can-encode codec`,
		},
		{
			name:                "Accept header with single content type which is not available",
			acceptHeader:        "test/not-registered",
			expectedContentType: "application/json",
			expectedBody:        `{"status":"success","data":"test"}`,
		},
		{
			name:                "Accept header with single content type which cannot encode the response payload",
			acceptHeader:        "test/cannot-encode",
			expectedContentType: "application/json",
			expectedBody:        `{"status":"success","data":"test"}`,
		},
		{
			name:                "Accept header with multiple content types, all of which are suitable",
			acceptHeader:        "test/can-encode, test/can-encode-2",
			expectedContentType: "test/can-encode",
			expectedBody:        `response from test/can-encode codec`,
		},
		{
			name:                "Accept header with multiple content types, only one of which is available",
			acceptHeader:        "test/not-registered, test/can-encode",
			expectedContentType: "test/can-encode",
			expectedBody:        `response from test/can-encode codec`,
		},
		{
			name:                "Accept header with multiple content types, only one of which can encode the response payload",
			acceptHeader:        "test/cannot-encode, test/can-encode",
			expectedContentType: "test/can-encode",
			expectedBody:        `response from test/can-encode codec`,
		},
		{
			name:                "Accept header with multiple content types, none of which are available",
			acceptHeader:        "test/not-registered, test/also-not-registered",
			expectedContentType: "application/json",
			expectedBody:        `{"status":"success","data":"test"}`,
		},
	} {
		t.Run(tc.name, func(t *testing.T) {
			req, err := http.NewRequest(http.MethodGet, s.URL, nil)
			require.NoError(t, err)

			if tc.acceptHeader != "" {
				req.Header.Set("Accept", tc.acceptHeader)
			}

			resp, err := http.DefaultClient.Do(req)
			require.NoError(t, err)
			body, err := io.ReadAll(resp.Body)
			defer resp.Body.Close()
			require.NoError(t, err)

			require.Equal(t, http.StatusOK, resp.StatusCode)
			require.Equal(t, tc.expectedContentType, resp.Header.Get("Content-Type"))
			require.Equal(t, tc.expectedBody, string(body))
		})
	}
}
2023-02-27 02:27:09 +00:00
// TestRespondSuccess_DefaultCodecCannotEncodeResponse checks that when the
// only (default) codec refuses to encode the payload, respond returns
// 406 Not Acceptable with a JSON error body rather than a success response.
func TestRespondSuccess_DefaultCodecCannotEncodeResponse(t *testing.T) {
	api := API{
		logger: log.NewNopLogger(),
	}

	api.ClearCodecs()
	api.InstallCodec(&testCodec{contentType: MIMEType{"application", "default-format"}, canEncode: false})

	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		api.respond(w, r, "test", nil, "")
	}))
	defer s.Close()

	req, err := http.NewRequest(http.MethodGet, s.URL, nil)
	require.NoError(t, err)

	resp, err := http.DefaultClient.Do(req)
	require.NoError(t, err)
	body, err := io.ReadAll(resp.Body)
	defer resp.Body.Close()
	require.NoError(t, err)

	require.Equal(t, http.StatusNotAcceptable, resp.StatusCode)
	// The error itself is still serialized as JSON.
	require.Equal(t, "application/json", resp.Header.Get("Content-Type"))
	require.Equal(t, `{"status":"error","errorType":"not_acceptable","error":"cannot encode response as application/default-format"}`, string(body))
}
// TestRespondError checks that respondError emits the expected HTTP status,
// Content-Type and JSON error envelope for a timeout apiError.
func TestRespondError(t *testing.T) {
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		api := API{}
		api.respondError(w, &apiError{errorTimeout, errors.New("message")}, "test")
	}))
	defer s.Close()

	resp, err := http.Get(s.URL)
	if err != nil {
		t.Fatalf("Error on test request: %s", err)
	}
	body, err := io.ReadAll(resp.Body)
	defer resp.Body.Close()
	if err != nil {
		t.Fatalf("Error reading response body: %s", err)
	}

	// errorTimeout is expected to map to 503 Service Unavailable.
	if want, have := http.StatusServiceUnavailable, resp.StatusCode; want != have {
		t.Fatalf("Return code %d expected in error response but got %d", want, have)
	}
	if h := resp.Header.Get("Content-Type"); h != "application/json" {
		t.Fatalf("Expected Content-Type %q but got %q", "application/json", h)
	}

	var res Response
	if err = json.Unmarshal(body, &res); err != nil {
		t.Fatalf("Error unmarshaling JSON body: %s", err)
	}

	exp := &Response{
		Status:    statusError,
		Data:      "test",
		ErrorType: errorTimeout,
		Error:     "message",
	}
	require.Equal(t, exp, &res)
}
2020-03-06 10:33:01 +00:00
// TestParseTimeParam covers parseTimeParam: a valid millisecond timestamp, an
// absent (empty) parameter falling back to the default, and an unparsable
// value surfacing a wrapped parse error.
func TestParseTimeParam(t *testing.T) {
	type resultType struct {
		asTime  time.Time
		asError func() error
	}

	ts, err := parseTime("1582468023986")
	require.NoError(t, err)

	tests := []struct {
		paramName    string
		paramValue   string
		defaultValue time.Time
		result       resultType
	}{
		{ // When data is valid.
			paramName:    "start",
			paramValue:   "1582468023986",
			defaultValue: MinTime,
			result: resultType{
				asTime:  ts,
				asError: nil,
			},
		},
		{ // When data is empty string.
			paramName:    "end",
			paramValue:   "",
			defaultValue: MaxTime,
			result: resultType{
				asTime:  MaxTime,
				asError: nil,
			},
		},
		{ // When data is not valid.
			paramName:    "foo",
			paramValue:   "baz",
			defaultValue: MaxTime,
			result: resultType{
				asTime: time.Time{},
				asError: func() error {
					// Rebuild the same wrapped error parseTimeParam produces.
					_, err := parseTime("baz")
					return fmt.Errorf("Invalid time value for '%s': %w", "foo", err)
				},
			},
		},
	}

	for _, test := range tests {
		req, err := http.NewRequest("GET", "localhost:42/foo?"+test.paramName+"="+test.paramValue, nil)
		require.NoError(t, err)

		result := test.result
		asTime, err := parseTimeParam(req, test.paramName, test.defaultValue)

		if err != nil {
			require.EqualError(t, err, result.asError().Error())
		} else {
			require.True(t, asTime.Equal(result.asTime), "time as return value: %s not parsed correctly. Expected %s. Actual %s", test.paramValue, result.asTime, asTime)
		}
	}
}
2015-06-04 16:07:57 +00:00
// TestParseTime covers parseTime's accepted formats: Unix seconds (with
// optional fractional part, rounded to nanoseconds) and RFC 3339 timestamps,
// plus rejection of empty, non-numeric and duration-like inputs.
func TestParseTime(t *testing.T) {
	ts, err := time.Parse(time.RFC3339Nano, "2015-06-03T13:21:58.555Z")
	if err != nil {
		panic(err)
	}

	tests := []struct {
		input  string
		fail   bool
		result time.Time
	}{
		{
			input: "",
			fail:  true,
		},
		{
			input: "abc",
			fail:  true,
		},
		{
			// A duration is not a timestamp.
			input: "30s",
			fail:  true,
		},
		{
			input:  "123",
			result: time.Unix(123, 0),
		},
		{
			input:  "123.123",
			result: time.Unix(123, 123000000),
		},
		{
			input:  "2015-06-03T13:21:58.555Z",
			result: ts,
		},
		{
			// Same instant, expressed with a +01:00 offset.
			input:  "2015-06-03T14:21:58.555+01:00",
			result: ts,
		},
		{
			// Test float rounding.
			input:  "1543578564.705",
			result: time.Unix(1543578564, 705*1e6),
		},
		{
			input:  MinTime.Format(time.RFC3339Nano),
			result: MinTime,
		},
		{
			input:  MaxTime.Format(time.RFC3339Nano),
			result: MaxTime,
		},
	}

	for _, test := range tests {
		ts, err := parseTime(test.input)
		if err != nil && !test.fail {
			t.Errorf("Unexpected error for %q: %s", test.input, err)
			continue
		}
		if err == nil && test.fail {
			t.Errorf("Expected error for %q but got none", test.input)
			continue
		}
		if !test.fail && !ts.Equal(test.result) {
			t.Errorf("Expected time %v for input %q but got %v", test.result, test.input, ts)
		}
	}
}
// TestParseDuration covers parseDuration: plain seconds (with fractional
// part), Prometheus-style duration strings, and rejection of empty,
// non-numeric, timestamp and int64-overflowing inputs.
func TestParseDuration(t *testing.T) {
	tests := []struct {
		input  string
		fail   bool
		result time.Duration
	}{
		{
			input: "",
			fail:  true,
		}, {
			input: "abc",
			fail:  true,
		}, {
			// A timestamp is not a duration.
			input: "2015-06-03T13:21:58.555Z",
			fail:  true,
		}, {
			// Internal int64 overflow.
			input: "-148966367200.372",
			fail:  true,
		}, {
			// Internal int64 overflow.
			input: "148966367200.372",
			fail:  true,
		}, {
			input:  "123",
			result: 123 * time.Second,
		}, {
			input:  "123.333",
			result: 123*time.Second + 333*time.Millisecond,
		}, {
			input:  "15s",
			result: 15 * time.Second,
		}, {
			input:  "5m",
			result: 5 * time.Minute,
		},
	}

	for _, test := range tests {
		d, err := parseDuration(test.input)
		if err != nil && !test.fail {
			t.Errorf("Unexpected error for %q: %s", test.input, err)
			continue
		}
		if err == nil && test.fail {
			t.Errorf("Expected error for %q but got none", test.input)
			continue
		}
		if !test.fail && d != test.result {
			t.Errorf("Expected duration %v for input %q but got %v", test.result, test.input, d)
		}
	}
}
2016-01-26 00:32:46 +00:00
func TestOptionsMethod ( t * testing . T ) {
2017-05-02 23:49:29 +00:00
r := route . New ( )
2017-10-06 15:20:20 +00:00
api := & API { ready : func ( f http . HandlerFunc ) http . HandlerFunc { return f } }
2016-01-26 00:32:46 +00:00
api . Register ( r )
s := httptest . NewServer ( r )
defer s . Close ( )
req , err := http . NewRequest ( "OPTIONS" , s . URL + "/any_path" , nil )
if err != nil {
t . Fatalf ( "Error creating OPTIONS request: %s" , err )
}
client := & http . Client { }
resp , err := client . Do ( req )
if err != nil {
t . Fatalf ( "Error executing OPTIONS request: %s" , err )
}
if resp . StatusCode != http . StatusNoContent {
t . Fatalf ( "Expected status %d, got %d" , http . StatusNoContent , resp . StatusCode )
}
}
2018-02-07 15:40:36 +00:00
2019-11-12 10:15:20 +00:00
// TestTSDBStatus exercises the TSDB stats endpoint with the default limit, an
// explicit valid limit, and an invalid (zero) limit.
func TestTSDBStatus(t *testing.T) {
	tsdb := &fakeDB{}
	tsdbStatusAPI := func(api *API) apiFunc { return api.serveTSDBStatus }

	for i, tc := range []struct {
		db       *fakeDB
		endpoint func(api *API) apiFunc
		method   string
		values   url.Values
		errType  errorType
	}{
		// Tests for the TSDB Status endpoint.
		{
			db:       tsdb,
			endpoint: tsdbStatusAPI,
			errType:  errorNone,
		},
		{
			db:       tsdb,
			endpoint: tsdbStatusAPI,
			values:   map[string][]string{"limit": {"20"}},
			errType:  errorNone,
		},
		{
			// A non-positive limit is rejected.
			db:       tsdb,
			endpoint: tsdbStatusAPI,
			values:   map[string][]string{"limit": {"0"}},
			errType:  errorBadData,
		},
	} {
		tc := tc
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			api := &API{db: tc.db, gatherer: prometheus.DefaultGatherer}
			endpoint := tc.endpoint(api)
			req, err := http.NewRequest(tc.method, fmt.Sprintf("?%s", tc.values.Encode()), nil)
			if err != nil {
				t.Fatalf("Error when creating test request: %s", err)
			}
			res := endpoint(req)
			assertAPIError(t, res.err, tc.errType)
		})
	}
}
2020-06-22 14:29:35 +00:00
// TestReturnAPIError checks the mapping from promql error kinds — both plain
// and %w-wrapped — to API error types: storage errors become internal,
// timeouts become timeout, cancellations become canceled, and anything else
// becomes an exec error.
func TestReturnAPIError(t *testing.T) {
	cases := []struct {
		err      error
		expected errorType
	}{
		{
			err:      promql.ErrStorage{Err: errors.New("storage error")},
			expected: errorInternal,
		}, {
			err:      fmt.Errorf("wrapped: %w", promql.ErrStorage{Err: errors.New("storage error")}),
			expected: errorInternal,
		}, {
			err:      promql.ErrQueryTimeout("timeout error"),
			expected: errorTimeout,
		}, {
			err:      fmt.Errorf("wrapped: %w", promql.ErrQueryTimeout("timeout error")),
			expected: errorTimeout,
		}, {
			err:      promql.ErrQueryCanceled("canceled error"),
			expected: errorCanceled,
		}, {
			err:      fmt.Errorf("wrapped: %w", promql.ErrQueryCanceled("canceled error")),
			expected: errorCanceled,
		}, {
			err:      errors.New("exec error"),
			expected: errorExec,
		},
	}

	for ix, c := range cases {
		actual := returnAPIError(c.err)
		require.Error(t, actual, ix)
		require.Equal(t, c.expected, actual.typ, ix)
	}
}
2018-02-07 15:40:36 +00:00
// This is a global to avoid the benchmark being optimized away.
var testResponseWriter = httptest.ResponseRecorder{}
// BenchmarkRespond measures JSON encoding of API responses for three payload
// shapes: one series with 10000 float points, 1000 bare label sets, and a
// matrix of 1000 series with 10 points each.
func BenchmarkRespond(b *testing.B) {
	points := []promql.FPoint{}
	for i := 0; i < 10000; i++ {
		points = append(points, promql.FPoint{F: float64(i * 1000000), T: int64(i)})
	}
	matrix := promql.Matrix{}
	for i := 0; i < 1000; i++ {
		matrix = append(matrix, promql.Series{
			Metric: labels.FromStrings("__name__", fmt.Sprintf("series%v", i),
				"label", fmt.Sprintf("series%v", i),
				"label2", fmt.Sprintf("series%v", i)),
			// All series share the same 10-point prefix of the backing slice.
			Floats: points[:10],
		})
	}
	series := []labels.Labels{}
	for i := 0; i < 1000; i++ {
		series = append(series, labels.FromStrings("__name__", fmt.Sprintf("series%v", i),
			"label", fmt.Sprintf("series%v", i),
			"label2", fmt.Sprintf("series%v", i)))
	}

	cases := []struct {
		name     string
		response interface{}
	}{
		{name: "10000 points no labels", response: &QueryData{
			ResultType: parser.ValueTypeMatrix,
			Result: promql.Matrix{
				promql.Series{
					Floats: points,
					Metric: labels.EmptyLabels(),
				},
			},
		}},
		{name: "1000 labels", response: series},
		{name: "1000 series 10 points", response: &QueryData{
			ResultType: parser.ValueTypeMatrix,
			Result:     matrix,
		}},
	}

	for _, c := range cases {
		b.Run(c.name, func(b *testing.B) {
			b.ReportAllocs()
			request, err := http.NewRequest(http.MethodGet, "/does-not-matter", nil)
			require.NoError(b, err)
			b.ResetTimer()
			api := API{}
			api.InstallCodec(JSONCodec{})
			for n := 0; n < b.N; n++ {
				// Writes into the package-level recorder so the call is not
				// optimized away.
				api.respond(&testResponseWriter, request, c.response, nil, "")
			}
		})
	}
}
2021-02-05 11:45:44 +00:00
// TestGetGlobalURL checks that getGlobalURL rewrites URLs that point at the
// local listen address (or localhost) to use the externally visible host and
// scheme, while leaving unrelated URLs untouched.
func TestGetGlobalURL(t *testing.T) {
	parse := func(t *testing.T, raw string) *url.URL {
		u, err := url.Parse(raw)
		require.NoError(t, err)
		return u
	}

	cases := []struct {
		input    *url.URL
		opts     GlobalURLOptions
		expected *url.URL
		errorful bool
	}{
		{
			// External host equals the listen address: URL unchanged.
			input: parse(t, "http://127.0.0.1:9090"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "127.0.0.1:9090",
				Scheme:        "http",
			},
			expected: parse(t, "http://127.0.0.1:9090"),
		},
		{
			// Listen address rewritten to the external host and scheme.
			input: parse(t, "http://127.0.0.1:9090"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "prometheus.io",
				Scheme:        "https",
			},
			expected: parse(t, "https://prometheus.io"),
		},
		{
			// A URL for an unrelated host stays as-is.
			input: parse(t, "http://exemple.com"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "prometheus.io",
				Scheme:        "https",
			},
			expected: parse(t, "http://exemple.com"),
		},
		{
			// localhost with an explicit port: host swapped, port kept.
			input: parse(t, "http://localhost:8080"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "prometheus.io",
				Scheme:        "https",
			},
			expected: parse(t, "http://prometheus.io:8080"),
		},
		{
			// IPv6 loopback with an explicit port behaves like localhost.
			input: parse(t, "http://[::1]:8080"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "prometheus.io",
				Scheme:        "https",
			},
			expected: parse(t, "http://prometheus.io:8080"),
		},
		{
			// localhost without a port.
			input: parse(t, "http://localhost"),
			opts: GlobalURLOptions{
				ListenAddress: "127.0.0.1:9090",
				Host:          "prometheus.io",
				Scheme:        "https",
			},
			expected: parse(t, "http://prometheus.io"),
		},
		{
			// IPv6 external host, no port configured.
			input: parse(t, "http://localhost:9091"),
			opts: GlobalURLOptions{
				ListenAddress: "[::1]:9090",
				Host:          "[::1]",
				Scheme:        "https",
			},
			expected: parse(t, "http://[::1]:9091"),
		},
		{
			// IPv6 external host with a port configured.
			input: parse(t, "http://localhost:9091"),
			opts: GlobalURLOptions{
				ListenAddress: "[::1]:9090",
				Host:          "[::1]:9090",
				Scheme:        "https",
			},
			expected: parse(t, "http://[::1]:9091"),
		},
	}

	for i, c := range cases {
		t.Run(fmt.Sprintf("Test %d", i), func(t *testing.T) {
			got, err := getGlobalURL(c.input, c.opts)
			if c.errorful {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			require.Equal(t, c.expected, got)
		})
	}
}
2023-03-07 23:28:31 +00:00
2023-01-25 03:30:47 +00:00
type testCodec struct {
2023-02-27 02:27:09 +00:00
contentType MIMEType
2023-01-25 03:30:47 +00:00
canEncode bool
}
2023-02-27 02:27:09 +00:00
func ( t * testCodec ) ContentType ( ) MIMEType {
2023-01-25 03:30:47 +00:00
return t . contentType
}
func ( t * testCodec ) CanEncode ( _ * Response ) bool {
return t . canEncode
}
func ( t * testCodec ) Encode ( _ * Response ) ( [ ] byte , error ) {
return [ ] byte ( fmt . Sprintf ( "response from %v codec" , t . contentType ) ) , nil
}
2023-03-09 01:06:26 +00:00
2023-03-07 23:28:31 +00:00
func TestExtractQueryOpts ( t * testing . T ) {
tests := [ ] struct {
name string
form url . Values
2023-07-03 12:56:06 +00:00
expect promql . QueryOpts
2023-03-07 23:28:31 +00:00
err error
} {
{
name : "with stats all" ,
form : url . Values {
"stats" : [ ] string { "all" } ,
} ,
2023-07-03 12:56:06 +00:00
expect : promql . NewPrometheusQueryOpts ( true , 0 ) ,
2023-03-07 23:28:31 +00:00
err : nil ,
} ,
{
name : "with stats none" ,
form : url . Values {
"stats" : [ ] string { "none" } ,
} ,
2023-07-03 12:56:06 +00:00
expect : promql . NewPrometheusQueryOpts ( false , 0 ) ,
err : nil ,
2023-03-07 23:28:31 +00:00
} ,
{
name : "with lookback delta" ,
form : url . Values {
"stats" : [ ] string { "all" } ,
"lookback_delta" : [ ] string { "30s" } ,
} ,
2023-07-03 12:56:06 +00:00
expect : promql . NewPrometheusQueryOpts ( true , 30 * time . Second ) ,
err : nil ,
2023-03-07 23:28:31 +00:00
} ,
{
name : "with invalid lookback delta" ,
form : url . Values {
"lookback_delta" : [ ] string { "invalid" } ,
} ,
expect : nil ,
err : errors . New ( ` error parsing lookback delta duration: cannot parse "invalid" to a valid duration ` ) ,
} ,
}
for _ , test := range tests {
t . Run ( test . name , func ( t * testing . T ) {
req := & http . Request { Form : test . form }
opts , err := extractQueryOpts ( req )
require . Equal ( t , test . expect , opts )
if test . err == nil {
require . NoError ( t , err )
} else {
require . Equal ( t , test . err . Error ( ) , err . Error ( ) )
}
} )
}
}
2023-10-04 08:36:55 +00:00
// Test query timeout parameter: the "timeout" form value must be applied as a
// deadline on the context handed to the query engine's Exec call.
func TestQueryTimeout(t *testing.T) {
	storage := promql.LoadedStorage(t, `
		load 1m
			test_metric1{foo="bar"} 0+100x100
	`)
	t.Cleanup(func() {
		_ = storage.Close()
	})

	now := time.Now()

	for _, tc := range []struct {
		name   string
		method string
	}{
		{name: "GET method", method: http.MethodGet},
		{name: "POST method", method: http.MethodPost},
	} {
		t.Run(tc.name, func(t *testing.T) {
			// A fake engine records the contexts passed to Exec so the
			// deadline can be inspected afterwards.
			eng := &fakeEngine{}
			api := &API{
				Queryable:             storage,
				QueryEngine:           eng,
				ExemplarQueryable:     storage.ExemplarQueryable(),
				alertmanagerRetriever: testAlertmanagerRetriever{}.toFactory(),
				flagsMap:              sampleFlagMap,
				now:                   func() time.Time { return now },
				config:                func() config.Config { return samplePrometheusCfg },
				ready:                 func(f http.HandlerFunc) http.HandlerFunc { return f },
			}

			params := url.Values{
				"query":   []string{"2"},
				"timeout": []string{"1s"},
			}
			req, err := http.NewRequest(tc.method, fmt.Sprintf("http://example.com?%s", params.Encode()), nil)
			require.NoError(t, err)
			req.RemoteAddr = "127.0.0.1:20201"

			result := api.query(req.WithContext(context.Background()))
			assertAPIError(t, result.err, errorNone)

			// Exactly one query was executed, with deadline = now + timeout.
			require.Len(t, eng.query.execCalls, 1)
			deadline, ok := eng.query.execCalls[0].Deadline()
			require.True(t, ok)
			require.Equal(t, now.Add(time.Second), deadline)
		})
	}
}
// fakeEngine is a fake QueryEngine implementation. Both query constructors
// hand back the same embedded fakeQuery, so tests can inspect what was
// executed through it.
type fakeEngine struct {
	query fakeQuery
}

// SetQueryLogger is a no-op.
func (f *fakeEngine) SetQueryLogger(promql.QueryLogger) {}

// NewInstantQuery ignores its arguments and returns the shared fake query.
func (f *fakeEngine) NewInstantQuery(_ context.Context, _ storage.Queryable, _ promql.QueryOpts, _ string, _ time.Time) (promql.Query, error) {
	return &f.query, nil
}

// NewRangeQuery ignores its arguments and returns the shared fake query.
func (f *fakeEngine) NewRangeQuery(_ context.Context, _ storage.Queryable, _ promql.QueryOpts, _ string, _, _ time.Time, _ time.Duration) (promql.Query, error) {
	return &f.query, nil
}
// fakeQuery is a fake Query implementation. It records every context passed
// to Exec and always returns a fixed string result.
type fakeQuery struct {
	query     string            // returned by String
	execCalls []context.Context // one entry per Exec invocation
}

// Exec records ctx for later inspection and returns a canned result.
func (fq *fakeQuery) Exec(ctx context.Context) *promql.Result {
	fq.execCalls = append(fq.execCalls, ctx)
	return &promql.Result{
		Value: &parser.StringLiteral{Val: "test"},
	}
}

// Close is a no-op.
func (fq *fakeQuery) Close() {}

// Statement always reports no statement.
func (fq *fakeQuery) Statement() parser.Statement { return nil }

// Stats always reports no statistics.
func (fq *fakeQuery) Stats() *stats.Statistics { return nil }

// Cancel is a no-op.
func (fq *fakeQuery) Cancel() {}

// String returns the query text the fake was configured with.
func (fq *fakeQuery) String() string { return fq.query }