-h
, --help
| Show context-sensitive help (also try --help-long and --help-man). | |
| --version
| Show application version. | |
| --config.file
| Prometheus configuration file path. | `prometheus.yml` |
+| --config.auto-reload-interval
| Specifies the interval for checking and automatically reloading the Prometheus configuration file upon detecting changes. | `30s` |
| --web.listen-address
... | Address to listen on for UI, API, and telemetry. Can be repeated. | `0.0.0.0:9090` |
| --auto-gomemlimit.ratio
| The ratio of reserved GOMEMLIMIT memory to the detected maximum container or system memory | `0.9` |
| --web.config.file
| [EXPERIMENTAL] Path to configuration file that can enable TLS or authentication. | |
diff --git a/docs/command-line/promtool.md b/docs/command-line/promtool.md
index 6d74200e6..e48cede79 100644
--- a/docs/command-line/promtool.md
+++ b/docs/command-line/promtool.md
@@ -575,7 +575,7 @@ Dump samples from a TSDB.
| Flag | Description | Default |
| --- | --- | --- |
-| --sandbox-dir-root
| Root directory where a sandbox directory would be created in case WAL replay generates chunks. The sandbox directory is cleaned up at the end. | `data/` |
+| --sandbox-dir-root
| Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end. | |
| --min-time
| Minimum timestamp to dump. | `-9223372036854775808` |
| --max-time
| Maximum timestamp to dump. | `9223372036854775807` |
| --match
... | Series selector. Can be specified multiple times. | `{__name__=~'(?s:.*)'}` |
@@ -602,7 +602,7 @@ Dump samples from a TSDB.
| Flag | Description | Default |
| --- | --- | --- |
-| --sandbox-dir-root
| Root directory where a sandbox directory would be created in case WAL replay generates chunks. The sandbox directory is cleaned up at the end. | `data/` |
+| --sandbox-dir-root
| Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end. | |
| --min-time
| Minimum timestamp to dump. | `-9223372036854775808` |
| --max-time
| Maximum timestamp to dump. | `9223372036854775807` |
| --match
... | Series selector. Can be specified multiple times. | `{__name__=~'(?s:.*)'}` |
diff --git a/docs/configuration/configuration.md b/docs/configuration/configuration.md
index 64a11f1c3..80bb3afee 100644
--- a/docs/configuration/configuration.md
+++ b/docs/configuration/configuration.md
@@ -84,6 +84,10 @@ global:
# Reloading the configuration will reopen the file.
[ query_log_file: ]
+ # File to which scrape failures are logged.
+ # Reloading the configuration will reopen the file.
+ [ scrape_failure_log_file: ]
+
# An uncompressed response body larger than this many bytes will cause the
# scrape to fail. 0 means no limit. Example: 100MB.
# This is an experimental feature, this behaviour could
@@ -319,6 +323,10 @@ http_headers:
# Files to read header values from.
[ files: [, ...] ] ]
+# File to which scrape failures are logged.
+# Reloading the configuration will reopen the file.
+[ scrape_failure_log_file: ]
+
# List of Azure service discovery configurations.
azure_sd_configs:
[ - ... ]
@@ -608,6 +616,18 @@ tls_config:
# Specifies headers to send to proxies during CONNECT requests.
[ proxy_connect_header:
[ : [, ...] ] ]
+
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
```
### ``
@@ -699,6 +719,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -812,6 +844,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -899,6 +943,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -957,6 +1013,18 @@ host:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# TLS configuration.
tls_config:
[ ]
@@ -1137,6 +1205,18 @@ host:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# TLS configuration.
tls_config:
[ ]
@@ -1346,6 +1426,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -1623,6 +1715,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -1849,6 +1953,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -1943,6 +2059,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2026,6 +2154,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2264,6 +2404,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2355,6 +2507,18 @@ server:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# TLS configuration.
tls_config:
[ ]
@@ -2482,6 +2646,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2571,6 +2747,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2678,6 +2866,18 @@ tls_config:
# Specifies headers to send to proxies during CONNECT requests.
[ proxy_connect_header:
[ : [, ...] ] ]
+
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
```
By default every app listed in Marathon will be scraped by Prometheus. If not all
@@ -2777,6 +2977,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -2963,6 +3175,18 @@ tls_config:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -3089,6 +3313,18 @@ tags_filter:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# TLS configuration.
tls_config:
[ ]
@@ -3165,6 +3401,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -3247,6 +3495,18 @@ oauth2:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -3472,6 +3732,18 @@ tls_config:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -3735,6 +4007,18 @@ tls_config:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
@@ -3856,6 +4140,18 @@ tls_config:
[ proxy_connect_header:
[ : [, ...] ] ]
+# Custom HTTP headers to be sent along with each request.
+# Headers that are set by Prometheus itself can't be overwritten.
+http_headers:
+ # Header name.
+ [ :
+ # Header values.
+ [ values: [, ...] ]
+ # Headers values. Hidden in configuration page.
+ [ secrets: [, ...] ]
+ # Files to read header values from.
+ [ files: [, ...] ] ]
+
# Configure whether HTTP requests follow HTTP 3xx redirects.
[ follow_redirects: | default = true ]
diff --git a/docs/feature_flags.md b/docs/feature_flags.md
index 46905fb83..ba140c992 100644
--- a/docs/feature_flags.md
+++ b/docs/feature_flags.md
@@ -171,7 +171,7 @@ This should **only** be applied to metrics that currently produce such labels.
`--enable-feature=otlp-write-receiver`
The OTLP receiver allows Prometheus to accept [OpenTelemetry](https://opentelemetry.io/) metrics writes.
-Prometheus is best used as a Pull based system, and staleness, `up` metric, and other Pull enabled features
+Prometheus is best used as a Pull based system, and staleness, `up` metric, and other Pull enabled features
won't work when you push OTLP metrics.
## Experimental PromQL functions
@@ -204,6 +204,12 @@ This has the potential to improve rule group evaluation latency and resource uti
The number of concurrent rule evaluations can be configured with `--rules.max-concurrent-rule-evals`, which is set to `4` by default.
+## Serve old Prometheus UI
+
+Fall back to serving the old (Prometheus 2.x) web UI instead of the new UI. The new UI that was released as part of Prometheus 3.0 is a complete rewrite and aims to be cleaner, less cluttered, and more modern under the hood. However, it is not fully feature complete and battle-tested yet, so some users may still prefer using the old UI.
+
+`--enable-feature=old-ui`
+
## Metadata WAL Records
`--enable-feature=metadata-wal-records`
@@ -236,3 +242,15 @@ When enabled, Prometheus will change the way in which the `__name__` label is re
This allows optionally preserving the `__name__` label via the `label_replace` and `label_join` functions, and helps prevent the "vector cannot contain metrics with the same labelset" error, which can happen when applying a regex-matcher to the `__name__` label.
+## Auto Reload Config
+
+`--enable-feature=auto-reload-config`
+
+When enabled, Prometheus will automatically reload its configuration file at a
+specified interval. The interval is defined by the
+`--config.auto-reload-interval` flag, which defaults to `30s`.
+
+Configuration reloads are triggered by detecting changes in the checksum of the
+main configuration file or any referenced files, such as rule and scrape
+configurations. To ensure consistency and avoid issues during reloads, it's
+recommended to update these files atomically.
diff --git a/docs/querying/api.md b/docs/querying/api.md
index efa244fbc..e32c8ecaf 100644
--- a/docs/querying/api.md
+++ b/docs/querying/api.md
@@ -239,6 +239,75 @@ $ curl 'http://localhost:9090/api/v1/format_query?query=foo/bar'
}
```
+## Parsing a PromQL expression into an abstract syntax tree (AST)
+
+This endpoint is **experimental** and might change in the future. It is currently only meant to be used by Prometheus' own web UI, and the endpoint name and exact format returned may change from one Prometheus version to another. It may also be removed again in case it is no longer needed by the UI.
+
+The following endpoint parses a PromQL expression and returns it as a JSON-formatted AST (abstract syntax tree) representation:
+
+```
+GET /api/v1/parse_query
+POST /api/v1/parse_query
+```
+
+URL query parameters:
+
+- `query=`: Prometheus expression query string.
+
+You can URL-encode these parameters directly in the request body by using the `POST` method and
+`Content-Type: application/x-www-form-urlencoded` header. This is useful when specifying a large
+query that may breach server-side URL character limits.
+
+The `data` section of the query result is a JSON object representing the AST of the parsed query expression.
+
+The following example parses the expression `foo/bar`:
+
+```json
+$ curl 'http://localhost:9090/api/v1/parse_query?query=foo/bar'
+{
+ "data" : {
+ "bool" : false,
+ "lhs" : {
+ "matchers" : [
+ {
+ "name" : "__name__",
+ "type" : "=",
+ "value" : "foo"
+ }
+ ],
+ "name" : "foo",
+ "offset" : 0,
+ "startOrEnd" : null,
+ "timestamp" : null,
+ "type" : "vectorSelector"
+ },
+ "matching" : {
+ "card" : "one-to-one",
+ "include" : [],
+ "labels" : [],
+ "on" : false
+ },
+ "op" : "/",
+ "rhs" : {
+ "matchers" : [
+ {
+ "name" : "__name__",
+ "type" : "=",
+ "value" : "bar"
+ }
+ ],
+ "name" : "bar",
+ "offset" : 0,
+ "startOrEnd" : null,
+ "timestamp" : null,
+ "type" : "vectorSelector"
+ },
+ "type" : "binaryExpr"
+ },
+ "status" : "success"
+}
+```
+
## Querying metadata
Prometheus offers a set of API endpoints to query metadata about series and their labels.
@@ -693,7 +762,7 @@ URL query parameters:
- `rule_name[]=`: only return rules with the given rule name. If the parameter is repeated, rules with any of the provided names are returned. If we've filtered out all the rules of a group, the group is not returned. When the parameter is absent or empty, no filtering is done.
- `rule_group[]=`: only return rules with the given rule group name. If the parameter is repeated, rules with any of the provided rule group names are returned. When the parameter is absent or empty, no filtering is done.
- `file[]=`: only return rules with the given filepath. If the parameter is repeated, rules with any of the provided filepaths are returned. When the parameter is absent or empty, no filtering is done.
-- `exclude_alerts=`: only return rules, do not return active alerts.
+- `exclude_alerts=`: only return rules, do not return active alerts.
- `match[]=`: only return rules that have configured labels that satisfy the label selectors. If the parameter is repeated, rules that match any of the sets of label selectors are returned. Note that matching is on the labels in the definition of each rule, not on the values after template expansion (for alerting rules). Optional.
```json
diff --git a/docs/storage.md b/docs/storage.md
index 8cac19422..d5193bf5b 100644
--- a/docs/storage.md
+++ b/docs/storage.md
@@ -155,31 +155,27 @@ a set of interfaces that allow integrating with remote storage systems.
### Overview
-Prometheus integrates with remote storage systems in three ways:
+Prometheus integrates with remote storage systems in four ways:
-- Prometheus can write samples that it ingests to a remote URL in a standardized format.
-- Prometheus can receive samples from other Prometheus servers in a standardized format.
-- Prometheus can read (back) sample data from a remote URL in a standardized format.
+- Prometheus can write samples that it ingests to a remote URL in a [Remote Write format](https://prometheus.io/docs/specs/remote_write_spec_2_0/).
+- Prometheus can receive samples from other clients in a [Remote Write format](https://prometheus.io/docs/specs/remote_write_spec_2_0/).
+- Prometheus can read (back) sample data from a remote URL in a [Remote Read format](https://github.com/prometheus/prometheus/blob/main/prompb/remote.proto#L31).
+- Prometheus can return sample data requested by clients in a [Remote Read format](https://github.com/prometheus/prometheus/blob/main/prompb/remote.proto#L31).
![Remote read and write architecture](images/remote_integrations.png)
-The read and write protocols both use a snappy-compressed protocol buffer encoding over
-HTTP. The protocols are not considered as stable APIs yet and may change to use gRPC
-over HTTP/2 in the future, when all hops between Prometheus and the remote storage can
-safely be assumed to support HTTP/2.
+The remote read and write protocols both use a snappy-compressed protocol buffer encoding over
+HTTP. The read protocol is not yet considered a stable API.
-For details on configuring remote storage integrations in Prometheus, see the
+The write protocol has a [stable specification for 1.0 version](https://prometheus.io/docs/specs/remote_write_spec/)
+and [experimental specification for 2.0 version](https://prometheus.io/docs/specs/remote_write_spec_2_0/),
+both supported by Prometheus server.
+
+For details on configuring remote storage integrations in Prometheus as a client, see the
[remote write](configuration/configuration.md#remote_write) and
[remote read](configuration/configuration.md#remote_read) sections of the Prometheus
configuration documentation.
-The built-in remote write receiver can be enabled by setting the
-`--web.enable-remote-write-receiver` command line flag. When enabled,
-the remote write receiver endpoint is `/api/v1/write`.
-
-For details on the request and response messages, see the
-[remote storage protocol buffer definitions](https://github.com/prometheus/prometheus/blob/main/prompb/remote.proto).
-
Note that on the read path, Prometheus only fetches raw series data for a set of
label selectors and time ranges from the remote end. All PromQL evaluation on the
raw data still happens in Prometheus itself. This means that remote read queries
@@ -187,6 +183,11 @@ have some scalability limit, since all necessary data needs to be loaded into th
querying Prometheus server first and then processed there. However, supporting
fully distributed evaluation of PromQL was deemed infeasible for the time being.
+Prometheus also serves both protocols. The built-in remote write receiver can be enabled
+by setting the `--web.enable-remote-write-receiver` command line flag. When enabled,
+the remote write receiver endpoint is `/api/v1/write`. The remote read endpoint is
+available on [`/api/v1/read`](https://prometheus.io/docs/prometheus/latest/querying/remote_read_api/).
+
### Existing integrations
To learn more about existing integrations with remote storage systems, see the
diff --git a/documentation/examples/remote_storage/go.mod b/documentation/examples/remote_storage/go.mod
index e5e052469..8ed5084d9 100644
--- a/documentation/examples/remote_storage/go.mod
+++ b/documentation/examples/remote_storage/go.mod
@@ -1,14 +1,14 @@
module github.com/prometheus/prometheus/documentation/examples/remote_storage
-go 1.21.0
+go 1.22.0
require (
github.com/alecthomas/kingpin/v2 v2.4.0
github.com/go-kit/log v0.2.1
github.com/gogo/protobuf v1.3.2
github.com/golang/snappy v0.0.4
- github.com/influxdata/influxdb v1.11.5
- github.com/prometheus/client_golang v1.20.0
+ github.com/influxdata/influxdb v1.11.6
+ github.com/prometheus/client_golang v1.20.2
github.com/prometheus/common v0.57.0
github.com/prometheus/prometheus v0.53.1
github.com/stretchr/testify v1.9.0
diff --git a/documentation/examples/remote_storage/go.sum b/documentation/examples/remote_storage/go.sum
index 34c474ef8..1abeff7eb 100644
--- a/documentation/examples/remote_storage/go.sum
+++ b/documentation/examples/remote_storage/go.sum
@@ -166,8 +166,8 @@ github.com/hetznercloud/hcloud-go/v2 v2.9.0 h1:s0N6R7Zoi2DPfMtUF5o9VeUBzTtHVY6MI
github.com/hetznercloud/hcloud-go/v2 v2.9.0/go.mod h1:qtW/TuU7Bs16ibXl/ktJarWqU2LwHr7eGlwoilHxtgg=
github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4=
github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
-github.com/influxdata/influxdb v1.11.5 h1:+em5VOl6lhAZubXj5o6SobCwvrRs3XDlBx/MUI4schI=
-github.com/influxdata/influxdb v1.11.5/go.mod h1:k8sWREQl1/9t46VrkrH5adUM4UNGIt206ipO3plbkw8=
+github.com/influxdata/influxdb v1.11.6 h1:zS5MRY+RQ5/XFTer5R8xQRnY17JYSbacvO6OaP164wU=
+github.com/influxdata/influxdb v1.11.6/go.mod h1:F10NoQb9qa04lME3pTPWQrYt4JZ/ke1Eei+1ttgHHrg=
github.com/ionos-cloud/sdk-go/v6 v6.1.11 h1:J/uRN4UWO3wCyGOeDdMKv8LWRzKu6UIkLEaes38Kzh8=
github.com/ionos-cloud/sdk-go/v6 v6.1.11/go.mod h1:EzEgRIDxBELvfoa/uBN0kOQaqovLjUWEB7iW4/Q+t4k=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
@@ -253,8 +253,8 @@ github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXP
github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
-github.com/prometheus/client_golang v1.20.0 h1:jBzTZ7B099Rg24tny+qngoynol8LtVYlA2bqx3vEloI=
-github.com/prometheus/client_golang v1.20.0/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
+github.com/prometheus/client_golang v1.20.2 h1:5ctymQzZlyOON1666svgwn3s6IKWgfbjsejTMiXIyjg=
+github.com/prometheus/client_golang v1.20.2/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
diff --git a/go.mod b/go.mod
index 9a4a35c4d..845e3277b 100644
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,11 @@
module github.com/prometheus/prometheus
-go 1.21.0
+go 1.22.0
-toolchain go1.22.5
+toolchain go1.23.0
require (
- github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0
+ github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v5 v5.7.0
github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/network/armnetwork/v4 v4.3.0
@@ -17,10 +17,10 @@ require (
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3
github.com/cespare/xxhash/v2 v2.3.0
github.com/dennwc/varint v1.0.0
- github.com/digitalocean/godo v1.119.0
- github.com/docker/docker v27.1.1+incompatible
+ github.com/digitalocean/godo v1.122.0
+ github.com/docker/docker v27.2.0+incompatible
github.com/edsrzf/mmap-go v1.1.0
- github.com/envoyproxy/go-control-plane v0.12.0
+ github.com/envoyproxy/go-control-plane v0.13.0
github.com/envoyproxy/protoc-gen-validate v1.1.0
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb
github.com/fsnotify/fsnotify v1.7.0
@@ -43,8 +43,8 @@ require (
github.com/json-iterator/go v1.1.12
github.com/klauspost/compress v1.17.9
github.com/kolo/xmlrpc v0.0.0-20220921171641-a4b6fa1dd06b
- github.com/linode/linodego v1.38.0
- github.com/miekg/dns v1.1.61
+ github.com/linode/linodego v1.40.0
+ github.com/miekg/dns v1.1.62
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f
github.com/nsf/jsondiff v0.0.0-20230430225905-43f6cf3098c1
@@ -52,51 +52,51 @@ require (
github.com/oklog/ulid v1.3.1
github.com/ovh/go-ovh v1.6.0
github.com/prometheus/alertmanager v0.27.0
- github.com/prometheus/client_golang v1.20.2
+ github.com/prometheus/client_golang v1.20.3
github.com/prometheus/client_model v0.6.1
- github.com/prometheus/common v0.56.0
+ github.com/prometheus/common v0.59.1
github.com/prometheus/common/assets v0.2.0
github.com/prometheus/common/sigv4 v0.1.0
- github.com/prometheus/exporter-toolkit v0.11.0
- github.com/scaleway/scaleway-sdk-go v1.0.0-beta.29
+ github.com/prometheus/exporter-toolkit v0.12.0
+ github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c
github.com/stretchr/testify v1.9.0
github.com/vultr/govultr/v2 v2.17.2
- go.opentelemetry.io/collector/pdata v1.12.0
- go.opentelemetry.io/collector/semconv v0.105.0
- go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0
- go.opentelemetry.io/otel v1.28.0
- go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0
- go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0
- go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0
- go.opentelemetry.io/otel/sdk v1.28.0
- go.opentelemetry.io/otel/trace v1.28.0
+ go.opentelemetry.io/collector/pdata v1.14.1
+ go.opentelemetry.io/collector/semconv v0.108.1
+ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0
+ go.opentelemetry.io/otel v1.29.0
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.29.0
+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0
+ go.opentelemetry.io/otel/sdk v1.29.0
+ go.opentelemetry.io/otel/trace v1.29.0
go.uber.org/atomic v1.11.0
go.uber.org/automaxprocs v1.5.3
go.uber.org/goleak v1.3.0
go.uber.org/multierr v1.11.0
- golang.org/x/oauth2 v0.22.0
- golang.org/x/sync v0.7.0
- golang.org/x/sys v0.22.0
- golang.org/x/text v0.16.0
- golang.org/x/time v0.5.0
- golang.org/x/tools v0.23.0
- google.golang.org/api v0.190.0
- google.golang.org/genproto/googleapis/api v0.0.0-20240725223205-93522f1f2a9f
- google.golang.org/grpc v1.65.0
+ golang.org/x/oauth2 v0.23.0
+ golang.org/x/sync v0.8.0
+ golang.org/x/sys v0.25.0
+ golang.org/x/text v0.18.0
+ golang.org/x/time v0.6.0
+ golang.org/x/tools v0.24.0
+ google.golang.org/api v0.196.0
+ google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed
+ google.golang.org/grpc v1.66.0
google.golang.org/protobuf v1.34.2
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
- k8s.io/api v0.29.3
- k8s.io/apimachinery v0.29.3
- k8s.io/client-go v0.29.3
+ k8s.io/api v0.31.0
+ k8s.io/apimachinery v0.31.0
+ k8s.io/client-go v0.31.0
k8s.io/klog v1.0.0
k8s.io/klog/v2 v2.130.1
)
require (
- cloud.google.com/go/auth v0.7.3 // indirect
- cloud.google.com/go/auth/oauth2adapt v0.2.3 // indirect
+ cloud.google.com/go/auth v0.9.3 // indirect
+ cloud.google.com/go/auth/oauth2adapt v0.2.4 // indirect
cloud.google.com/go/compute/metadata v0.5.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
@@ -115,9 +115,9 @@ require (
github.com/docker/go-connections v0.4.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/emicklei/go-restful/v3 v3.11.0 // indirect
- github.com/evanphx/json-patch v5.6.0+incompatible // indirect
github.com/fatih/color v1.16.0 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
+ github.com/fxamacker/cbor/v2 v2.7.0 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/go-kit/kit v0.12.0 // indirect
github.com/go-logr/logr v1.4.2 // indirect
@@ -140,10 +140,10 @@ require (
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/s2a-go v0.1.8 // indirect
- github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
+ github.com/googleapis/enterprise-certificate-proxy v0.3.3 // indirect
github.com/googleapis/gax-go/v2 v2.13.0 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
- github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 // indirect
+ github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect
github.com/hashicorp/cronexpr v1.1.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
@@ -163,6 +163,8 @@ require (
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
+ github.com/mdlayher/socket v0.4.1 // indirect
+ github.com/mdlayher/vsock v1.2.1 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
@@ -176,35 +178,38 @@ require (
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/errors v0.9.1 // indirect
+ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/procfs v0.15.1 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/objx v0.5.2 // indirect
+ github.com/x448/float16 v0.8.4 // indirect
github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.opencensus.io v0.24.0 // indirect
- go.opentelemetry.io/otel/metric v1.28.0 // indirect
+ go.opentelemetry.io/otel/metric v1.29.0 // indirect
go.opentelemetry.io/proto/otlp v1.3.1 // indirect
- golang.org/x/crypto v0.25.0 // indirect
+ golang.org/x/crypto v0.26.0 // indirect
golang.org/x/exp v0.0.0-20240119083558-1b970713d09a // indirect
- golang.org/x/mod v0.19.0 // indirect
- golang.org/x/net v0.27.0 // indirect
- golang.org/x/term v0.22.0 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240730163845-b1a4ccb954bf // indirect
+ golang.org/x/mod v0.20.0 // indirect
+ golang.org/x/net v0.28.0 // indirect
+ golang.org/x/term v0.23.0 // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
+ gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gotest.tools/v3 v3.0.3 // indirect
k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 // indirect
- k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect
+ k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 // indirect
sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect
- sigs.k8s.io/yaml v1.3.0 // indirect
+ sigs.k8s.io/yaml v1.4.0 // indirect
)
replace (
k8s.io/klog => github.com/simonpasquier/klog-gokit v0.3.0
- k8s.io/klog/v2 => github.com/simonpasquier/klog-gokit/v3 v3.3.0
+ k8s.io/klog/v2 => github.com/simonpasquier/klog-gokit/v3 v3.5.0
)
// Exclude linodego v1.0.0 as it is no longer published on github.
diff --git a/go.sum b/go.sum
index 7e763af26..edb5b650b 100644
--- a/go.sum
+++ b/go.sum
@@ -12,10 +12,10 @@ cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bP
cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
-cloud.google.com/go/auth v0.7.3 h1:98Vr+5jMaCZ5NZk6e/uBgf60phTk/XN84r8QEWB9yjY=
-cloud.google.com/go/auth v0.7.3/go.mod h1:HJtWUx1P5eqjy/f6Iq5KeytNpbAcGolPhOgyop2LlzA=
-cloud.google.com/go/auth/oauth2adapt v0.2.3 h1:MlxF+Pd3OmSudg/b1yZ5lJwoXCEaeedAguodky1PcKI=
-cloud.google.com/go/auth/oauth2adapt v0.2.3/go.mod h1:tMQXOfZzFuNuUxOypHlQEXgdfX5cuhwU+ffUuXRJE8I=
+cloud.google.com/go/auth v0.9.3 h1:VOEUIAADkkLtyfr3BLa3R8Ed/j6w1jTBmARx+wb5w5U=
+cloud.google.com/go/auth v0.9.3/go.mod h1:7z6VY+7h3KUdRov5F1i8NDP5ZzWKYmEPO842BgCsmTk=
+cloud.google.com/go/auth/oauth2adapt v0.2.4 h1:0GWE/FUsXhf6C+jAkWgYm7X9tK8cuEIfy19DBn6B6bY=
+cloud.google.com/go/auth/oauth2adapt v0.2.4/go.mod h1:jC/jOpwFP6JBxhB3P5Rr0a9HLMC/Pe3eaL4NmdvqPtc=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
@@ -36,8 +36,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl
cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0 h1:GJHeeA2N7xrG3q30L2UXDyuWRzDM900/65j70wcM4Ww=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.13.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0 h1:nyQWyZvwGTvunIMxi1Y9uXkcyr+I7TeNrr/foo4Kpk8=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.14.0/go.mod h1:l38EPgmsp71HHLq9j7De57JcKOWPyhrsW1Awm1JS6K0=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0 h1:tfLQ34V6F7tVSwoTf/4lH5sE0o6eCJuNDTmH09nDpbc=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0/go.mod h1:9kIvujWAA58nmPmWB1m23fyWic1kYZMxD9CxaWn4Qpg=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY=
@@ -143,14 +143,14 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8Yc
github.com/dennwc/varint v1.0.0 h1:kGNFFSSw8ToIy3obO/kKr8U9GZYUAxQEVuix4zfDWzE=
github.com/dennwc/varint v1.0.0/go.mod h1:hnItb35rvZvJrbTALZtY/iQfDs48JKRG1RPpgziApxA=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
-github.com/digitalocean/godo v1.119.0 h1:dmFNQwSIAcH3z+FVovHLkazKDC2uA8oOlGvg5+H4vRw=
-github.com/digitalocean/godo v1.119.0/go.mod h1:WQVH83OHUy6gC4gXpEVQKtxTd4L5oCp+5OialidkPLY=
+github.com/digitalocean/godo v1.122.0 h1:ziytLQi8QKtDp2K1A+YrYl2dWLHLh2uaMzWvcz9HkKg=
+github.com/digitalocean/godo v1.122.0/go.mod h1:WQVH83OHUy6gC4gXpEVQKtxTd4L5oCp+5OialidkPLY=
github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
-github.com/docker/docker v27.1.1+incompatible h1:hO/M4MtV36kzKldqnA37IWhebRA+LnqqcqDja6kVaKY=
-github.com/docker/docker v27.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker v27.2.0+incompatible h1:Rk9nIVdfH3+Vz4cyI/uhbINhEZ/oLmc+CBXmH6fbNk4=
+github.com/docker/docker v27.2.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
@@ -168,13 +168,11 @@ github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4s
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
-github.com/envoyproxy/go-control-plane v0.12.0 h1:4X+VP1GHd1Mhj6IB5mMeGbLCleqxjletLK6K0rbxyZI=
-github.com/envoyproxy/go-control-plane v0.12.0/go.mod h1:ZBTaoJ23lqITozF0M6G4/IragXCQKCnYbmlmtHvwRG0=
+github.com/envoyproxy/go-control-plane v0.13.0 h1:HzkeUz1Knt+3bK+8LG1bxOO/jzWZmdxpwC51i202les=
+github.com/envoyproxy/go-control-plane v0.13.0/go.mod h1:GRaKG3dwvFoTg4nj7aXdZnvMg4d7nvT/wl9WgVXn3Q8=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM=
github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4=
-github.com/evanphx/json-patch v5.6.0+incompatible h1:jBYDEEiFBPxA0v50tFdvOzQQTCvpL6mnFh5mB2/l16U=
-github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb h1:IT4JYU7k4ikYg1SCxNI1/Tieq/NFvh6dzLdgi7eu0tM=
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb/go.mod h1:bH6Xx7IW64qjjJq8M2u4dxNaBiDfKK+z/3eGDpXEQhc=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
@@ -191,6 +189,8 @@ github.com/frankban/quicktest v1.14.5/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7z
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
+github.com/fxamacker/cbor/v2 v2.7.0 h1:iM5WgngdRBanHcxugY4JySA0nk1wZorNOpTgCMedv5E=
+github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXEeXQVeNoDQ=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
@@ -236,8 +236,8 @@ github.com/go-resty/resty/v2 v2.13.1 h1:x+LHXBI2nMB1vqndymf26quycC4aggYJ7DECYbiz
github.com/go-resty/resty/v2 v2.13.1/go.mod h1:GznXlLxkq6Nh4sU59rPmUw3VtgpO3aS96ORAI6Q7d+0=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
-github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
+github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI=
+github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg=
github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA=
@@ -328,8 +328,8 @@ github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
-github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
-github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
+github.com/googleapis/enterprise-certificate-proxy v0.3.3 h1:QRje2j5GZimBzlbhGA2V2QlGNgL8G6e+wGo/+/2bWI0=
+github.com/googleapis/enterprise-certificate-proxy v0.3.3/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.13.0 h1:yitjD5f7jQHhyDsnhKEBU52NdvvdSeGzlAnDPT0hH1s=
@@ -350,8 +350,8 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgf
github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0 h1:bkypFPDjIYGfCYD5mRBvpqxfYX1YCS1PXdKYWi8FsN0=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.20.0/go.mod h1:P+Lt/0by1T8bfcF3z737NnSbmxQAppXMRziHUxPOC8k=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys=
+github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I=
github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
github.com/hashicorp/consul/api v1.29.4 h1:P6slzxDLBOxUSj3fWo2o65VuKtbtOXFi7TSSgtXutuE=
github.com/hashicorp/consul/api v1.29.4/go.mod h1:HUlfw+l2Zy68ceJavv2zAyArl2fqhGWnMycyt56sBgg=
@@ -472,8 +472,8 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4=
-github.com/linode/linodego v1.38.0 h1:wP3oW9OhGc6vhze8NPf2knbwH4TzSbrjzuCd9okjbTY=
-github.com/linode/linodego v1.38.0/go.mod h1:L7GXKFD3PoN2xSEtFc04wIXP5WK65O10jYQx0PQISWQ=
+github.com/linode/linodego v1.40.0 h1:7ESY0PwK94hoggoCtIroT1Xk6b1flrFBNZ6KwqbTqlI=
+github.com/linode/linodego v1.40.0/go.mod h1:NsUw4l8QrLdIofRg1NYFBbW5ZERnmbZykVBszPZLORM=
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
@@ -497,11 +497,15 @@ github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzp
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/maxatome/go-testdeep v1.12.0 h1:Ql7Go8Tg0C1D/uMMX59LAoYK7LffeJQ6X2T04nTH68g=
github.com/maxatome/go-testdeep v1.12.0/go.mod h1:lPZc/HAcJMP92l7yI6TRz1aZN5URwUBUAfUNvrclaNM=
+github.com/mdlayher/socket v0.4.1 h1:eM9y2/jlbs1M615oshPQOHZzj6R6wMT7bX5NPiQvn2U=
+github.com/mdlayher/socket v0.4.1/go.mod h1:cAqeGjoufqdxWkD7DkpyS+wcefOtmu5OQ8KuoJGIReA=
+github.com/mdlayher/vsock v1.2.1 h1:pC1mTJTvjo1r9n9fbm7S1j04rCgCzhCOS5DY0zqHlnQ=
+github.com/mdlayher/vsock v1.2.1/go.mod h1:NRfCibel++DgeMD8z/hP+PPTjlNJsdPOmxcnENvE+SE=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
-github.com/miekg/dns v1.1.61 h1:nLxbwF3XxhwVSm8g9Dghm9MHPaUZuqhPiGL+675ZmEs=
-github.com/miekg/dns v1.1.61/go.mod h1:mnAarhS3nWaW+NVP2wTkYVIZyHNJ098SJZUki3eykwQ=
+github.com/miekg/dns v1.1.62 h1:cN8OuEF1/x5Rq6Np+h1epln8OiyPWV+lROx9LxcGgIQ=
+github.com/miekg/dns v1.1.62/go.mod h1:mvDlcItzm+br7MToIKqkglaGhlFMHJ9DTNNWONWXbNQ=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
@@ -553,11 +557,11 @@ github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:v
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4=
-github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o=
+github.com/onsi/ginkgo/v2 v2.19.0 h1:9Cnnf7UHo57Hy3k6/m5k3dRfGTMXGvxhHFvkDTCTpvA=
+github.com/onsi/ginkgo/v2 v2.19.0/go.mod h1:rlwLi9PilAFJ8jCg9UE1QP6VBpd6/xj3SRC0d6TU0To=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg=
-github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
+github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw=
+github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
@@ -592,6 +596,8 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA=
+github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 h1:GFCKgmp0tecUJ0sJuv4pzYCqS9+RGSn52M3FUwPs+uo=
+github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1:t/avpk3KcrXxUnYOhZhMXJlSEyie6gQbtLq5NM3loB8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -608,8 +614,8 @@ github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeD
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
-github.com/prometheus/client_golang v1.20.2 h1:5ctymQzZlyOON1666svgwn3s6IKWgfbjsejTMiXIyjg=
-github.com/prometheus/client_golang v1.20.2/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
+github.com/prometheus/client_golang v1.20.3 h1:oPksm4K8B+Vt35tUhw6GbSNSgVlVSBH0qELP/7u83l4=
+github.com/prometheus/client_golang v1.20.3/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@@ -625,14 +631,14 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/common v0.29.0/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls=
-github.com/prometheus/common v0.56.0 h1:UffReloqkBtvtQEYDg2s+uDPGRrJyC6vZWPGXf6OhPY=
-github.com/prometheus/common v0.56.0/go.mod h1:7uRPFSUTbfZWsJ7MHY56sqt7hLQu3bxXHDnNhl8E9qI=
+github.com/prometheus/common v0.59.1 h1:LXb1quJHWm1P6wq/U824uxYi4Sg0oGvNeUm1z5dJoX0=
+github.com/prometheus/common v0.59.1/go.mod h1:GpWM7dewqmVYcd7SmRaiWVe9SSqjf0UrwnYnpEZNuT0=
github.com/prometheus/common/assets v0.2.0 h1:0P5OrzoHrYBOSM1OigWL3mY8ZvV2N4zIE/5AahrSrfM=
github.com/prometheus/common/assets v0.2.0/go.mod h1:D17UVUE12bHbim7HzwUvtqm6gwBEaDQ0F+hIGbFbccI=
github.com/prometheus/common/sigv4 v0.1.0 h1:qoVebwtwwEhS85Czm2dSROY5fTo2PAPEVdDeppTwGX4=
github.com/prometheus/common/sigv4 v0.1.0/go.mod h1:2Jkxxk9yYvCkE5G1sQT7GuEXm57JrvHu9k5YwTjsNtI=
-github.com/prometheus/exporter-toolkit v0.11.0 h1:yNTsuZ0aNCNFQ3aFTD2uhPOvr4iD7fdBvKPAEGkNf+g=
-github.com/prometheus/exporter-toolkit v0.11.0/go.mod h1:BVnENhnNecpwoTLiABx7mrPB/OLRIgN74qlQbV+FK1Q=
+github.com/prometheus/exporter-toolkit v0.12.0 h1:DkE5RcEZR3lQA2QD5JLVQIf41dFKNsVMXFhgqcif7fo=
+github.com/prometheus/exporter-toolkit v0.12.0/go.mod h1:fQH0KtTn0yrrS0S82kqppRjDDiwMfIQUwT+RBRRhwUc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
@@ -650,8 +656,8 @@ github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
-github.com/scaleway/scaleway-sdk-go v1.0.0-beta.29 h1:BkTk4gynLjguayxrYxZoMZjBnAOh7ntQvUkOFmkMqPU=
-github.com/scaleway/scaleway-sdk-go v1.0.0-beta.29/go.mod h1:fCa7OJZ/9DRTnOKmxvT6pn+LPWUptQAmHF/SBJUGEcg=
+github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30 h1:yoKAVkEVwAqbGbR8n87rHQ1dulL25rKloGadb3vm770=
+github.com/scaleway/scaleway-sdk-go v1.0.0-beta.30/go.mod h1:sH0u6fq6x4R5M7WxkoQFY/o7UaiItec0o1LinLCJNq8=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/shoenig/test v1.7.1 h1:UJcjSAI3aUKx52kfcfhblgyhZceouhvvs3OYdWgn+PY=
@@ -661,8 +667,8 @@ github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c/go.mod h1:owqhoLW1
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/simonpasquier/klog-gokit v0.3.0 h1:TkFK21cbwDRS+CiystjqbAiq5ubJcVTk9hLUck5Ntcs=
github.com/simonpasquier/klog-gokit v0.3.0/go.mod h1:+SUlDQNrhVtGt2FieaqNftzzk8P72zpWlACateWxA9k=
-github.com/simonpasquier/klog-gokit/v3 v3.3.0 h1:HMzH999kO5gEgJTaWWO+xjncW5oycspcsBnjn9b853Q=
-github.com/simonpasquier/klog-gokit/v3 v3.3.0/go.mod h1:uSbnWC3T7kt1dQyY9sjv0Ao1SehMAJdVnUNSKhjaDsg=
+github.com/simonpasquier/klog-gokit/v3 v3.5.0 h1:ewnk+ickph0hkQFgdI4pffKIbruAxxWcg0Fe/vQmLOM=
+github.com/simonpasquier/klog-gokit/v3 v3.5.0/go.mod h1:S9flvRzzpaYLYtXI2w8jf9R/IU/Cy14NrbvDUevNP1E=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
@@ -703,6 +709,8 @@ github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijb
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/vultr/govultr/v2 v2.17.2 h1:gej/rwr91Puc/tgh+j33p/BLR16UrIPnSr+AIwYWZQs=
github.com/vultr/govultr/v2 v2.17.2/go.mod h1:ZFOKGWmgjytfyjeyAdhQlSWwTjh2ig+X49cAp50dzXI=
+github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
+github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
@@ -724,26 +732,26 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/collector/pdata v1.12.0 h1:Xx5VK1p4VO0md8MWm2icwC1MnJ7f8EimKItMWw46BmA=
-go.opentelemetry.io/collector/pdata v1.12.0/go.mod h1:MYeB0MmMAxeM0hstCFrCqWLzdyeYySim2dG6pDT6nYI=
-go.opentelemetry.io/collector/semconv v0.105.0 h1:8p6dZ3JfxFTjbY38d8xlQGB1TQ3nPUvs+D0RERniZ1g=
-go.opentelemetry.io/collector/semconv v0.105.0/go.mod h1:yMVUCNoQPZVq/IPfrHrnntZTWsLf5YGZ7qwKulIl5hw=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0 h1:4K4tsIXefpVJtvA/8srF4V4y0akAoPHkIslgAkjixJA=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.53.0/go.mod h1:jjdQuTGVsXV4vSs+CJ2qYDeDPf9yIJV23qlIzBm73Vg=
-go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
-go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0 h1:3Q/xZUyC1BBkualc9ROb4G8qkH90LXEIICcs5zv1OYY=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.28.0/go.mod h1:s75jGIWA9OfCMzF0xr+ZgfrB5FEbbV7UuYo32ahUiFI=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0 h1:R3X6ZXmNPRR8ul6i3WgFURCHzaXjHdm0karRG/+dj3s=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.28.0/go.mod h1:QWFXnDavXWwMx2EEcZsf3yxgEKAqsxQ+Syjp+seyInw=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0 h1:j9+03ymgYhPKmeXGk5Zu+cIZOlVzd9Zv7QIiyItjFBU=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0/go.mod h1:Y5+XiUG4Emn1hTfciPzGPJaSI+RpDts6BnCIir0SLqk=
-go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
-go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
-go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE=
-go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg=
-go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
-go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
+go.opentelemetry.io/collector/pdata v1.14.1 h1:wXZjtQA7Vy5HFqco+yA95ENyMQU5heBB1IxMHQf6mUk=
+go.opentelemetry.io/collector/pdata v1.14.1/go.mod h1:z1dTjwwtcoXxZx2/nkHysjxMeaxe9pEmYTEr4SMNIx8=
+go.opentelemetry.io/collector/semconv v0.108.1 h1:Txk9tauUnamZaxS5vlf1O0uZ4VD6nioRBR0nX8L/fU4=
+go.opentelemetry.io/collector/semconv v0.108.1/go.mod h1:zCJ5njhWpejR+A40kiEoeFm1xq1uzyZwMnRNX6/D82A=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0 h1:TT4fX+nBOA/+LUkobKGW1ydGcn+G3vRw9+g5HwCphpk=
+go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.54.0/go.mod h1:L7UH0GbB0p47T4Rri3uHjbpCFYrVrwc1I25QhNPiGK8=
+go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw=
+go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0 h1:dIIDULZJpgdiHz5tXrTgKIMLkus6jEFa7x5SOKcyR7E=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0/go.mod h1:jlRVBe7+Z1wyxFSUs48L6OBQZ5JwH2Hg/Vbl+t9rAgI=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.29.0 h1:nSiV3s7wiCam610XcLbYOmMfJxB9gO4uK3Xgv5gmTgg=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.29.0/go.mod h1:hKn/e/Nmd19/x1gvIHwtOwVWM+VhuITSWip3JUDghj0=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0 h1:JAv0Jwtl01UFiyWZEMiJZBiTlv5A50zNs8lsthXqIio=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0/go.mod h1:QNKLmUEAq2QUbPQUfvw4fmv0bgbK7UlOSFCnXyfvSNc=
+go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc=
+go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8=
+go.opentelemetry.io/otel/sdk v1.29.0 h1:vkqKjk7gwhS8VaWb0POZKmIEDimRCMsopNYnriHyryo=
+go.opentelemetry.io/otel/sdk v1.29.0/go.mod h1:pM8Dx5WKnvxLCb+8lG1PRNIDxu9g9b9g59Qr7hfAAok=
+go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4=
+go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ=
go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0=
go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -774,8 +782,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
-golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
-golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
+golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
+golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -810,8 +818,8 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.19.0 h1:fEdghXQSo20giMthA7cd28ZC+jts4amQ3YMXiP5oMQ8=
-golang.org/x/mod v0.19.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
+golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -857,16 +865,16 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
-golang.org/x/net v0.27.0 h1:5K3Njcw06/l2y9vpGCSdcxWOYHOUk3dVNGDXN+FvAys=
-golang.org/x/net v0.27.0/go.mod h1:dDi0PyhWNoiUOrAS8uXv/vnScO4wnHQO4mj9fn/RytE=
+golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
+golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
+golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -880,8 +888,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
-golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
+golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -947,16 +955,16 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
-golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
-golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
-golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
+golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
+golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -968,14 +976,15 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
-golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
-golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
+golang.org/x/time v0.6.0 h1:eTDhh4ZXt5Qf0augr54TN6suAUudPcawVZeIAPU7D4U=
+golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -1026,8 +1035,8 @@ golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
-golang.org/x/tools v0.23.0 h1:SGsXPZ+2l4JsgaCKkx+FQ9YZ5XEtA1GZYuoDjenLjvg=
-golang.org/x/tools v0.23.0/go.mod h1:pnu6ufv6vQkll6szChhK3C3L/ruaIv5eBeztNG8wtsI=
+golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24=
+golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -1047,8 +1056,8 @@ google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/
google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
-google.golang.org/api v0.190.0 h1:ASM+IhLY1zljNdLu19W1jTmU6A+gMk6M46Wlur61s+Q=
-google.golang.org/api v0.190.0/go.mod h1:QIr6I9iedBLnfqoD6L6Vze1UvS5Hzj5r2aUBOaZnLHo=
+google.golang.org/api v0.196.0 h1:k/RafYqebaIJBO3+SMnfEGtFVlvp5vSgqTUF54UN/zg=
+google.golang.org/api v0.196.0/go.mod h1:g9IL21uGkYgvQ5BZg6BAtoGJQIm8r6EgaAbpNey5wBE=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -1085,10 +1094,10 @@ google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1m
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
-google.golang.org/genproto/googleapis/api v0.0.0-20240725223205-93522f1f2a9f h1:b1Ln/PG8orm0SsBbHZWke8dDp2lrCD4jSmfglFpTZbk=
-google.golang.org/genproto/googleapis/api v0.0.0-20240725223205-93522f1f2a9f/go.mod h1:AHT0dDg3SoMOgZGnZk29b5xTbPHMoEC8qthmBLJCpys=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240730163845-b1a4ccb954bf h1:liao9UHurZLtiEwBgT9LMOnKYsHze6eA6w1KQCMVN2Q=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240730163845-b1a4ccb954bf/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
+google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed h1:3RgNmBoI9MZhsj3QxC+AP/qQhNwpCLOvYDYYsFrhFt0=
+google.golang.org/genproto/googleapis/api v0.0.0-20240827150818-7e3bb234dfed/go.mod h1:OCdP9MfskevB/rbYvHTsXTtKC+3bHWajPdoKgjcYkfo=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
@@ -1107,8 +1116,8 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc=
-google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ=
+google.golang.org/grpc v1.66.0 h1:DibZuoBznOxbDQxRINckZcUvnCEvrW9pcWIE2yF9r1c=
+google.golang.org/grpc v1.66.0/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -1130,6 +1139,8 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/evanphx/json-patch.v4 v4.12.0 h1:n6jtcsulIzXPJaxegRbvFNNrZDjbij7ny3gmSPG+6V4=
+gopkg.in/evanphx/json-patch.v4 v4.12.0/go.mod h1:p8EYWUEYMpynmqDbY58zCKCFZw8pRWMG4EsWvDvM72M=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
@@ -1163,16 +1174,16 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
-k8s.io/api v0.29.3 h1:2ORfZ7+bGC3YJqGpV0KSDDEVf8hdGQ6A03/50vj8pmw=
-k8s.io/api v0.29.3/go.mod h1:y2yg2NTyHUUkIoTC+phinTnEa3KFM6RZ3szxt014a80=
-k8s.io/apimachinery v0.29.3 h1:2tbx+5L7RNvqJjn7RIuIKu9XTsIZ9Z5wX2G22XAa5EU=
-k8s.io/apimachinery v0.29.3/go.mod h1:hx/S4V2PNW4OMg3WizRrHutyB5la0iCUbZym+W0EQIU=
-k8s.io/client-go v0.29.3 h1:R/zaZbEAxqComZ9FHeQwOh3Y1ZUs7FaHKZdQtIc2WZg=
-k8s.io/client-go v0.29.3/go.mod h1:tkDisCvgPfiRpxGnOORfkljmS+UrW+WtXAy2fTvXJB0=
+k8s.io/api v0.31.0 h1:b9LiSjR2ym/SzTOlfMHm1tr7/21aD7fSkqgD/CVJBCo=
+k8s.io/api v0.31.0/go.mod h1:0YiFF+JfFxMM6+1hQei8FY8M7s1Mth+z/q7eF1aJkTE=
+k8s.io/apimachinery v0.31.0 h1:m9jOiSr3FoSSL5WO9bjm1n6B9KROYYgNZOb4tyZ1lBc=
+k8s.io/apimachinery v0.31.0/go.mod h1:rsPdaZJfTfLsNJSQzNHQvYoTmxhoOEofxtOsF3rtsMo=
+k8s.io/client-go v0.31.0 h1:QqEJzNjbN2Yv1H79SsS+SWnXkBgVu4Pj3CJQgbx0gI8=
+k8s.io/client-go v0.31.0/go.mod h1:Y9wvC76g4fLjmU0BA+rV+h2cncoadjvjjkkIGoTLcGU=
k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340 h1:BZqlfIlq5YbRMFko6/PM7FjZpUb45WallggurYhKGag=
k8s.io/kube-openapi v0.0.0-20240228011516-70dd3763d340/go.mod h1:yD4MZYeKMBwQKVht279WycxKyM84kkAx2DPrTXaeb98=
-k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI=
-k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
+k8s.io/utils v0.0.0-20240711033017-18e509b52bc8 h1:pUdcCO1Lk/tbT5ztQWOBi5HBgbBP1J8+AsQnQCKsi8A=
+k8s.io/utils v0.0.0-20240711033017-18e509b52bc8/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
@@ -1181,6 +1192,6 @@ sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h6
sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4=
sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
-sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
-sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E=
+sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
diff --git a/promql/engine.go b/promql/engine.go
index 894ecf6ca..1a5f828ae 100644
--- a/promql/engine.go
+++ b/promql/engine.go
@@ -435,6 +435,10 @@ func NewEngine(opts EngineOpts) *Engine {
// Close closes ng.
func (ng *Engine) Close() error {
+ if ng == nil {
+ return nil
+ }
+
if ng.activeQueryTracker != nil {
return ng.activeQueryTracker.Close()
}
diff --git a/promql/engine_test.go b/promql/engine_test.go
index 4015d0874..d5daa72af 100644
--- a/promql/engine_test.go
+++ b/promql/engine_test.go
@@ -3016,6 +3016,29 @@ func TestEngineOptsValidation(t *testing.T) {
}
}
+func TestEngine_Close(t *testing.T) {
+ t.Run("nil engine", func(t *testing.T) {
+ var ng *promql.Engine
+ require.NoError(t, ng.Close())
+ })
+
+ t.Run("non-nil engine", func(t *testing.T) {
+ ng := promql.NewEngine(promql.EngineOpts{
+ Logger: nil,
+ Reg: nil,
+ MaxSamples: 0,
+ Timeout: 100 * time.Second,
+ NoStepSubqueryIntervalFn: nil,
+ EnableAtModifier: true,
+ EnableNegativeOffset: true,
+ EnablePerStepStats: false,
+ LookbackDelta: 0,
+ EnableDelayedNameRemoval: true,
+ })
+ require.NoError(t, ng.Close())
+ })
+}
+
func TestInstantQueryWithRangeVectorSelector(t *testing.T) {
engine := newTestEngine(t)
diff --git a/promql/functions.go b/promql/functions.go
index 8141d2a9e..182b69b08 100644
--- a/promql/functions.go
+++ b/promql/functions.go
@@ -131,10 +131,18 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
sampledInterval := float64(lastT-firstT) / 1000
averageDurationBetweenSamples := sampledInterval / float64(numSamplesMinusOne)
- // If the first/last samples are close to the boundaries of the range,
- // extrapolate the result. This is as we expect that another sample
- // will exist given the spacing between samples we've seen thus far,
- // with an allowance for noise.
+ // If samples are close enough to the (lower or upper) boundary of the
+ // range, we extrapolate the rate all the way to the boundary in
+ // question. "Close enough" is defined as "up to 10% more than the
+ // average duration between samples within the range", see
+ // extrapolationThreshold below. Essentially, we are assuming a more or
+ // less regular spacing between samples, and if we don't see a sample
+ // where we would expect one, we assume the series does not cover the
+ // whole range, but starts and/or ends within the range. We still
+ // extrapolate the rate in this case, but not all the way to the
+ // boundary, but only by half of the average duration between samples
+ // (which is our guess for where the series actually starts or ends).
+
extrapolationThreshold := averageDurationBetweenSamples * 1.1
extrapolateToInterval := sampledInterval
diff --git a/scrape/manager.go b/scrape/manager.go
index e3dba5f0e..d7786a082 100644
--- a/scrape/manager.go
+++ b/scrape/manager.go
@@ -17,6 +17,7 @@ import (
"errors"
"fmt"
"hash/fnv"
+ "io"
"reflect"
"sync"
"time"
@@ -36,7 +37,7 @@ import (
)
// NewManager is the Manager constructor.
-func NewManager(o *Options, logger log.Logger, app storage.Appendable, registerer prometheus.Registerer) (*Manager, error) {
+func NewManager(o *Options, logger log.Logger, newScrapeFailureLogger func(string) (log.Logger, error), app storage.Appendable, registerer prometheus.Registerer) (*Manager, error) {
if o == nil {
o = &Options{}
}
@@ -50,15 +51,16 @@ func NewManager(o *Options, logger log.Logger, app storage.Appendable, registere
}
m := &Manager{
- append: app,
- opts: o,
- logger: logger,
- scrapeConfigs: make(map[string]*config.ScrapeConfig),
- scrapePools: make(map[string]*scrapePool),
- graceShut: make(chan struct{}),
- triggerReload: make(chan struct{}, 1),
- metrics: sm,
- buffers: pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) }),
+ append: app,
+ opts: o,
+ logger: logger,
+ newScrapeFailureLogger: newScrapeFailureLogger,
+ scrapeConfigs: make(map[string]*config.ScrapeConfig),
+ scrapePools: make(map[string]*scrapePool),
+ graceShut: make(chan struct{}),
+ triggerReload: make(chan struct{}, 1),
+ metrics: sm,
+ buffers: pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) }),
}
m.metrics.setTargetMetadataCacheGatherer(m)
@@ -103,12 +105,14 @@ type Manager struct {
append storage.Appendable
graceShut chan struct{}
- offsetSeed uint64 // Global offsetSeed seed is used to spread scrape workload across HA setup.
- mtxScrape sync.Mutex // Guards the fields below.
- scrapeConfigs map[string]*config.ScrapeConfig
- scrapePools map[string]*scrapePool
- targetSets map[string][]*targetgroup.Group
- buffers *pool.Pool
+ offsetSeed uint64 // Global offsetSeed seed is used to spread scrape workload across HA setup.
+ mtxScrape sync.Mutex // Guards the fields below.
+ scrapeConfigs map[string]*config.ScrapeConfig
+ scrapePools map[string]*scrapePool
+ newScrapeFailureLogger func(string) (log.Logger, error)
+ scrapeFailureLoggers map[string]log.Logger
+ targetSets map[string][]*targetgroup.Group
+ buffers *pool.Pool
triggerReload chan struct{}
@@ -183,6 +187,11 @@ func (m *Manager) reload() {
continue
}
m.scrapePools[setName] = sp
+ if l, ok := m.scrapeFailureLoggers[scrapeConfig.ScrapeFailureLogFile]; ok {
+ sp.SetScrapeFailureLogger(l)
+ } else {
+ level.Error(sp.logger).Log("msg", "No logger found. This is a bug in Prometheus that should be reported upstream.", "scrape_pool", setName)
+ }
}
wg.Add(1)
@@ -238,11 +247,36 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
}
c := make(map[string]*config.ScrapeConfig)
+ scrapeFailureLoggers := map[string]log.Logger{
+ "": nil, // Emptying the file name sets the scrape logger to nil.
+ }
for _, scfg := range scfgs {
c[scfg.JobName] = scfg
+ if _, ok := scrapeFailureLoggers[scfg.ScrapeFailureLogFile]; !ok {
+ // We promise to reopen the file on each reload.
+ var (
+ l log.Logger
+ err error
+ )
+ if m.newScrapeFailureLogger != nil {
+ if l, err = m.newScrapeFailureLogger(scfg.ScrapeFailureLogFile); err != nil {
+ return err
+ }
+ }
+ scrapeFailureLoggers[scfg.ScrapeFailureLogFile] = l
+ }
}
m.scrapeConfigs = c
+ oldScrapeFailureLoggers := m.scrapeFailureLoggers
+ for _, s := range oldScrapeFailureLoggers {
+ if closer, ok := s.(io.Closer); ok {
+ defer closer.Close()
+ }
+ }
+
+ m.scrapeFailureLoggers = scrapeFailureLoggers
+
if err := m.setOffsetSeed(cfg.GlobalConfig.ExternalLabels); err != nil {
return err
}
@@ -260,6 +294,13 @@ func (m *Manager) ApplyConfig(cfg *config.Config) error {
level.Error(m.logger).Log("msg", "error reloading scrape pool", "err", err, "scrape_pool", name)
failed = true
}
+ fallthrough
+ case ok:
+ if l, ok := m.scrapeFailureLoggers[cfg.ScrapeFailureLogFile]; ok {
+ sp.SetScrapeFailureLogger(l)
+ } else {
+ level.Error(sp.logger).Log("msg", "No logger found. This is a bug in Prometheus that should be reported upstream.", "scrape_pool", name)
+ }
}
}
diff --git a/scrape/manager_test.go b/scrape/manager_test.go
index 0b24a86ee..3a1d1e1dd 100644
--- a/scrape/manager_test.go
+++ b/scrape/manager_test.go
@@ -516,7 +516,7 @@ scrape_configs:
)
opts := Options{}
- scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
+ scrapeManager, err := NewManager(&opts, nil, nil, nil, testRegistry)
require.NoError(t, err)
newLoop := func(scrapeLoopOptions) loop {
ch <- struct{}{}
@@ -581,7 +581,7 @@ scrape_configs:
func TestManagerTargetsUpdates(t *testing.T) {
opts := Options{}
testRegistry := prometheus.NewRegistry()
- m, err := NewManager(&opts, nil, nil, testRegistry)
+ m, err := NewManager(&opts, nil, nil, nil, testRegistry)
require.NoError(t, err)
ts := make(chan map[string][]*targetgroup.Group)
@@ -634,7 +634,7 @@ global:
opts := Options{}
testRegistry := prometheus.NewRegistry()
- scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
+ scrapeManager, err := NewManager(&opts, nil, nil, nil, testRegistry)
require.NoError(t, err)
// Load the first config.
@@ -711,7 +711,7 @@ scrape_configs:
}
opts := Options{}
- scrapeManager, err := NewManager(&opts, nil, nil, testRegistry)
+ scrapeManager, err := NewManager(&opts, nil, nil, nil, testRegistry)
require.NoError(t, err)
reload(scrapeManager, cfg1)
@@ -763,6 +763,7 @@ func TestManagerCTZeroIngestion(t *testing.T) {
skipOffsetting: true,
},
log.NewLogfmtLogger(os.Stderr),
+ nil,
&collectResultAppendable{app},
prometheus.NewRegistry(),
)
@@ -862,7 +863,7 @@ func TestUnregisterMetrics(t *testing.T) {
// Check that all metrics can be unregistered, allowing a second manager to be created.
for i := 0; i < 2; i++ {
opts := Options{}
- manager, err := NewManager(&opts, nil, nil, reg)
+ manager, err := NewManager(&opts, nil, nil, nil, reg)
require.NotNil(t, manager)
require.NoError(t, err)
// Unregister all metrics.
@@ -906,6 +907,7 @@ func runManagers(t *testing.T, ctx context.Context) (*discovery.Manager, *Manage
scrapeManager, err := NewManager(
&Options{DiscoveryReloadInterval: model.Duration(100 * time.Millisecond)},
nil,
+ nil,
nopAppendable{},
prometheus.NewRegistry(),
)
diff --git a/scrape/scrape.go b/scrape/scrape.go
index d407e94bf..a64090e18 100644
--- a/scrape/scrape.go
+++ b/scrape/scrape.go
@@ -90,6 +90,9 @@ type scrapePool struct {
noDefaultPort bool
metrics *scrapeMetrics
+
+ scrapeFailureLogger log.Logger
+ scrapeFailureLoggerMtx sync.RWMutex
}
type labelLimits struct {
@@ -218,6 +221,27 @@ func (sp *scrapePool) DroppedTargetsCount() int {
return sp.droppedTargetsCount
}
+func (sp *scrapePool) SetScrapeFailureLogger(l log.Logger) {
+ sp.scrapeFailureLoggerMtx.Lock()
+ defer sp.scrapeFailureLoggerMtx.Unlock()
+ if l != nil {
+ l = log.With(l, "job_name", sp.config.JobName)
+ }
+ sp.scrapeFailureLogger = l
+
+ sp.targetMtx.Lock()
+ defer sp.targetMtx.Unlock()
+ for _, s := range sp.loops {
+ s.setScrapeFailureLogger(sp.scrapeFailureLogger)
+ }
+}
+
+func (sp *scrapePool) getScrapeFailureLogger() log.Logger {
+ sp.scrapeFailureLoggerMtx.RLock()
+ defer sp.scrapeFailureLoggerMtx.RUnlock()
+ return sp.scrapeFailureLogger
+}
+
// stop terminates all scrape loops and returns after they all terminated.
func (sp *scrapePool) stop() {
sp.mtx.Lock()
@@ -361,6 +385,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
wg.Done()
newLoop.setForcedError(forcedErr)
+ newLoop.setScrapeFailureLogger(sp.getScrapeFailureLogger())
newLoop.run(nil)
}(oldLoop, newLoop)
@@ -503,6 +528,7 @@ func (sp *scrapePool) sync(targets []*Target) {
if err != nil {
l.setForcedError(err)
}
+ l.setScrapeFailureLogger(sp.scrapeFailureLogger)
sp.activeTargets[hash] = t
sp.loops[hash] = l
@@ -825,6 +851,7 @@ func (s *targetScraper) readResponse(ctx context.Context, resp *http.Response, w
type loop interface {
run(errc chan<- error)
setForcedError(err error)
+ setScrapeFailureLogger(log.Logger)
stop()
getCache() *scrapeCache
disableEndOfRunStalenessMarkers()
@@ -840,6 +867,8 @@ type cacheEntry struct {
type scrapeLoop struct {
scraper scraper
l log.Logger
+ scrapeFailureLogger log.Logger
+ scrapeFailureLoggerMtx sync.RWMutex
cache *scrapeCache
lastScrapeSize int
buffers *pool.Pool
@@ -1223,6 +1252,15 @@ func newScrapeLoop(ctx context.Context,
return sl
}
+func (sl *scrapeLoop) setScrapeFailureLogger(l log.Logger) {
+ sl.scrapeFailureLoggerMtx.Lock()
+ defer sl.scrapeFailureLoggerMtx.Unlock()
+ if ts, ok := sl.scraper.(fmt.Stringer); ok && l != nil {
+ l = log.With(l, "target", ts.String())
+ }
+ sl.scrapeFailureLogger = l
+}
+
func (sl *scrapeLoop) run(errc chan<- error) {
if !sl.skipOffsetting {
select {
@@ -1366,6 +1404,11 @@ func (sl *scrapeLoop) scrapeAndReport(last, appendTime time.Time, errc chan<- er
bytesRead = len(b)
} else {
level.Debug(sl.l).Log("msg", "Scrape failed", "err", scrapeErr)
+ sl.scrapeFailureLoggerMtx.RLock()
+ if sl.scrapeFailureLogger != nil {
+ sl.scrapeFailureLogger.Log("err", scrapeErr)
+ }
+ sl.scrapeFailureLoggerMtx.RUnlock()
if errc != nil {
errc <- scrapeErr
}
diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go
index 2121ecab1..7725bde5e 100644
--- a/scrape/scrape_test.go
+++ b/scrape/scrape_test.go
@@ -158,6 +158,9 @@ type testLoop struct {
timeout time.Duration
}
+func (l *testLoop) setScrapeFailureLogger(log.Logger) {
+}
+
func (l *testLoop) run(errc chan<- error) {
if l.runOnce {
panic("loop must be started only once")
@@ -3784,7 +3787,7 @@ scrape_configs:
s.DB.EnableNativeHistograms()
reg := prometheus.NewRegistry()
- mng, err := NewManager(&Options{EnableNativeHistogramsIngestion: true}, nil, s, reg)
+ mng, err := NewManager(&Options{EnableNativeHistogramsIngestion: true}, nil, nil, s, reg)
require.NoError(t, err)
cfg, err := config.Load(configStr, false, log.NewNopLogger())
require.NoError(t, err)
diff --git a/scripts/golangci-lint.yml b/scripts/golangci-lint.yml
index f4a7385bb..a15cfc97f 100644
--- a/scripts/golangci-lint.yml
+++ b/scripts/golangci-lint.yml
@@ -28,7 +28,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2
with:
- go-version: 1.22.x
+ go-version: 1.23.x
- name: Install snmp_exporter/generator dependencies
run: sudo apt-get update && sudo apt-get -y install libsnmp-dev
if: github.repository == 'prometheus/snmp_exporter'
diff --git a/scripts/ui_release.sh b/scripts/ui_release.sh
index ea4423d25..c1b872fd3 100755
--- a/scripts/ui_release.sh
+++ b/scripts/ui_release.sh
@@ -30,8 +30,8 @@ function publish() {
cmd+=" --dry-run"
fi
for workspace in ${workspaces}; do
- # package "app" is private so we shouldn't try to publish it.
- if [[ "${workspace}" != "react-app" ]]; then
+ # package "mantine-ui" is private so we shouldn't try to publish it.
+ if [[ "${workspace}" != "mantine-ui" ]]; then
cd "${workspace}"
eval "${cmd}"
cd "${root_ui_folder}"
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/context.go b/storage/remote/otlptranslator/prometheusremotewrite/context.go
new file mode 100644
index 000000000..5c6dd20f1
--- /dev/null
+++ b/storage/remote/otlptranslator/prometheusremotewrite/context.go
@@ -0,0 +1,37 @@
+// Copyright 2024 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package prometheusremotewrite
+
+import "context"
+
+// everyNTimes supports checking for context error every n times.
+type everyNTimes struct {
+ n int
+ i int
+ err error
+}
+
+// checkContext calls ctx.Err() every e.n times and returns an eventual error.
+func (e *everyNTimes) checkContext(ctx context.Context) error {
+ if e.err != nil {
+ return e.err
+ }
+
+ e.i++
+ if e.i >= e.n {
+ e.i = 0
+ e.err = ctx.Err()
+ }
+
+ return e.err
+}
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/context_test.go b/storage/remote/otlptranslator/prometheusremotewrite/context_test.go
new file mode 100644
index 000000000..94b23be04
--- /dev/null
+++ b/storage/remote/otlptranslator/prometheusremotewrite/context_test.go
@@ -0,0 +1,40 @@
+// Copyright 2024 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package prometheusremotewrite
+
+import (
+ "context"
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestEveryNTimes(t *testing.T) {
+ const n = 128
+ ctx, cancel := context.WithCancel(context.Background())
+ e := &everyNTimes{
+ n: n,
+ }
+
+ for i := 0; i < n; i++ {
+ require.NoError(t, e.checkContext(ctx))
+ }
+
+ cancel()
+ for i := 0; i < n-1; i++ {
+ require.NoError(t, e.checkContext(ctx))
+ }
+ require.EqualError(t, e.checkContext(ctx), context.Canceled.Error())
+ // e should remember the error.
+ require.EqualError(t, e.checkContext(ctx), context.Canceled.Error())
+}
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/helper.go b/storage/remote/otlptranslator/prometheusremotewrite/helper.go
index 67cf28119..fd7f58f07 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/helper.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/helper.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"encoding/hex"
"fmt"
"log"
@@ -241,9 +242,13 @@ func isValidAggregationTemporality(metric pmetric.Metric) bool {
// with the user defined bucket boundaries of non-exponential OTel histograms.
// However, work is under way to resolve this shortcoming through a feature called native histograms custom buckets:
// https://github.com/prometheus/prometheus/issues/13485.
-func (c *PrometheusConverter) addHistogramDataPoints(dataPoints pmetric.HistogramDataPointSlice,
- resource pcommon.Resource, settings Settings, baseName string) {
+func (c *PrometheusConverter) addHistogramDataPoints(ctx context.Context, dataPoints pmetric.HistogramDataPointSlice,
+ resource pcommon.Resource, settings Settings, baseName string) error {
for x := 0; x < dataPoints.Len(); x++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
+
pt := dataPoints.At(x)
timestamp := convertTimeStamp(pt.Timestamp())
baseLabels := createAttributes(resource, pt.Attributes(), settings, nil, false)
@@ -284,6 +289,10 @@ func (c *PrometheusConverter) addHistogramDataPoints(dataPoints pmetric.Histogra
// process each bound, based on histograms proto definition, # of buckets = # of explicit bounds + 1
for i := 0; i < pt.ExplicitBounds().Len() && i < pt.BucketCounts().Len(); i++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
+
bound := pt.ExplicitBounds().At(i)
cumulativeCount += pt.BucketCounts().At(i)
bucket := &prompb.Sample{
@@ -312,7 +321,9 @@ func (c *PrometheusConverter) addHistogramDataPoints(dataPoints pmetric.Histogra
ts := c.addSample(infBucket, infLabels)
bucketBounds = append(bucketBounds, bucketBoundsData{ts: ts, bound: math.Inf(1)})
- c.addExemplars(pt, bucketBounds)
+ if err := c.addExemplars(ctx, pt, bucketBounds); err != nil {
+ return err
+ }
startTimestamp := pt.StartTimestamp()
if settings.ExportCreatedMetric && startTimestamp != 0 {
@@ -320,6 +331,8 @@ func (c *PrometheusConverter) addHistogramDataPoints(dataPoints pmetric.Histogra
c.addTimeSeriesIfNeeded(labels, startTimestamp, pt.Timestamp())
}
}
+
+ return nil
}
type exemplarType interface {
@@ -327,9 +340,13 @@ type exemplarType interface {
Exemplars() pmetric.ExemplarSlice
}
-func getPromExemplars[T exemplarType](pt T) []prompb.Exemplar {
+func getPromExemplars[T exemplarType](ctx context.Context, everyN *everyNTimes, pt T) ([]prompb.Exemplar, error) {
promExemplars := make([]prompb.Exemplar, 0, pt.Exemplars().Len())
for i := 0; i < pt.Exemplars().Len(); i++ {
+ if err := everyN.checkContext(ctx); err != nil {
+ return nil, err
+ }
+
exemplar := pt.Exemplars().At(i)
exemplarRunes := 0
@@ -379,7 +396,7 @@ func getPromExemplars[T exemplarType](pt T) []prompb.Exemplar {
promExemplars = append(promExemplars, promExemplar)
}
- return promExemplars
+ return promExemplars, nil
}
// mostRecentTimestampInMetric returns the latest timestamp in a batch of metrics
@@ -417,9 +434,13 @@ func mostRecentTimestampInMetric(metric pmetric.Metric) pcommon.Timestamp {
return ts
}
-func (c *PrometheusConverter) addSummaryDataPoints(dataPoints pmetric.SummaryDataPointSlice, resource pcommon.Resource,
- settings Settings, baseName string) {
+func (c *PrometheusConverter) addSummaryDataPoints(ctx context.Context, dataPoints pmetric.SummaryDataPointSlice, resource pcommon.Resource,
+ settings Settings, baseName string) error {
for x := 0; x < dataPoints.Len(); x++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
+
pt := dataPoints.At(x)
timestamp := convertTimeStamp(pt.Timestamp())
baseLabels := createAttributes(resource, pt.Attributes(), settings, nil, false)
@@ -468,6 +489,8 @@ func (c *PrometheusConverter) addSummaryDataPoints(dataPoints pmetric.SummaryDat
c.addTimeSeriesIfNeeded(createdLabels, startTimestamp, pt.Timestamp())
}
}
+
+ return nil
}
// createLabels returns a copy of baseLabels, adding to it the pair model.MetricNameLabel=name.
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/helper_test.go b/storage/remote/otlptranslator/prometheusremotewrite/helper_test.go
index e02ebbf5d..a48a57b06 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/helper_test.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/helper_test.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"testing"
"time"
@@ -280,6 +281,7 @@ func TestPrometheusConverter_AddSummaryDataPoints(t *testing.T) {
converter := NewPrometheusConverter()
converter.addSummaryDataPoints(
+ context.Background(),
metric.Summary().DataPoints(),
pcommon.NewResource(),
Settings{
@@ -390,6 +392,7 @@ func TestPrometheusConverter_AddHistogramDataPoints(t *testing.T) {
converter := NewPrometheusConverter()
converter.addHistogramDataPoints(
+ context.Background(),
metric.Histogram().DataPoints(),
pcommon.NewResource(),
Settings{
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/histograms.go b/storage/remote/otlptranslator/prometheusremotewrite/histograms.go
index ec93387fc..8349d4f90 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/histograms.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/histograms.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"fmt"
"math"
@@ -33,10 +34,14 @@ const defaultZeroThreshold = 1e-128
// addExponentialHistogramDataPoints adds OTel exponential histogram data points to the corresponding time series
// as native histogram samples.
-func (c *PrometheusConverter) addExponentialHistogramDataPoints(dataPoints pmetric.ExponentialHistogramDataPointSlice,
+func (c *PrometheusConverter) addExponentialHistogramDataPoints(ctx context.Context, dataPoints pmetric.ExponentialHistogramDataPointSlice,
resource pcommon.Resource, settings Settings, promName string) (annotations.Annotations, error) {
var annots annotations.Annotations
for x := 0; x < dataPoints.Len(); x++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return annots, err
+ }
+
pt := dataPoints.At(x)
histogram, ws, err := exponentialToNativeHistogram(pt)
@@ -57,15 +62,18 @@ func (c *PrometheusConverter) addExponentialHistogramDataPoints(dataPoints pmetr
ts, _ := c.getOrCreateTimeSeries(lbls)
ts.Histograms = append(ts.Histograms, histogram)
- exemplars := getPromExemplars[pmetric.ExponentialHistogramDataPoint](pt)
+ exemplars, err := getPromExemplars[pmetric.ExponentialHistogramDataPoint](ctx, &c.everyN, pt)
+ if err != nil {
+ return annots, err
+ }
ts.Exemplars = append(ts.Exemplars, exemplars...)
}
return annots, nil
}
-// exponentialToNativeHistogram translates OTel Exponential Histogram data point
-// to Prometheus Native Histogram.
+// exponentialToNativeHistogram translates an OTel Exponential Histogram data point
+// to a Prometheus Native Histogram.
func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prompb.Histogram, annotations.Annotations, error) {
var annots annotations.Annotations
scale := p.Scale()
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go b/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go
index cd1c858ac..e064ab28a 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"fmt"
"testing"
"time"
@@ -754,6 +755,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
converter := NewPrometheusConverter()
annots, err := converter.addExponentialHistogramDataPoints(
+ context.Background(),
metric.ExponentialHistogram().DataPoints(),
pcommon.NewResource(),
Settings{
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go
index 9d7680080..0afd2ad57 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"errors"
"fmt"
"sort"
@@ -44,6 +45,7 @@ type Settings struct {
type PrometheusConverter struct {
unique map[uint64]*prompb.TimeSeries
conflicts map[uint64][]*prompb.TimeSeries
+ everyN everyNTimes
}
func NewPrometheusConverter() *PrometheusConverter {
@@ -54,7 +56,8 @@ func NewPrometheusConverter() *PrometheusConverter {
}
// FromMetrics converts pmetric.Metrics to Prometheus remote write format.
-func (c *PrometheusConverter) FromMetrics(md pmetric.Metrics, settings Settings) (annots annotations.Annotations, errs error) {
+func (c *PrometheusConverter) FromMetrics(ctx context.Context, md pmetric.Metrics, settings Settings) (annots annotations.Annotations, errs error) {
+ c.everyN = everyNTimes{n: 128}
resourceMetricsSlice := md.ResourceMetrics()
for i := 0; i < resourceMetricsSlice.Len(); i++ {
resourceMetrics := resourceMetricsSlice.At(i)
@@ -68,6 +71,11 @@ func (c *PrometheusConverter) FromMetrics(md pmetric.Metrics, settings Settings)
// TODO: decide if instrumentation library information should be exported as labels
for k := 0; k < metricSlice.Len(); k++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ errs = multierr.Append(errs, err)
+ return
+ }
+
metric := metricSlice.At(k)
mostRecentTimestamp = max(mostRecentTimestamp, mostRecentTimestampInMetric(metric))
@@ -87,21 +95,36 @@ func (c *PrometheusConverter) FromMetrics(md pmetric.Metrics, settings Settings)
errs = multierr.Append(errs, fmt.Errorf("empty data points. %s is dropped", metric.Name()))
break
}
- c.addGaugeNumberDataPoints(dataPoints, resource, settings, promName)
+ if err := c.addGaugeNumberDataPoints(ctx, dataPoints, resource, settings, promName); err != nil {
+ errs = multierr.Append(errs, err)
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ return
+ }
+ }
case pmetric.MetricTypeSum:
dataPoints := metric.Sum().DataPoints()
if dataPoints.Len() == 0 {
errs = multierr.Append(errs, fmt.Errorf("empty data points. %s is dropped", metric.Name()))
break
}
- c.addSumNumberDataPoints(dataPoints, resource, metric, settings, promName)
+ if err := c.addSumNumberDataPoints(ctx, dataPoints, resource, metric, settings, promName); err != nil {
+ errs = multierr.Append(errs, err)
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ return
+ }
+ }
case pmetric.MetricTypeHistogram:
dataPoints := metric.Histogram().DataPoints()
if dataPoints.Len() == 0 {
errs = multierr.Append(errs, fmt.Errorf("empty data points. %s is dropped", metric.Name()))
break
}
- c.addHistogramDataPoints(dataPoints, resource, settings, promName)
+ if err := c.addHistogramDataPoints(ctx, dataPoints, resource, settings, promName); err != nil {
+ errs = multierr.Append(errs, err)
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ return
+ }
+ }
case pmetric.MetricTypeExponentialHistogram:
dataPoints := metric.ExponentialHistogram().DataPoints()
if dataPoints.Len() == 0 {
@@ -109,20 +132,31 @@ func (c *PrometheusConverter) FromMetrics(md pmetric.Metrics, settings Settings)
break
}
ws, err := c.addExponentialHistogramDataPoints(
+ ctx,
dataPoints,
resource,
settings,
promName,
)
annots.Merge(ws)
- errs = multierr.Append(errs, err)
+ if err != nil {
+ errs = multierr.Append(errs, err)
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ return
+ }
+ }
case pmetric.MetricTypeSummary:
dataPoints := metric.Summary().DataPoints()
if dataPoints.Len() == 0 {
errs = multierr.Append(errs, fmt.Errorf("empty data points. %s is dropped", metric.Name()))
break
}
- c.addSummaryDataPoints(dataPoints, resource, settings, promName)
+ if err := c.addSummaryDataPoints(ctx, dataPoints, resource, settings, promName); err != nil {
+ errs = multierr.Append(errs, err)
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ return
+ }
+ }
default:
errs = multierr.Append(errs, errors.New("unsupported metric type"))
}
@@ -148,25 +182,33 @@ func isSameMetric(ts *prompb.TimeSeries, lbls []prompb.Label) bool {
// addExemplars adds exemplars for the dataPoint. For each exemplar, if it can find a bucket bound corresponding to its value,
// the exemplar is added to the bucket bound's time series, provided that the time series' has samples.
-func (c *PrometheusConverter) addExemplars(dataPoint pmetric.HistogramDataPoint, bucketBounds []bucketBoundsData) {
+func (c *PrometheusConverter) addExemplars(ctx context.Context, dataPoint pmetric.HistogramDataPoint, bucketBounds []bucketBoundsData) error {
if len(bucketBounds) == 0 {
- return
+ return nil
}
- exemplars := getPromExemplars(dataPoint)
+ exemplars, err := getPromExemplars(ctx, &c.everyN, dataPoint)
+ if err != nil {
+ return err
+ }
if len(exemplars) == 0 {
- return
+ return nil
}
sort.Sort(byBucketBoundsData(bucketBounds))
for _, exemplar := range exemplars {
for _, bound := range bucketBounds {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
if len(bound.ts.Samples) > 0 && exemplar.Value <= bound.bound {
bound.ts.Exemplars = append(bound.ts.Exemplars, exemplar)
break
}
}
}
+
+ return nil
}
// addSample finds a TimeSeries that corresponds to lbls, and adds sample to it.
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go
index bdc1c9d0b..641437632 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"fmt"
"testing"
"time"
@@ -28,6 +29,39 @@ import (
)
func TestFromMetrics(t *testing.T) {
+ t.Run("successful", func(t *testing.T) {
+ converter := NewPrometheusConverter()
+ payload := createExportRequest(5, 128, 128, 2, 0)
+
+ annots, err := converter.FromMetrics(context.Background(), payload.Metrics(), Settings{})
+ require.NoError(t, err)
+ require.Empty(t, annots)
+ })
+
+ t.Run("context cancellation", func(t *testing.T) {
+ converter := NewPrometheusConverter()
+ ctx, cancel := context.WithCancel(context.Background())
+ // Verify that converter.FromMetrics respects cancellation.
+ cancel()
+ payload := createExportRequest(5, 128, 128, 2, 0)
+
+ annots, err := converter.FromMetrics(ctx, payload.Metrics(), Settings{})
+ require.ErrorIs(t, err, context.Canceled)
+ require.Empty(t, annots)
+ })
+
+ t.Run("context timeout", func(t *testing.T) {
+ converter := NewPrometheusConverter()
+ // Verify that converter.FromMetrics respects timeout.
+ ctx, cancel := context.WithTimeout(context.Background(), 0)
+ t.Cleanup(cancel)
+ payload := createExportRequest(5, 128, 128, 2, 0)
+
+ annots, err := converter.FromMetrics(ctx, payload.Metrics(), Settings{})
+ require.ErrorIs(t, err, context.DeadlineExceeded)
+ require.Empty(t, annots)
+ })
+
t.Run("exponential histogram warnings for zero count and non-zero sum", func(t *testing.T) {
request := pmetricotlp.NewExportRequest()
rm := request.Metrics().ResourceMetrics().AppendEmpty()
@@ -51,7 +85,7 @@ func TestFromMetrics(t *testing.T) {
}
converter := NewPrometheusConverter()
- annots, err := converter.FromMetrics(request.Metrics(), Settings{})
+ annots, err := converter.FromMetrics(context.Background(), request.Metrics(), Settings{})
require.NoError(t, err)
require.NotEmpty(t, annots)
ws, infos := annots.AsStrings("", 0, 0)
@@ -84,7 +118,7 @@ func BenchmarkPrometheusConverter_FromMetrics(b *testing.B) {
for i := 0; i < b.N; i++ {
converter := NewPrometheusConverter()
- annots, err := converter.FromMetrics(payload.Metrics(), Settings{})
+ annots, err := converter.FromMetrics(context.Background(), payload.Metrics(), Settings{})
require.NoError(b, err)
require.Empty(b, annots)
require.NotNil(b, converter.TimeSeries())
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/number_data_points.go b/storage/remote/otlptranslator/prometheusremotewrite/number_data_points.go
index 80ccb46c7..6cdab450e 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/number_data_points.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/number_data_points.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"math"
"github.com/prometheus/common/model"
@@ -27,9 +28,13 @@ import (
"github.com/prometheus/prometheus/prompb"
)
-func (c *PrometheusConverter) addGaugeNumberDataPoints(dataPoints pmetric.NumberDataPointSlice,
- resource pcommon.Resource, settings Settings, name string) {
+func (c *PrometheusConverter) addGaugeNumberDataPoints(ctx context.Context, dataPoints pmetric.NumberDataPointSlice,
+ resource pcommon.Resource, settings Settings, name string) error {
for x := 0; x < dataPoints.Len(); x++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
+
pt := dataPoints.At(x)
labels := createAttributes(
resource,
@@ -55,11 +60,17 @@ func (c *PrometheusConverter) addGaugeNumberDataPoints(dataPoints pmetric.Number
}
c.addSample(sample, labels)
}
+
+ return nil
}
-func (c *PrometheusConverter) addSumNumberDataPoints(dataPoints pmetric.NumberDataPointSlice,
- resource pcommon.Resource, metric pmetric.Metric, settings Settings, name string) {
+func (c *PrometheusConverter) addSumNumberDataPoints(ctx context.Context, dataPoints pmetric.NumberDataPointSlice,
+ resource pcommon.Resource, metric pmetric.Metric, settings Settings, name string) error {
for x := 0; x < dataPoints.Len(); x++ {
+ if err := c.everyN.checkContext(ctx); err != nil {
+ return err
+ }
+
pt := dataPoints.At(x)
lbls := createAttributes(
resource,
@@ -85,7 +96,10 @@ func (c *PrometheusConverter) addSumNumberDataPoints(dataPoints pmetric.NumberDa
}
ts := c.addSample(sample, lbls)
if ts != nil {
- exemplars := getPromExemplars[pmetric.NumberDataPoint](pt)
+ exemplars, err := getPromExemplars[pmetric.NumberDataPoint](ctx, &c.everyN, pt)
+ if err != nil {
+ return err
+ }
ts.Exemplars = append(ts.Exemplars, exemplars...)
}
@@ -93,7 +107,7 @@ func (c *PrometheusConverter) addSumNumberDataPoints(dataPoints pmetric.NumberDa
if settings.ExportCreatedMetric && metric.Sum().IsMonotonic() {
startTimestamp := pt.StartTimestamp()
if startTimestamp == 0 {
- return
+ return nil
}
createdLabels := make([]prompb.Label, len(lbls))
@@ -107,4 +121,6 @@ func (c *PrometheusConverter) addSumNumberDataPoints(dataPoints pmetric.NumberDa
c.addTimeSeriesIfNeeded(createdLabels, startTimestamp, pt.Timestamp())
}
}
+
+ return nil
}
diff --git a/storage/remote/otlptranslator/prometheusremotewrite/number_data_points_test.go b/storage/remote/otlptranslator/prometheusremotewrite/number_data_points_test.go
index 41afc8c4c..e93226964 100644
--- a/storage/remote/otlptranslator/prometheusremotewrite/number_data_points_test.go
+++ b/storage/remote/otlptranslator/prometheusremotewrite/number_data_points_test.go
@@ -17,6 +17,7 @@
package prometheusremotewrite
import (
+ "context"
"testing"
"time"
@@ -66,6 +67,7 @@ func TestPrometheusConverter_addGaugeNumberDataPoints(t *testing.T) {
converter := NewPrometheusConverter()
converter.addGaugeNumberDataPoints(
+ context.Background(),
metric.Gauge().DataPoints(),
pcommon.NewResource(),
Settings{
@@ -242,6 +244,7 @@ func TestPrometheusConverter_addSumNumberDataPoints(t *testing.T) {
converter := NewPrometheusConverter()
converter.addSumNumberDataPoints(
+ context.Background(),
metric.Sum().DataPoints(),
pcommon.NewResource(),
metric,
diff --git a/storage/remote/write_handler.go b/storage/remote/write_handler.go
index 58fb668cc..736bc8eff 100644
--- a/storage/remote/write_handler.go
+++ b/storage/remote/write_handler.go
@@ -512,7 +512,7 @@ func (h *otlpWriteHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
otlpCfg := h.configFunc().OTLPConfig
converter := otlptranslator.NewPrometheusConverter()
- annots, err := converter.FromMetrics(req.Metrics(), otlptranslator.Settings{
+ annots, err := converter.FromMetrics(r.Context(), req.Metrics(), otlptranslator.Settings{
AddMetricSuffixes: true,
PromoteResourceAttributes: otlpCfg.PromoteResourceAttributes,
})
diff --git a/tsdb/encoding/encoding.go b/tsdb/encoding/encoding.go
index 88fdd30c8..a7ce4e81e 100644
--- a/tsdb/encoding/encoding.go
+++ b/tsdb/encoding/encoding.go
@@ -201,8 +201,9 @@ func (d *Decbuf) UvarintStr() string {
return string(d.UvarintBytes())
}
-// UvarintBytes returns invalid values if the byte slice goes away.
-// Compared to UvarintStr, it avoid allocations.
+// UvarintBytes returns a pointer to internal data;
+// the return value becomes invalid if the byte slice goes away.
+// Compared to UvarintStr, this avoids allocations.
func (d *Decbuf) UvarintBytes() []byte {
l := d.Uvarint64()
if d.E != nil {
diff --git a/tsdb/head.go b/tsdb/head.go
index b7bfaa0fd..4ff7aab63 100644
--- a/tsdb/head.go
+++ b/tsdb/head.go
@@ -128,6 +128,7 @@ type Head struct {
writeNotified wlog.WriteNotified
memTruncationInProcess atomic.Bool
+ memTruncationCallBack func() // For testing purposes.
}
type ExemplarStorage interface {
@@ -1129,6 +1130,10 @@ func (h *Head) truncateMemory(mint int64) (err error) {
h.memTruncationInProcess.Store(true)
defer h.memTruncationInProcess.Store(false)
+ if h.memTruncationCallBack != nil {
+ h.memTruncationCallBack()
+ }
+
// We wait for pending queries to end that overlap with this truncation.
if initialized {
h.WaitForPendingReadersInTimeRange(h.MinTime(), mint)
diff --git a/tsdb/head_test.go b/tsdb/head_test.go
index 18ec4f0ac..7b5349cfc 100644
--- a/tsdb/head_test.go
+++ b/tsdb/head_test.go
@@ -3492,6 +3492,133 @@ func TestWaitForPendingReadersInTimeRange(t *testing.T) {
}
}
+func TestQueryOOOHeadDuringTruncate(t *testing.T) {
+ testQueryOOOHeadDuringTruncate(t,
+ func(db *DB, minT, maxT int64) (storage.LabelQuerier, error) {
+ return db.Querier(minT, maxT)
+ },
+ func(t *testing.T, lq storage.LabelQuerier, minT, _ int64) {
+ // Samples
+ q, ok := lq.(storage.Querier)
+ require.True(t, ok)
+ ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
+ require.True(t, ss.Next())
+ s := ss.At()
+ require.False(t, ss.Next()) // One series.
+ it := s.Iterator(nil)
+ require.NotEqual(t, chunkenc.ValNone, it.Next()) // Has some data.
+ require.Equal(t, minT, it.AtT()) // It is an in-order sample.
+ require.NotEqual(t, chunkenc.ValNone, it.Next()) // Has some data.
+ require.Equal(t, minT+50, it.AtT()) // It is an out-of-order sample.
+ require.NoError(t, it.Err())
+ },
+ )
+}
+
+func TestChunkQueryOOOHeadDuringTruncate(t *testing.T) {
+ testQueryOOOHeadDuringTruncate(t,
+ func(db *DB, minT, maxT int64) (storage.LabelQuerier, error) {
+ return db.ChunkQuerier(minT, maxT)
+ },
+ func(t *testing.T, lq storage.LabelQuerier, minT, _ int64) {
+ // Chunks
+ q, ok := lq.(storage.ChunkQuerier)
+ require.True(t, ok)
+ ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
+ require.True(t, ss.Next())
+ s := ss.At()
+ require.False(t, ss.Next()) // One series.
+ metaIt := s.Iterator(nil)
+ require.True(t, metaIt.Next())
+ meta := metaIt.At()
+ // Samples
+ it := meta.Chunk.Iterator(nil)
+ require.NotEqual(t, chunkenc.ValNone, it.Next()) // Has some data.
+ require.Equal(t, minT, it.AtT()) // It is an in-order sample.
+ require.NotEqual(t, chunkenc.ValNone, it.Next()) // Has some data.
+ require.Equal(t, minT+50, it.AtT()) // It is an out-of-order sample.
+ require.NoError(t, it.Err())
+ },
+ )
+}
+
+func testQueryOOOHeadDuringTruncate(t *testing.T, makeQuerier func(db *DB, minT, maxT int64) (storage.LabelQuerier, error), verify func(t *testing.T, q storage.LabelQuerier, minT, maxT int64)) {
+ const maxT int64 = 6000
+
+ dir := t.TempDir()
+ opts := DefaultOptions()
+ opts.EnableNativeHistograms = true
+ opts.OutOfOrderTimeWindow = maxT
+ opts.MinBlockDuration = maxT / 2 // So that head will compact up to 3000.
+
+ db, err := Open(dir, nil, nil, opts, nil)
+ require.NoError(t, err)
+ t.Cleanup(func() {
+ require.NoError(t, db.Close())
+ })
+ db.DisableCompactions()
+
+ var (
+ ref = storage.SeriesRef(0)
+ app = db.Appender(context.Background())
+ )
+ // Add in-order samples at every 100ms starting at 0ms.
+ for i := int64(0); i < maxT; i += 100 {
+ _, err := app.Append(ref, labels.FromStrings("a", "b"), i, 0)
+ require.NoError(t, err)
+ }
+ // Add out-of-order samples at every 100ms starting at 50ms.
+ for i := int64(50); i < maxT; i += 100 {
+ _, err := app.Append(ref, labels.FromStrings("a", "b"), i, 0)
+ require.NoError(t, err)
+ }
+ require.NoError(t, app.Commit())
+
+ requireEqualOOOSamples(t, int(maxT/100-1), db)
+
+ // Synchronization points.
+ allowQueryToStart := make(chan struct{})
+ queryStarted := make(chan struct{})
+ compactionFinished := make(chan struct{})
+
+ db.head.memTruncationCallBack = func() {
+ // Compaction has started, let the query start and wait for it to actually start to simulate the race condition.
+ allowQueryToStart <- struct{}{}
+ <-queryStarted
+ }
+
+ go func() {
+ db.Compact(context.Background()) // Compact and write blocks up to 3000 (maxT/2).
+ compactionFinished <- struct{}{}
+ }()
+
+ // Wait for the compaction to start.
+ <-allowQueryToStart
+
+ q, err := makeQuerier(db, 1500, 2500)
+ require.NoError(t, err)
+ queryStarted <- struct{}{} // Unblock the compaction.
+ ctx := context.Background()
+
+ // Label names.
+ res, annots, err := q.LabelNames(ctx, nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
+ require.NoError(t, err)
+ require.Empty(t, annots)
+ require.Equal(t, []string{"a"}, res)
+
+ // Label values.
+ res, annots, err = q.LabelValues(ctx, "a", nil, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
+ require.NoError(t, err)
+ require.Empty(t, annots)
+ require.Equal(t, []string{"b"}, res)
+
+ verify(t, q, 1500, 2500)
+
+ require.NoError(t, q.Close()) // Cannot be deferred as the compaction waits for queries to close before finishing.
+
+ <-compactionFinished // Wait for compaction otherwise Go test finds stray goroutines.
+}
+
func TestAppendHistogram(t *testing.T) {
l := labels.FromStrings("a", "b")
for _, numHistograms := range []int{1, 10, 150, 200, 250, 300} {
diff --git a/tsdb/ooo_head_read.go b/tsdb/ooo_head_read.go
index 7b58ec566..66ae93325 100644
--- a/tsdb/ooo_head_read.go
+++ b/tsdb/ooo_head_read.go
@@ -513,7 +513,7 @@ type HeadAndOOOQuerier struct {
head *Head
index IndexReader
chunkr ChunkReader
- querier storage.Querier
+ querier storage.Querier // Used for LabelNames, LabelValues, but may be nil if the head was truncated in the meantime, in which case we ignore it and don't close it at the end.
}
func NewHeadAndOOOQuerier(mint, maxt int64, head *Head, oooIsoState *oooIsolationState, querier storage.Querier) storage.Querier {
@@ -534,15 +534,24 @@ func NewHeadAndOOOQuerier(mint, maxt int64, head *Head, oooIsoState *oooIsolatio
}
func (q *HeadAndOOOQuerier) LabelValues(ctx context.Context, name string, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) {
+ if q.querier == nil {
+ return nil, nil, nil
+ }
return q.querier.LabelValues(ctx, name, hints, matchers...)
}
func (q *HeadAndOOOQuerier) LabelNames(ctx context.Context, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) {
+ if q.querier == nil {
+ return nil, nil, nil
+ }
return q.querier.LabelNames(ctx, hints, matchers...)
}
func (q *HeadAndOOOQuerier) Close() error {
q.chunkr.Close()
+ if q.querier == nil {
+ return nil
+ }
return q.querier.Close()
}
@@ -577,15 +586,24 @@ func NewHeadAndOOOChunkQuerier(mint, maxt int64, head *Head, oooIsoState *oooIso
}
func (q *HeadAndOOOChunkQuerier) LabelValues(ctx context.Context, name string, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) {
+ if q.querier == nil {
+ return nil, nil, nil
+ }
return q.querier.LabelValues(ctx, name, hints, matchers...)
}
func (q *HeadAndOOOChunkQuerier) LabelNames(ctx context.Context, hints *storage.LabelHints, matchers ...*labels.Matcher) ([]string, annotations.Annotations, error) {
+ if q.querier == nil {
+ return nil, nil, nil
+ }
return q.querier.LabelNames(ctx, hints, matchers...)
}
func (q *HeadAndOOOChunkQuerier) Close() error {
q.chunkr.Close()
+ if q.querier == nil {
+ return nil
+ }
return q.querier.Close()
}
diff --git a/web/api/v1/api.go b/web/api/v1/api.go
index d58be211f..0ec8467fa 100644
--- a/web/api/v1/api.go
+++ b/web/api/v1/api.go
@@ -366,6 +366,9 @@ func (api *API) Register(r *route.Router) {
r.Get("/format_query", wrapAgent(api.formatQuery))
r.Post("/format_query", wrapAgent(api.formatQuery))
+ r.Get("/parse_query", wrapAgent(api.parseQuery))
+ r.Post("/parse_query", wrapAgent(api.parseQuery))
+
r.Get("/labels", wrapAgent(api.labelNames))
r.Post("/labels", wrapAgent(api.labelNames))
r.Get("/label/:name/values", wrapAgent(api.labelValues))
@@ -485,6 +488,15 @@ func (api *API) formatQuery(r *http.Request) (result apiFuncResult) {
return apiFuncResult{expr.Pretty(0), nil, nil, nil}
}
+func (api *API) parseQuery(r *http.Request) apiFuncResult {
+ expr, err := parser.ParseExpr(r.FormValue("query"))
+ if err != nil {
+ return invalidParamError(err, "query")
+ }
+
+ return apiFuncResult{data: translateAST(expr), err: nil, warnings: nil, finalizer: nil}
+}
+
func extractQueryOpts(r *http.Request) (promql.QueryOpts, error) {
var duration time.Duration
diff --git a/web/api/v1/translate_ast.go b/web/api/v1/translate_ast.go
new file mode 100644
index 000000000..afa11f16b
--- /dev/null
+++ b/web/api/v1/translate_ast.go
@@ -0,0 +1,157 @@
+// Copyright 2024 The Prometheus Authors
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package v1
+
+import (
+ "strconv"
+
+ "github.com/prometheus/prometheus/model/labels"
+ "github.com/prometheus/prometheus/promql/parser"
+)
+
+// Take a Go PromQL AST and translate it to an object that's nicely JSON-serializable
+// for the tree view in the UI.
+// TODO: Could it make sense to do this via the normal JSON marshalling methods? Maybe
+// too UI-specific though.
+func translateAST(node parser.Expr) interface{} {
+ if node == nil {
+ return nil
+ }
+
+ switch n := node.(type) {
+ case *parser.AggregateExpr:
+ return map[string]interface{}{
+ "type": "aggregation",
+ "op": n.Op.String(),
+ "expr": translateAST(n.Expr),
+ "param": translateAST(n.Param),
+ "grouping": sanitizeList(n.Grouping),
+ "without": n.Without,
+ }
+ case *parser.BinaryExpr:
+ var matching interface{}
+ if m := n.VectorMatching; m != nil {
+ matching = map[string]interface{}{
+ "card": m.Card.String(),
+ "labels": sanitizeList(m.MatchingLabels),
+ "on": m.On,
+ "include": sanitizeList(m.Include),
+ }
+ }
+
+ return map[string]interface{}{
+ "type": "binaryExpr",
+ "op": n.Op.String(),
+ "lhs": translateAST(n.LHS),
+ "rhs": translateAST(n.RHS),
+ "matching": matching,
+ "bool": n.ReturnBool,
+ }
+ case *parser.Call:
+ args := []interface{}{}
+ for _, arg := range n.Args {
+ args = append(args, translateAST(arg))
+ }
+
+ return map[string]interface{}{
+ "type": "call",
+ "func": map[string]interface{}{
+ "name": n.Func.Name,
+ "argTypes": n.Func.ArgTypes,
+ "variadic": n.Func.Variadic,
+ "returnType": n.Func.ReturnType,
+ },
+ "args": args,
+ }
+ case *parser.MatrixSelector:
+ vs := n.VectorSelector.(*parser.VectorSelector)
+ return map[string]interface{}{
+ "type": "matrixSelector",
+ "name": vs.Name,
+ "range": n.Range.Milliseconds(),
+ "offset": vs.OriginalOffset.Milliseconds(),
+ "matchers": translateMatchers(vs.LabelMatchers),
+ "timestamp": vs.Timestamp,
+ "startOrEnd": getStartOrEnd(vs.StartOrEnd),
+ }
+ case *parser.SubqueryExpr:
+ return map[string]interface{}{
+ "type": "subquery",
+ "expr": translateAST(n.Expr),
+ "range": n.Range.Milliseconds(),
+ "offset": n.OriginalOffset.Milliseconds(),
+ "step": n.Step.Milliseconds(),
+ "timestamp": n.Timestamp,
+ "startOrEnd": getStartOrEnd(n.StartOrEnd),
+ }
+ case *parser.NumberLiteral:
+ return map[string]string{
+ "type": "numberLiteral",
+ "val": strconv.FormatFloat(n.Val, 'f', -1, 64),
+ }
+ case *parser.ParenExpr:
+ return map[string]interface{}{
+ "type": "parenExpr",
+ "expr": translateAST(n.Expr),
+ }
+ case *parser.StringLiteral:
+ return map[string]interface{}{
+ "type": "stringLiteral",
+ "val": n.Val,
+ }
+ case *parser.UnaryExpr:
+ return map[string]interface{}{
+ "type": "unaryExpr",
+ "op": n.Op.String(),
+ "expr": translateAST(n.Expr),
+ }
+ case *parser.VectorSelector:
+ return map[string]interface{}{
+ "type": "vectorSelector",
+ "name": n.Name,
+ "offset": n.OriginalOffset.Milliseconds(),
+ "matchers": translateMatchers(n.LabelMatchers),
+ "timestamp": n.Timestamp,
+ "startOrEnd": getStartOrEnd(n.StartOrEnd),
+ }
+ }
+ panic("unsupported node type")
+}
+
+func sanitizeList(l []string) []string {
+ if l == nil {
+ return []string{}
+ }
+ return l
+}
+
+func translateMatchers(in []*labels.Matcher) interface{} {
+ out := []map[string]interface{}{}
+ for _, m := range in {
+ out = append(out, map[string]interface{}{
+ "name": m.Name,
+ "value": m.Value,
+ "type": m.Type.String(),
+ })
+ }
+ return out
+}
+
+func getStartOrEnd(startOrEnd parser.ItemType) interface{} {
+ if startOrEnd == 0 {
+ return nil
+ }
+
+ return startOrEnd.String()
+}
diff --git a/web/ui/README.md b/web/ui/README.md
index 465adf537..38087755e 100644
--- a/web/ui/README.md
+++ b/web/ui/README.md
@@ -1,4 +1,5 @@
## Overview
+
The `ui` directory contains static files and templates used in the web UI. For
easier distribution they are compressed (c.f. Makefile) and statically compiled
into the Prometheus binary using the embed package.
@@ -15,15 +16,23 @@ This will serve all files from your local filesystem. This is for development pu
### Introduction
-The react application is a monorepo composed by multiple different npm packages. The main one is `react-app` which
-contains the code of the react application.
+This directory contains two generations of Prometheus' React-based web UI:
+
+* `react-app`: The old 2.x web UI
+* `mantine-ui`: The new 3.x web UI
+
+Both UIs are built and compiled into Prometheus. The new UI is served by default, but a feature flag
+(`--enable-feature=old-ui`) can be used to switch back to serving the old UI.
Then you have different npm packages located in the folder `modules`. These packages are supposed to be used by the
-react-app and also by others consumers (like Thanos)
+two React apps and also by others consumers (like Thanos).
+
+While most of these applications / modules are part of the same npm workspace, the old UI in the `react-app` directory
+has been separated out of the workspace setup, since its dependencies were too incompatible.
### Pre-requisite
-To be able to build the react application you need:
+To be able to build either of the React applications, you need:
* npm >= v7
* node >= v20
@@ -38,46 +47,50 @@ need to move to the directory `web/ui` and then download and install them locall
npm consults the `package.json` and `package-lock.json` files for dependencies to install. It creates a `node_modules`
directory with all installed dependencies.
-**NOTE**: Do not run `npm install` in the `react-app` folder or in any sub folder of the `module` directory.
+**NOTE**: Do not run `npm install` in the `react-app` / `mantine-ui` folder or in any sub folder of the `module` directory.
### Upgrading npm dependencies
-As it is a monorepo, when upgrading a dependency, you have to upgrade it in every packages that composed this monorepo (
-aka, in all sub folder of `module` and in `react-app`)
+As it is a monorepo, when upgrading a dependency, you have to upgrade it in every packages that composed this monorepo
+(aka, in all sub folders of `module` and `react-app` / `mantine-ui`)
Then you have to run the command `npm install` in `web/ui` and not in a sub folder / sub package. It won't simply work.
### Running a local development server
-You can start a development server for the React UI outside of a running Prometheus server by running:
+You can start a development server for the new React UI outside of a running Prometheus server by running:
npm start
-This will open a browser window with the React app running on http://localhost:3000/. The page will reload if you make
+(For the old UI, you will have to run the same command from the `react-app` subdirectory.)
+
+This will open a browser window with the React app running on http://localhost:5173/. The page will reload if you make
edits to the source code. You will also see any lint errors in the console.
-**NOTE**: It will reload only if you change the code in `react-app` folder. Any code changes in the folder `module` is
+**NOTE**: It will reload only if you change the code in `mantine-ui` folder. Any code changes in the folder `module` is
not considered by the command `npm start`. In order to see the changes in the react-app you will have to
run `npm run build:module`
-Due to a `"proxy": "http://localhost:9090"` setting in the `package.json` file, any API requests from the React UI are
+Due to a `"proxy": "http://localhost:9090"` setting in the `mantine-ui/vite.config.ts` file, any API requests from the React UI are
proxied to `localhost` on port `9090` by the development server. This allows you to run a normal Prometheus server to
handle API requests, while iterating separately on the UI.
- [browser] ----> [localhost:3000 (dev server)] --(proxy API requests)--> [localhost:9090 (Prometheus)]
+ [browser] ----> [localhost:5173 (dev server)] --(proxy API requests)--> [localhost:9090 (Prometheus)]
### Running tests
-To run the test for the react-app and for all modules, you can simply run:
+To run the test for the new React app and for all modules, you can simply run:
```bash
npm test
```
-if you want to run the test only for a specific module, you need to go to the folder of the module and run
+(For the old UI, you will have to run the same command from the `react-app` subdirectory.)
+
+If you want to run the test only for a specific module, you need to go to the folder of the module and run
again `npm test`.
-For example, in case you only want to run the test of the react-app, go to `web/ui/react-app` and run `npm test`
+For example, in case you only want to run the test of the new React app, go to `web/ui/mantine-ui` and run `npm test`
To generate an HTML-based test coverage report, run:
@@ -93,7 +106,7 @@ running tests.
### Building the app for production
-To build a production-optimized version of the React app to a `build` subdirectory, run:
+To build a production-optimized version of both React app versions to a `static/{react-app,mantine-ui}` subdirectory, run:
npm run build
@@ -102,10 +115,10 @@ Prometheus `Makefile` when building the full binary.
### Integration into Prometheus
-To build a Prometheus binary that includes a compiled-in version of the production build of the React app, change to the
+To build a Prometheus binary that includes a compiled-in version of the production build of both React app versions, change to the
root of the repository and run:
make build
-This installs dependencies via npm, builds a production build of the React app, and then finally compiles in all web
+This installs dependencies via npm, builds a production build of both React apps, and then finally compiles in all web
assets into the Prometheus binary.
diff --git a/web/ui/build_ui.sh b/web/ui/build_ui.sh
index 9c09e661a..7046ca059 100644
--- a/web/ui/build_ui.sh
+++ b/web/ui/build_ui.sh
@@ -31,10 +31,16 @@ function buildModule() {
function buildReactApp() {
echo "build react-app"
- npm run build -w @prometheus-io/app
- rm -rf ./static/react
- mkdir -p ./static
- mv ./react-app/build ./static/react
+ (cd react-app && npm run build)
+ rm -rf ./static/react-app
+ mv ./react-app/build ./static/react-app
+}
+
+function buildMantineUI() {
+ echo "build mantine-ui"
+ npm run build -w @prometheus-io/mantine-ui
+ rm -rf ./static/mantine-ui
+ mv ./mantine-ui/dist ./static/mantine-ui
}
for i in "$@"; do
@@ -42,6 +48,7 @@ for i in "$@"; do
--all)
buildModule
buildReactApp
+ buildMantineUI
shift
;;
--build-module)
diff --git a/web/ui/mantine-ui/.gitignore b/web/ui/mantine-ui/.gitignore
new file mode 100644
index 000000000..a547bf36d
--- /dev/null
+++ b/web/ui/mantine-ui/.gitignore
@@ -0,0 +1,24 @@
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+lerna-debug.log*
+
+node_modules
+dist
+dist-ssr
+*.local
+
+# Editor directories and files
+.vscode/*
+!.vscode/extensions.json
+.idea
+.DS_Store
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
diff --git a/web/ui/mantine-ui/README.md b/web/ui/mantine-ui/README.md
new file mode 100644
index 000000000..0d6babedd
--- /dev/null
+++ b/web/ui/mantine-ui/README.md
@@ -0,0 +1,30 @@
+# React + TypeScript + Vite
+
+This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
+
+Currently, two official plugins are available:
+
+- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
+- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
+
+## Expanding the ESLint configuration
+
+If you are developing a production application, we recommend updating the configuration to enable type aware lint rules:
+
+- Configure the top-level `parserOptions` property like this:
+
+```js
+export default {
+ // other rules...
+ parserOptions: {
+ ecmaVersion: 'latest',
+ sourceType: 'module',
+ project: ['./tsconfig.json', './tsconfig.node.json'],
+ tsconfigRootDir: __dirname,
+ },
+}
+```
+
+- Replace `plugin:@typescript-eslint/recommended` to `plugin:@typescript-eslint/recommended-type-checked` or `plugin:@typescript-eslint/strict-type-checked`
+- Optionally add `plugin:@typescript-eslint/stylistic-type-checked`
+- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and add `plugin:react/recommended` & `plugin:react/jsx-runtime` to the `extends` list
diff --git a/web/ui/mantine-ui/eslint.config.mjs b/web/ui/mantine-ui/eslint.config.mjs
new file mode 100644
index 000000000..c3cc58920
--- /dev/null
+++ b/web/ui/mantine-ui/eslint.config.mjs
@@ -0,0 +1,71 @@
+import { fixupConfigRules } from '@eslint/compat';
+import reactRefresh from 'eslint-plugin-react-refresh';
+import globals from 'globals';
+import tsParser from '@typescript-eslint/parser';
+import path from 'node:path';
+import { fileURLToPath } from 'node:url';
+import js from '@eslint/js';
+import { FlatCompat } from '@eslint/eslintrc';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+const compat = new FlatCompat({
+ baseDirectory: __dirname,
+ recommendedConfig: js.configs.recommended,
+ allConfig: js.configs.all
+});
+
+export default [{
+ ignores: ['**/dist', '**/.eslintrc.cjs'],
+}, ...fixupConfigRules(compat.extends(
+ 'eslint:recommended',
+ 'plugin:@typescript-eslint/recommended',
+ 'plugin:react-hooks/recommended',
+)), {
+ plugins: {
+ 'react-refresh': reactRefresh,
+ },
+
+ languageOptions: {
+ globals: {
+ ...globals.browser,
+ },
+
+ parser: tsParser,
+ },
+
+ rules: {
+ 'react-refresh/only-export-components': ['warn', {
+ allowConstantExport: true,
+ }],
+
+ // Disable the base rule as it can report incorrect errors
+ 'no-unused-vars': 'off',
+
+ // Use the TypeScript-specific rule for unused vars
+ '@typescript-eslint/no-unused-vars': ['warn', {
+ argsIgnorePattern: '^_',
+ varsIgnorePattern: '^_',
+ caughtErrorsIgnorePattern: '^_',
+ }],
+
+ 'prefer-const': ['error', {
+ destructuring: 'all',
+ }],
+ },
+ },
+ // Override for Node.js-based config files
+ {
+ files: ['postcss.config.cjs'], // Specify any other config files
+ languageOptions: {
+ ecmaVersion: 2021, // Optional, set ECMAScript version
+ sourceType: 'script', // For CommonJS (non-ESM) modules
+ globals: {
+ module: 'readonly',
+ require: 'readonly',
+ process: 'readonly',
+ __dirname: 'readonly', // Include other Node.js globals if needed
+ },
+ },
+ },
+];
diff --git a/web/ui/mantine-ui/index.html b/web/ui/mantine-ui/index.html
new file mode 100644
index 000000000..deb5f7f56
--- /dev/null
+++ b/web/ui/mantine-ui/index.html
@@ -0,0 +1,35 @@
+
+
+
+
+
+
+
+
+
+
+
+ TITLE_PLACEHOLDER
+
+
+
+
+
+
diff --git a/web/ui/mantine-ui/package.json b/web/ui/mantine-ui/package.json
new file mode 100644
index 000000000..0cb3fdaaf
--- /dev/null
+++ b/web/ui/mantine-ui/package.json
@@ -0,0 +1,72 @@
+{
+ "name": "@prometheus-io/mantine-ui",
+ "private": true,
+ "version": "0.54.1",
+ "type": "module",
+ "scripts": {
+ "start": "vite",
+ "build": "tsc && vite build",
+ "lint": "eslint . --report-unused-disable-directives --max-warnings 0",
+ "lint:fix": "eslint . --report-unused-disable-directives --max-warnings 0 --fix",
+ "preview": "vite preview",
+ "test": "vitest"
+ },
+ "dependencies": {
+ "@codemirror/autocomplete": "^6.18.0",
+ "@codemirror/language": "^6.10.2",
+ "@codemirror/lint": "^6.8.1",
+ "@codemirror/state": "^6.4.1",
+ "@codemirror/view": "^6.33.0",
+ "@floating-ui/dom": "^1.6.7",
+ "@lezer/common": "^1.2.1",
+ "@lezer/highlight": "^1.2.1",
+ "@mantine/code-highlight": "^7.11.2",
+ "@mantine/core": "^7.11.2",
+ "@mantine/dates": "^7.11.2",
+ "@mantine/hooks": "^7.11.2",
+ "@mantine/notifications": "^7.11.2",
+ "@nexucis/fuzzy": "^0.5.1",
+ "@nexucis/kvsearch": "^0.9.1",
+ "@prometheus-io/codemirror-promql": "^0.54.1",
+ "@reduxjs/toolkit": "^2.2.1",
+ "@tabler/icons-react": "^2.47.0",
+ "@tanstack/react-query": "^5.22.2",
+ "@testing-library/jest-dom": "^6.5.0",
+ "@testing-library/react": "^16.0.1",
+ "@types/lodash": "^4.17.7",
+ "@types/sanitize-html": "^2.13.0",
+ "@uiw/react-codemirror": "^4.23.1",
+ "clsx": "^2.1.1",
+ "dayjs": "^1.11.10",
+ "lodash": "^4.17.21",
+ "react": "^18.3.1",
+ "react-dom": "^18.3.1",
+ "react-infinite-scroll-component": "^6.1.0",
+ "react-redux": "^9.1.2",
+ "react-router-dom": "^6.26.1",
+ "sanitize-html": "^2.13.0",
+ "uplot": "^1.6.30",
+ "uplot-react": "^1.2.2",
+ "use-query-params": "^2.2.1"
+ },
+ "devDependencies": {
+ "@eslint/compat": "^1.1.1",
+ "@eslint/eslintrc": "^3.1.0",
+ "@eslint/js": "^9.9.1",
+ "@types/react": "^18.3.5",
+ "@types/react-dom": "^18.3.0",
+ "@typescript-eslint/eslint-plugin": "^6.21.0",
+ "@typescript-eslint/parser": "^6.21.0",
+ "@vitejs/plugin-react": "^4.2.1",
+ "eslint": "^9.9.1",
+ "eslint-plugin-react-hooks": "^5.1.0-rc-e56f4ae3-20240830",
+ "eslint-plugin-react-refresh": "^0.4.11",
+ "globals": "^15.9.0",
+ "jsdom": "^25.0.0",
+ "postcss": "^8.4.35",
+ "postcss-preset-mantine": "^1.17.0",
+ "postcss-simple-vars": "^7.0.1",
+ "vite": "^5.1.0",
+ "vitest": "^2.0.5"
+ }
+}
diff --git a/web/ui/mantine-ui/postcss.config.cjs b/web/ui/mantine-ui/postcss.config.cjs
new file mode 100644
index 000000000..bfba0ddfa
--- /dev/null
+++ b/web/ui/mantine-ui/postcss.config.cjs
@@ -0,0 +1,14 @@
+module.exports = {
+ plugins: {
+ 'postcss-preset-mantine': {},
+ 'postcss-simple-vars': {
+ variables: {
+ 'mantine-breakpoint-xs': '36em',
+ 'mantine-breakpoint-sm': '48em',
+ 'mantine-breakpoint-md': '62em',
+ 'mantine-breakpoint-lg': '75em',
+ 'mantine-breakpoint-xl': '88em',
+ },
+ },
+ },
+};
diff --git a/web/ui/mantine-ui/public/favicon.svg b/web/ui/mantine-ui/public/favicon.svg
new file mode 100644
index 000000000..5c51f66d9
--- /dev/null
+++ b/web/ui/mantine-ui/public/favicon.svg
@@ -0,0 +1,50 @@
+
+
+
+
\ No newline at end of file
diff --git a/web/ui/mantine-ui/src/App.module.css b/web/ui/mantine-ui/src/App.module.css
new file mode 100644
index 000000000..2a66b14f3
--- /dev/null
+++ b/web/ui/mantine-ui/src/App.module.css
@@ -0,0 +1,40 @@
+.control {
+ display: block;
+ padding: var(--mantine-spacing-xs) var(--mantine-spacing-md);
+ border-radius: var(--mantine-radius-md);
+ font-weight: 500;
+
+ @mixin hover {
+ background-color: var(--mantine-color-gray-8);
+ }
+}
+
+.link {
+ display: block;
+ line-height: 1;
+ padding: rem(8px) rem(12px);
+ border-radius: var(--mantine-radius-sm);
+ text-decoration: none;
+ color: var(--mantine-color-gray-0);
+ font-size: var(--mantine-font-size-sm);
+ font-weight: 500;
+ background-color: transparent;
+
+ @mixin hover {
+ background-color: var(--mantine-color-gray-6);
+ color: var(--mantine-color-gray-0);
+ }
+
+ [data-mantine-color-scheme] &[aria-current="page"] {
+ background-color: var(--mantine-color-blue-filled);
+ color: var(--mantine-color-white);
+ }
+}
+
+/* Font used for autocompletion item icons. */
+@font-face {
+ font-family: "codicon";
+ src:
+ local("codicon"),
+ url(./fonts/codicon.ttf) format("truetype");
+}
diff --git a/web/ui/mantine-ui/src/App.tsx b/web/ui/mantine-ui/src/App.tsx
new file mode 100644
index 000000000..dd69cff38
--- /dev/null
+++ b/web/ui/mantine-ui/src/App.tsx
@@ -0,0 +1,463 @@
+import "@mantine/core/styles.css";
+import "@mantine/code-highlight/styles.css";
+import "@mantine/notifications/styles.css";
+import "@mantine/dates/styles.css";
+import classes from "./App.module.css";
+import PrometheusLogo from "./images/prometheus-logo.svg";
+
+import {
+ AppShell,
+ Box,
+ Burger,
+ Button,
+ Group,
+ MantineProvider,
+ Menu,
+ Skeleton,
+ Text,
+ createTheme,
+ rem,
+} from "@mantine/core";
+import { useDisclosure } from "@mantine/hooks";
+import {
+ IconBell,
+ IconBellFilled,
+ IconChevronDown,
+ IconChevronRight,
+ IconCloudDataConnection,
+ IconDatabase,
+ IconDeviceDesktopAnalytics,
+ IconFlag,
+ IconHeartRateMonitor,
+ IconInfoCircle,
+ IconSearch,
+ IconServer,
+ IconServerCog,
+} from "@tabler/icons-react";
+import {
+ BrowserRouter,
+ Link,
+ NavLink,
+ Navigate,
+ Route,
+ Routes,
+} from "react-router-dom";
+import { IconTable } from "@tabler/icons-react";
+import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
+import QueryPage from "./pages/query/QueryPage";
+import AlertsPage from "./pages/AlertsPage";
+import RulesPage from "./pages/RulesPage";
+import TargetsPage from "./pages/targets/TargetsPage";
+import StatusPage from "./pages/StatusPage";
+import TSDBStatusPage from "./pages/TSDBStatusPage";
+import FlagsPage from "./pages/FlagsPage";
+import ConfigPage from "./pages/ConfigPage";
+import AgentPage from "./pages/AgentPage";
+import { Suspense, useEffect } from "react";
+import ErrorBoundary from "./components/ErrorBoundary";
+import { ThemeSelector } from "./components/ThemeSelector";
+import { Notifications } from "@mantine/notifications";
+import { useAppDispatch } from "./state/hooks";
+import { updateSettings, useSettings } from "./state/settingsSlice";
+import SettingsMenu from "./components/SettingsMenu";
+import ReadinessWrapper from "./components/ReadinessWrapper";
+import { QueryParamProvider } from "use-query-params";
+import { ReactRouter6Adapter } from "use-query-params/adapters/react-router-6";
+import ServiceDiscoveryPage from "./pages/service-discovery/ServiceDiscoveryPage";
+import AlertmanagerDiscoveryPage from "./pages/AlertmanagerDiscoveryPage";
+
+const queryClient = new QueryClient();
+
+const navIconStyle = { width: rem(16), height: rem(16) };
+
+const mainNavPages = [
+ {
+ title: "Query",
+ path: "/query",
+ icon: ,
+ element: ,
+ inAgentMode: false,
+ },
+ {
+ title: "Alerts",
+ path: "/alerts",
+ icon: ,
+ element: ,
+ inAgentMode: false,
+ },
+];
+
+const monitoringStatusPages = [
+ {
+ title: "Target health",
+ path: "/targets",
+ icon: ,
+ element: ,
+ inAgentMode: true,
+ },
+ {
+ title: "Rule health",
+ path: "/rules",
+ icon: ,
+ element: ,
+ inAgentMode: false,
+ },
+ {
+ title: "Service discovery",
+ path: "/service-discovery",
+ icon: ,
+ element: ,
+ inAgentMode: true,
+ },
+ {
+ title: "Alertmanager discovery",
+ path: "/discovered-alertmanagers",
+ icon: ,
+ element: ,
+ inAgentMode: false,
+ },
+];
+
+const serverStatusPages = [
+ {
+ title: "Runtime & build information",
+ path: "/status",
+ icon: ,
+ element: ,
+ inAgentMode: true,
+ },
+ {
+ title: "TSDB status",
+ path: "/tsdb-status",
+ icon: ,
+ element: ,
+ inAgentMode: false,
+ },
+ {
+ title: "Command-line flags",
+ path: "/flags",
+ icon: ,
+ element: ,
+ inAgentMode: true,
+ },
+ {
+ title: "Configuration",
+ path: "/config",
+ icon: ,
+ element: ,
+ inAgentMode: true,
+ },
+];
+
+const allStatusPages = [...monitoringStatusPages, ...serverStatusPages];
+
+const theme = createTheme({
+ colors: {
+ "codebox-bg": [
+ "#f5f5f5",
+ "#e7e7e7",
+ "#cdcdcd",
+ "#b2b2b2",
+ "#9a9a9a",
+ "#8b8b8b",
+ "#848484",
+ "#717171",
+ "#656565",
+ "#575757",
+ ],
+ },
+});
+
+// This dynamically/generically determines the pathPrefix by stripping the first known
+// endpoint suffix from the window location path. It works out of the box for both direct
+// hosting and reverse proxy deployments with no additional configurations required.
+const getPathPrefix = (path: string) => {
+ if (path.endsWith("/")) {
+ path = path.slice(0, -1);
+ }
+
+ const pagePaths = [
+ ...mainNavPages,
+ ...allStatusPages,
+ { path: "/agent" },
+ ].map((p) => p.path);
+
+ const pagePath = pagePaths.find((p) => path.endsWith(p));
+ return path.slice(0, path.length - (pagePath || "").length);
+};
+
+const navLinkXPadding = "md";
+
+function App() {
+ const [opened, { toggle }] = useDisclosure();
+
+ const pathPrefix = getPathPrefix(window.location.pathname);
+ const dispatch = useAppDispatch();
+
+ useEffect(() => {
+ dispatch(updateSettings({ pathPrefix }));
+ }, [pathPrefix, dispatch]);
+
+ const { agentMode, consolesLink } = useSettings();
+
+ const navLinks = (
+ <>
+ {consolesLink && (
+ }
+ px={navLinkXPadding}
+ >
+ Consoles
+
+ )}
+
+ {mainNavPages
+ .filter((p) => !agentMode || p.inAgentMode)
+ .map((p) => (
+
+ ))}
+
+
+
+ {/* }
+ target="_blank"
+ px={navLinkXPadding}
+ >
+ Help
+ */}
+ >
+ );
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Prometheus{agentMode && " Agent"}
+
+
+
+ {navLinks}
+
+
+
+
+
+
+
+
+
+
+
+
+ {navLinks}
+
+
+
+
+
+
+
+
+
+ {Array.from(Array(10), (_, i) => (
+
+ ))}
+
+ }
+ >
+
+
+ }
+ />
+ {agentMode ? (
+
+
+
+ }
+ />
+ ) : (
+ <>
+
+
+
+ }
+ />
+
+
+
+ }
+ />
+ >
+ )}
+ {allStatusPages.map((p) => (
+ {p.element}
+ }
+ />
+ ))}
+
+
+
+
+
+ {/* */}
+
+
+
+
+ );
+}
+
+export default App;
diff --git a/web/ui/mantine-ui/src/Badge.module.css b/web/ui/mantine-ui/src/Badge.module.css
new file mode 100644
index 000000000..afa27cea2
--- /dev/null
+++ b/web/ui/mantine-ui/src/Badge.module.css
@@ -0,0 +1,58 @@
+.statsBadge {
+ background-color: light-dark(
+ var(--mantine-color-gray-1),
+ var(--mantine-color-gray-8)
+ );
+ color: light-dark(var(--mantine-color-gray-7), var(--mantine-color-gray-5));
+}
+
+.labelBadge {
+ background-color: light-dark(
+ var(--mantine-color-gray-1),
+ var(--mantine-color-gray-8)
+ );
+ color: light-dark(var(--mantine-color-gray-7), var(--mantine-color-gray-5));
+}
+
+.healthOk {
+ background-color: light-dark(
+ var(--mantine-color-green-1),
+ var(--mantine-color-green-9)
+ );
+ color: light-dark(var(--mantine-color-green-9), var(--mantine-color-green-1));
+}
+
+.healthErr {
+ background-color: light-dark(
+ var(--mantine-color-red-1),
+ darken(var(--mantine-color-red-9), 0.25)
+ );
+ color: light-dark(var(--mantine-color-red-9), var(--mantine-color-red-1));
+}
+
+.healthWarn {
+ background-color: light-dark(
+ var(--mantine-color-yellow-1),
+ var(--mantine-color-yellow-9)
+ );
+ color: light-dark(
+ var(--mantine-color-yellow-9),
+ var(--mantine-color-yellow-1)
+ );
+}
+
+.healthInfo {
+ background-color: light-dark(
+ var(--mantine-color-blue-1),
+ var(--mantine-color-blue-9)
+ );
+ color: light-dark(var(--mantine-color-blue-9), var(--mantine-color-blue-1));
+}
+
+.healthUnknown {
+ background-color: light-dark(
+ var(--mantine-color-gray-2),
+ var(--mantine-color-gray-7)
+ );
+ color: light-dark(var(--mantine-color-gray-7), var(--mantine-color-gray-4));
+}
diff --git a/web/ui/mantine-ui/src/Panel.module.css b/web/ui/mantine-ui/src/Panel.module.css
new file mode 100644
index 000000000..0c90b70de
--- /dev/null
+++ b/web/ui/mantine-ui/src/Panel.module.css
@@ -0,0 +1,19 @@
+.panelHealthOk {
+ border-left: 5px solid
+ light-dark(var(--mantine-color-green-3), var(--mantine-color-green-8)) !important;
+}
+
+.panelHealthErr {
+ border-left: 5px solid
+ light-dark(var(--mantine-color-red-3), var(--mantine-color-red-9)) !important;
+}
+
+.panelHealthWarn {
+ border-left: 5px solid
+ light-dark(var(--mantine-color-orange-3), var(--mantine-color-yellow-9)) !important;
+}
+
+.panelHealthUnknown {
+ border-left: 5px solid
+ light-dark(var(--mantine-color-gray-3), var(--mantine-color-gray-6)) !important;
+}
diff --git a/web/ui/mantine-ui/src/api/api.ts b/web/ui/mantine-ui/src/api/api.ts
new file mode 100644
index 000000000..d7446d689
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/api.ts
@@ -0,0 +1,128 @@
+import { QueryKey, useQuery, useSuspenseQuery } from "@tanstack/react-query";
+import { useSettings } from "../state/settingsSlice";
+
+export const API_PATH = "api/v1";
+
+export type SuccessAPIResponse = {
+ status: "success";
+ data: T;
+ warnings?: string[];
+ infos?: string[];
+};
+
+export type ErrorAPIResponse = {
+ status: "error";
+ errorType: string;
+ error: string;
+};
+
+export type APIResponse = SuccessAPIResponse | ErrorAPIResponse;
+
+const createQueryFn =
+ ({
+ pathPrefix,
+ path,
+ params,
+ recordResponseTime,
+ }: {
+ pathPrefix: string;
+ path: string;
+ params?: Record;
+ recordResponseTime?: (time: number) => void;
+ }) =>
+ async ({ signal }: { signal: AbortSignal }) => {
+ const queryString = params
+ ? `?${new URLSearchParams(params).toString()}`
+ : "";
+
+ try {
+ const startTime = Date.now();
+
+ const res = await fetch(
+ `${pathPrefix}/${API_PATH}${path}${queryString}`,
+ {
+ cache: "no-store",
+ credentials: "same-origin",
+ signal,
+ }
+ );
+
+ if (
+ !res.ok &&
+ !res.headers.get("content-type")?.startsWith("application/json")
+ ) {
+ // For example, Prometheus may send a 503 Service Unavailable response
+ // with a "text/plain" content type when it's starting up. But the API
+ // may also respond with a JSON error message and the same error code.
+ throw new Error(res.statusText);
+ }
+
+ const apiRes = (await res.json()) as APIResponse;
+
+ if (recordResponseTime) {
+ recordResponseTime(Date.now() - startTime);
+ }
+
+ if (apiRes.status === "error") {
+ throw new Error(
+ apiRes.error !== undefined
+ ? apiRes.error
+ : 'missing "error" field in response JSON'
+ );
+ }
+
+ return apiRes as SuccessAPIResponse;
+ } catch (error) {
+ if (!(error instanceof Error)) {
+ throw new Error("Unknown error");
+ }
+
+ switch (error.name) {
+ case "TypeError":
+ throw new Error("Network error or unable to reach the server");
+ case "SyntaxError":
+ throw new Error("Invalid JSON response");
+ default:
+ throw error;
+ }
+ }
+ };
+
+type QueryOptions = {
+ key?: QueryKey;
+ path: string;
+ params?: Record;
+ enabled?: boolean;
+ recordResponseTime?: (time: number) => void;
+};
+
+export const useAPIQuery = ({
+ key,
+ path,
+ params,
+ enabled,
+ recordResponseTime,
+}: QueryOptions) => {
+ const { pathPrefix } = useSettings();
+
+ return useQuery>({
+ queryKey: key !== undefined ? key : [path, params],
+ retry: false,
+ refetchOnWindowFocus: false,
+ gcTime: 0,
+ enabled,
+ queryFn: createQueryFn({ pathPrefix, path, params, recordResponseTime }),
+ });
+};
+
+export const useSuspenseAPIQuery = ({ key, path, params }: QueryOptions) => {
+ const { pathPrefix } = useSettings();
+
+ return useSuspenseQuery>({
+ queryKey: key !== undefined ? key : [path, params],
+ retry: false,
+ refetchOnWindowFocus: false,
+ gcTime: 0,
+ queryFn: createQueryFn({ pathPrefix, path, params }),
+ });
+};
diff --git a/web/ui/mantine-ui/src/api/responseTypes/alertmanagers.ts b/web/ui/mantine-ui/src/api/responseTypes/alertmanagers.ts
new file mode 100644
index 000000000..2fcb8c23a
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/alertmanagers.ts
@@ -0,0 +1,10 @@
+export type AlertmanagerTarget = {
+ url: string;
+};
+
+// Result type for /api/v1/alertmanagers endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#alertmanagers
+export type AlertmanagersResult = {
+ activeAlertmanagers: AlertmanagerTarget[];
+ droppedAlertmanagers: AlertmanagerTarget[];
+};
diff --git a/web/ui/mantine-ui/src/api/responseTypes/config.ts b/web/ui/mantine-ui/src/api/responseTypes/config.ts
new file mode 100644
index 000000000..4f509f9e0
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/config.ts
@@ -0,0 +1,5 @@
+// Result type for /api/v1/status/config endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#config
+export default interface ConfigResult {
+ yaml: string;
+}
diff --git a/web/ui/mantine-ui/src/api/responseTypes/labelValues.ts b/web/ui/mantine-ui/src/api/responseTypes/labelValues.ts
new file mode 100644
index 000000000..3c69b79b5
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/labelValues.ts
@@ -0,0 +1,3 @@
+// Result type for /api/v1/label//values endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#querying-label-values
+export type LabelValuesResult = string[];
diff --git a/web/ui/mantine-ui/src/api/responseTypes/metadata.ts b/web/ui/mantine-ui/src/api/responseTypes/metadata.ts
new file mode 100644
index 000000000..1bd168455
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/metadata.ts
@@ -0,0 +1,6 @@
+// Result type for /api/v1/alerts endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#querying-target-metadata
+export type MetadataResult = Record<
+ string,
+ { type: string; help: string; unit: string }[]
+>;
diff --git a/web/ui/mantine-ui/src/api/responseTypes/query.ts b/web/ui/mantine-ui/src/api/responseTypes/query.ts
new file mode 100644
index 000000000..327b049b5
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/query.ts
@@ -0,0 +1,51 @@
+export interface Metric {
+ [key: string]: string;
+}
+
+export interface Histogram {
+ count: string;
+ sum: string;
+ buckets?: [number, string, string, string][];
+}
+
+export interface InstantSample {
+ metric: Metric;
+ value?: SampleValue;
+ histogram?: SampleHistogram;
+}
+
+export interface RangeSamples {
+ metric: Metric;
+ values?: SampleValue[];
+ histograms?: SampleHistogram[];
+}
+
+export type SampleValue = [number, string];
+export type SampleHistogram = [number, Histogram];
+
+// Result type for /api/v1/query endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#instant-queries
+export type InstantQueryResult =
+ | {
+ resultType: "vector";
+ result: InstantSample[];
+ }
+ | {
+ resultType: "matrix";
+ result: RangeSamples[];
+ }
+ | {
+ resultType: "scalar";
+ result: SampleValue;
+ }
+ | {
+ resultType: "string";
+ result: SampleValue;
+ };
+
+// Result type for /api/v1/query_range endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#range-queries
+export type RangeQueryResult = {
+ resultType: "matrix";
+ result: RangeSamples[];
+};
diff --git a/web/ui/mantine-ui/src/api/responseTypes/rules.ts b/web/ui/mantine-ui/src/api/responseTypes/rules.ts
new file mode 100644
index 000000000..13535315c
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/rules.ts
@@ -0,0 +1,63 @@
+type RuleState = "pending" | "firing" | "inactive";
+
+export interface Alert {
+ labels: Record;
+ state: RuleState;
+ value: string;
+ annotations: Record;
+ activeAt: string;
+ keepFiringSince: string;
+}
+
+type CommonRuleFields = {
+ name: string;
+ query: string;
+ evaluationTime: string;
+ health: "ok" | "unknown" | "err";
+ lastError?: string;
+ lastEvaluation: string;
+};
+
+export type AlertingRule = {
+ type: "alerting";
+ // For alerting rules, the 'labels' field is always present, even when there are no labels.
+ labels: Record;
+ annotations: Record;
+ duration: number;
+ keepFiringFor: number;
+ state: RuleState;
+ alerts: Alert[];
+} & CommonRuleFields;
+
+type RecordingRule = {
+ type: "recording";
+ // For recording rules, the 'labels' field is only present when there are labels.
+ labels?: Record;
+} & CommonRuleFields;
+
+export type Rule = AlertingRule | RecordingRule;
+
+interface RuleGroup {
+ name: string;
+ file: string;
+ interval: string;
+ rules: Rule[];
+ evaluationTime: string;
+ lastEvaluation: string;
+}
+
+export type AlertingRuleGroup = Omit & {
+ rules: AlertingRule[];
+};
+
+// Result type for /api/v1/alerts endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#alerts
+export interface RulesResult {
+ groups: RuleGroup[];
+}
+
+// Same as RulesResult above, but can be used when the caller ensures via a
+// "type=alert" query parameter that all rules are alerting rules.
+export interface AlertingRulesResult {
+ groups: AlertingRuleGroup[];
+}
diff --git a/web/ui/mantine-ui/src/api/responseTypes/scrapePools.ts b/web/ui/mantine-ui/src/api/responseTypes/scrapePools.ts
new file mode 100644
index 000000000..793ad7157
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/scrapePools.ts
@@ -0,0 +1,2 @@
+// Result type for /api/v1/scrape_pools endpoint.
+export type ScrapePoolsResult = { scrapePools: string[] };
diff --git a/web/ui/mantine-ui/src/api/responseTypes/series.ts b/web/ui/mantine-ui/src/api/responseTypes/series.ts
new file mode 100644
index 000000000..0c07def22
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/series.ts
@@ -0,0 +1,6 @@
+// Result type for /api/v1/series endpoint.
+
+import { Metric } from "./query";
+
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#finding-series-by-label-matchers
+export type SeriesResult = Metric[];
diff --git a/web/ui/mantine-ui/src/api/responseTypes/targets.ts b/web/ui/mantine-ui/src/api/responseTypes/targets.ts
new file mode 100644
index 000000000..cc0e891f0
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/targets.ts
@@ -0,0 +1,29 @@
+export interface Labels {
+ [key: string]: string;
+}
+
+export type Target = {
+ discoveredLabels: Labels;
+ labels: Labels;
+ scrapePool: string;
+ scrapeUrl: string;
+ globalUrl: string;
+ lastError: string;
+ lastScrape: string;
+ lastScrapeDuration: number;
+ health: string;
+ scrapeInterval: string;
+ scrapeTimeout: string;
+};
+
+export interface DroppedTarget {
+ discoveredLabels: Labels;
+}
+
+// Result type for /api/v1/targets endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#targets
+export type TargetsResult = {
+ activeTargets: Target[];
+ droppedTargets: DroppedTarget[];
+ droppedTargetCounts: Record;
+};
diff --git a/web/ui/mantine-ui/src/api/responseTypes/tsdbStatus.ts b/web/ui/mantine-ui/src/api/responseTypes/tsdbStatus.ts
new file mode 100644
index 000000000..255249757
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/tsdbStatus.ts
@@ -0,0 +1,22 @@
+interface Stats {
+ name: string;
+ value: number;
+}
+
+interface HeadStats {
+ numSeries: number;
+ numLabelPairs: number;
+ chunkCount: number;
+ minTime: number;
+ maxTime: number;
+}
+
+// Result type for /api/v1/status/tsdb endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#tsdb-stats
+export interface TSDBStatusResult {
+ headStats: HeadStats;
+ seriesCountByMetricName: Stats[];
+ labelValueCountByLabelName: Stats[];
+ memoryInBytesByLabelName: Stats[];
+ seriesCountByLabelValuePair: Stats[];
+}
diff --git a/web/ui/mantine-ui/src/api/responseTypes/walreplay.ts b/web/ui/mantine-ui/src/api/responseTypes/walreplay.ts
new file mode 100644
index 000000000..b881ba750
--- /dev/null
+++ b/web/ui/mantine-ui/src/api/responseTypes/walreplay.ts
@@ -0,0 +1,7 @@
+// Result type for /api/v1/status/walreplay endpoint.
+// See: https://prometheus.io/docs/prometheus/latest/querying/api/#wal-replay-stats
+export interface WALReplayStatus {
+ min: number;
+ max: number;
+ current: number;
+}
diff --git a/web/ui/mantine-ui/src/codemirror/theme.ts b/web/ui/mantine-ui/src/codemirror/theme.ts
new file mode 100644
index 000000000..464983759
--- /dev/null
+++ b/web/ui/mantine-ui/src/codemirror/theme.ts
@@ -0,0 +1,323 @@
+import { HighlightStyle } from "@codemirror/language";
+import { EditorView } from "@codemirror/view";
+import { tags } from "@lezer/highlight";
+
+export const baseTheme = EditorView.theme({
+ ".cm-content": {
+ paddingTop: "3px",
+ paddingBottom: "0px",
+ },
+ "&.cm-editor": {
+ "&.cm-focused": {
+ outline: "none",
+ outline_fallback: "none",
+ },
+ backgroundColor: "transparent",
+ },
+ ".cm-scroller": {
+ overflow: "hidden",
+ fontFamily: '"DejaVu Sans Mono", monospace',
+ },
+ ".cm-placeholder": {
+ fontFamily:
+ '-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,"Helvetica Neue",Arial,"Noto Sans","Liberation Sans",sans-serif,"Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol","Noto Color Emoji"',
+ },
+
+ ".cm-matchingBracket": {
+ fontWeight: "bold",
+ outline: "1px dashed transparent",
+ },
+ ".cm-nonmatchingBracket": { borderColor: "red" },
+
+ ".cm-tooltip.cm-tooltip-autocomplete": {
+ "& > ul": {
+ maxHeight: "350px",
+ fontFamily: '"DejaVu Sans Mono", monospace',
+ maxWidth: "unset",
+ },
+ "& > ul > li": {
+ padding: "2px 1em 2px 3px",
+ },
+ minWidth: "30%",
+ },
+
+ ".cm-completionDetail": {
+ float: "right",
+ color: "#999",
+ },
+
+ ".cm-tooltip.cm-completionInfo": {
+ padding: "10px",
+ fontFamily:
+ "'Open Sans', 'Lucida Sans Unicode', 'Lucida Grande', sans-serif;",
+ border: "none",
+ minWidth: "250px",
+ maxWidth: "min-content",
+ },
+
+ ".cm-completionInfo.cm-completionInfo-right": {
+ "&:before": {
+ content: "' '",
+ height: "0",
+ position: "absolute",
+ width: "0",
+ left: "-20px",
+ borderWidth: "10px",
+ borderStyle: "solid",
+ borderColor: "transparent",
+ },
+ marginTop: "-11px",
+ marginLeft: "12px",
+ },
+ ".cm-completionInfo.cm-completionInfo-left": {
+ "&:before": {
+ content: "' '",
+ height: "0",
+ position: "absolute",
+ width: "0",
+ right: "-20px",
+ borderWidth: "10px",
+ borderStyle: "solid",
+ borderColor: "transparent",
+ },
+ marginTop: "-11px",
+ marginRight: "12px",
+ },
+ ".cm-completionInfo.cm-completionInfo-right-narrow": {
+ "&:before": {
+ content: "' '",
+ height: "0",
+ position: "absolute",
+ width: "0",
+ top: "-20px",
+ borderWidth: "10px",
+ borderStyle: "solid",
+ borderColor: "transparent",
+ },
+ marginTop: "10px",
+ marginLeft: "150px",
+ },
+ ".cm-completionMatchedText": {
+ textDecoration: "none",
+ fontWeight: "bold",
+ },
+
+ ".cm-selectionMatch": {
+ backgroundColor: "#e6f3ff",
+ },
+
+ ".cm-diagnostic": {
+ "&.cm-diagnostic-error": {
+ borderLeft: "3px solid #e65013",
+ },
+ },
+
+ ".cm-completionIcon": {
+ boxSizing: "content-box",
+ fontSize: "16px",
+ lineHeight: "1",
+ marginRight: "10px",
+ verticalAlign: "top",
+ "&:after": { content: "'\\ea88'" },
+ fontFamily: "codicon",
+ paddingRight: "0",
+ opacity: "1",
+ },
+
+ ".cm-completionIcon-function, .cm-completionIcon-method": {
+ "&:after": { content: "'\\ea8c'" },
+ },
+ ".cm-completionIcon-class": {
+ "&:after": { content: "'○'" },
+ },
+ ".cm-completionIcon-interface": {
+ "&:after": { content: "'◌'" },
+ },
+ ".cm-completionIcon-variable": {
+ "&:after": { content: "'𝑥'" },
+ },
+ ".cm-completionIcon-constant": {
+ "&:after": { content: "'\\eb5f'" },
+ },
+ ".cm-completionIcon-type": {
+ "&:after": { content: "'𝑡'" },
+ },
+ ".cm-completionIcon-enum": {
+ "&:after": { content: "'∪'" },
+ },
+ ".cm-completionIcon-property": {
+ "&:after": { content: "'□'" },
+ },
+ ".cm-completionIcon-keyword": {
+ "&:after": { content: "'\\eb62'" },
+ },
+ ".cm-completionIcon-namespace": {
+ "&:after": { content: "'▢'" },
+ },
+ ".cm-completionIcon-text": {
+ "&:after": { content: "'\\ea95'" },
+ color: "#ee9d28",
+ },
+});
+
+export const lightTheme = EditorView.theme(
+ {
+ ".cm-tooltip": {
+ backgroundColor: "#f8f8f8",
+ borderColor: "rgba(52, 79, 113, 0.2)",
+ },
+
+ ".cm-tooltip.cm-tooltip-autocomplete": {
+ "& li:hover": {
+ backgroundColor: "#ddd",
+ },
+ "& > ul > li[aria-selected]": {
+ backgroundColor: "#d6ebff",
+ color: "unset",
+ },
+ },
+
+ ".cm-tooltip.cm-completionInfo": {
+ backgroundColor: "#d6ebff",
+ },
+
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-right": {
+ "&:before": {
+ borderRightColor: "#d6ebff",
+ },
+ },
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-right-narrow": {
+ "&:before": {
+ borderBottomColor: "#d6ebff",
+ },
+ },
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-left": {
+ "&:before": {
+ borderLeftColor: "#d6ebff",
+ },
+ },
+
+ ".cm-line": {
+ "&::selection": {
+ backgroundColor: "#add6ff",
+ },
+ "& > span::selection": {
+ backgroundColor: "#add6ff",
+ },
+ },
+
+ ".cm-matchingBracket": {
+ color: "#000",
+ backgroundColor: "#dedede",
+ },
+
+ ".cm-completionMatchedText": {
+ color: "#0066bf",
+ },
+
+ ".cm-completionIcon": {
+ color: "#007acc",
+ },
+
+ ".cm-completionIcon-constant": {
+ color: "#007acc",
+ },
+
+ ".cm-completionIcon-function, .cm-completionIcon-method": {
+ color: "#652d90",
+ },
+
+ ".cm-completionIcon-keyword": {
+ color: "#616161",
+ },
+ },
+ { dark: false }
+);
+
+export const darkTheme = EditorView.theme(
+ {
+ ".cm-content": {
+ caretColor: "#fff",
+ },
+
+ ".cm-tooltip.cm-completionInfo": {
+ backgroundColor: "#333338",
+ },
+
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-right": {
+ "&:before": {
+ borderRightColor: "#333338",
+ },
+ },
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-right-narrow": {
+ "&:before": {
+ borderBottomColor: "#333338",
+ },
+ },
+ ".cm-tooltip > .cm-completionInfo.cm-completionInfo-left": {
+ "&:before": {
+ borderLeftColor: "#333338",
+ },
+ },
+
+ ".cm-line": {
+ "&::selection": {
+ backgroundColor: "#767676",
+ },
+ "& > span::selection": {
+ backgroundColor: "#767676",
+ },
+ },
+
+ ".cm-matchingBracket, &.cm-focused .cm-matchingBracket": {
+ backgroundColor: "#616161",
+ },
+
+ ".cm-completionMatchedText": {
+ color: "#7dd3fc",
+ },
+
+ ".cm-completionIcon, .cm-completionIcon-constant": {
+ color: "#7dd3fc",
+ },
+
+ ".cm-completionIcon-function, .cm-completionIcon-method": {
+ color: "#d8b4fe",
+ },
+
+ ".cm-completionIcon-keyword": {
+ color: "#cbd5e1 !important",
+ },
+ },
+ { dark: true }
+);
+
+export const promqlHighlighter = HighlightStyle.define([
+ { tag: tags.number, color: "#09885a" },
+ { tag: tags.string, color: "#a31515" },
+ { tag: tags.keyword, color: "#008080" },
+ { tag: tags.function(tags.variableName), color: "#008080" },
+ { tag: tags.labelName, color: "#800000" },
+ { tag: tags.operator },
+ { tag: tags.modifier, color: "#008080" },
+ { tag: tags.paren },
+ { tag: tags.squareBracket },
+ { tag: tags.brace },
+ { tag: tags.invalid, color: "red" },
+ { tag: tags.comment, color: "#888", fontStyle: "italic" },
+]);
+
+export const darkPromqlHighlighter = HighlightStyle.define([
+ { tag: tags.number, color: "#22c55e" },
+ { tag: tags.string, color: "#fca5a5" },
+ { tag: tags.keyword, color: "#14bfad" },
+ { tag: tags.function(tags.variableName), color: "#14bfad" },
+ { tag: tags.labelName, color: "#ff8585" },
+ { tag: tags.operator },
+ { tag: tags.modifier, color: "#14bfad" },
+ { tag: tags.paren },
+ { tag: tags.squareBracket },
+ { tag: tags.brace },
+ { tag: tags.invalid, color: "#ff3d3d" },
+ { tag: tags.comment, color: "#9ca3af", fontStyle: "italic" },
+]);
diff --git a/web/ui/mantine-ui/src/components/CustomInfiniteScroll.tsx b/web/ui/mantine-ui/src/components/CustomInfiniteScroll.tsx
new file mode 100644
index 000000000..f926c299c
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/CustomInfiniteScroll.tsx
@@ -0,0 +1,54 @@
+import { ComponentType, useEffect, useState } from "react";
+import InfiniteScroll from "react-infinite-scroll-component";
+
+const initialNumberOfItemsDisplayed = 50;
+
+export interface InfiniteScrollItemsProps {
+ items: T[];
+}
+
+interface CustomInfiniteScrollProps {
+ allItems: T[];
+ child: ComponentType>;
+}
+
+const CustomInfiniteScroll = ({
+ allItems,
+ child,
+}: CustomInfiniteScrollProps) => {
+ const [items, setItems] = useState(allItems.slice(0, 50));
+ const [index, setIndex] = useState(initialNumberOfItemsDisplayed);
+ const [hasMore, setHasMore] = useState(
+ allItems.length > initialNumberOfItemsDisplayed
+ );
+ const Child = child;
+
+ useEffect(() => {
+ setItems(allItems.slice(0, initialNumberOfItemsDisplayed));
+ setHasMore(allItems.length > initialNumberOfItemsDisplayed);
+ }, [allItems]);
+
+ const fetchMoreData = () => {
+ if (items.length === allItems.length) {
+ setHasMore(false);
+ } else {
+ const newIndex = index + initialNumberOfItemsDisplayed;
+ setIndex(newIndex);
+ setItems(allItems.slice(0, newIndex));
+ }
+ };
+
+ return (
+ loading...}
+ dataLength={items.length}
+ height={items.length > 25 ? "75vh" : ""}
+ >
+
+
+ );
+};
+
+export default CustomInfiniteScroll;
diff --git a/web/ui/mantine-ui/src/components/EndpointLink.tsx b/web/ui/mantine-ui/src/components/EndpointLink.tsx
new file mode 100644
index 000000000..c9b6a8989
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/EndpointLink.tsx
@@ -0,0 +1,58 @@
+import { Anchor, Badge, Group, Stack } from "@mantine/core";
+import { FC } from "react";
+
+export interface EndpointLinkProps {
+ endpoint: string;
+ globalUrl: string;
+}
+
+const EndpointLink: FC = ({ endpoint, globalUrl }) => {
+ let url: URL;
+ let search = "";
+ let invalidURL = false;
+ try {
+ url = new URL(endpoint);
+ } catch (err: unknown) {
+ // In cases of IPv6 addresses with a Zone ID, URL may not be parseable.
+ // See https://github.com/prometheus/prometheus/issues/9760
+ // In this case, we attempt to prepare a synthetic URL with the
+ // same query parameters, for rendering purposes.
+ invalidURL = true;
+ if (endpoint.indexOf("?") > -1) {
+ search = endpoint.substring(endpoint.indexOf("?"));
+ }
+ url = new URL("http://0.0.0.0" + search);
+ }
+
+ const { host, pathname, protocol, searchParams }: URL = url;
+ const params = Array.from(searchParams.entries());
+ const displayLink = invalidURL
+ ? endpoint.replace(search, "")
+ : `${protocol}//${host}${pathname}`;
+ return (
+
+
+ {displayLink}
+
+ {params.length > 0 && (
+
+ {params.map(([labelName, labelValue]: [string, string]) => {
+ return (
+
+ {`${labelName}="${labelValue}"`}
+
+ );
+ })}
+
+ )}
+
+ );
+};
+
+export default EndpointLink;
diff --git a/web/ui/mantine-ui/src/components/ErrorBoundary.tsx b/web/ui/mantine-ui/src/components/ErrorBoundary.tsx
new file mode 100644
index 000000000..3416623d3
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/ErrorBoundary.tsx
@@ -0,0 +1,58 @@
+import { Alert } from "@mantine/core";
+import { IconAlertTriangle } from "@tabler/icons-react";
+import { Component, ErrorInfo, ReactNode } from "react";
+import { useLocation } from "react-router-dom";
+
+interface Props {
+ children?: ReactNode;
+ title?: string;
+}
+
+interface State {
+ error: Error | null;
+}
+
+class ErrorBoundary extends Component {
+ public state: State = {
+ error: null,
+ };
+
+ public static getDerivedStateFromError(error: Error): State {
+ // Update state so the next render will show the fallback UI.
+ return { error };
+ }
+
+ public componentDidCatch(error: Error, errorInfo: ErrorInfo) {
+ console.error("Uncaught error:", error, errorInfo);
+ }
+
+ public render() {
+ if (this.state.error !== null) {
+ return (
+ }
+ maw={500}
+ mx="auto"
+ mt="lg"
+ >
+ Error: {this.state.error.message}
+
+ );
+ }
+
+ return this.props.children;
+ }
+}
+
+const ResettingErrorBoundary = (props: Props) => {
+ const location = useLocation();
+ return (
+
+ {props.children}
+
+ );
+};
+
+export default ResettingErrorBoundary;
diff --git a/web/ui/mantine-ui/src/components/LabelBadges.tsx b/web/ui/mantine-ui/src/components/LabelBadges.tsx
new file mode 100644
index 000000000..f60a37f03
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/LabelBadges.tsx
@@ -0,0 +1,38 @@
+import { Badge, BadgeVariant, Group, MantineColor, Stack } from "@mantine/core";
+import { FC } from "react";
+import { escapeString } from "../lib/escapeString";
+import badgeClasses from "../Badge.module.css";
+
+export interface LabelBadgesProps {
+ labels: Record;
+ variant?: BadgeVariant;
+ color?: MantineColor;
+ wrapper?: typeof Group | typeof Stack;
+}
+
+export const LabelBadges: FC = ({
+ labels,
+ variant,
+ color,
+ wrapper: Wrapper = Group,
+}) => (
+
+ {Object.entries(labels).map(([k, v]) => {
+ return (
+
+ {k}="{escapeString(v)}"
+
+ );
+ })}
+
+);
diff --git a/web/ui/mantine-ui/src/components/ReadinessWrapper.tsx b/web/ui/mantine-ui/src/components/ReadinessWrapper.tsx
new file mode 100644
index 000000000..52ae0485c
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/ReadinessWrapper.tsx
@@ -0,0 +1,93 @@
+import { FC, PropsWithChildren, useEffect, useState } from "react";
+import { useAppDispatch } from "../state/hooks";
+import { updateSettings, useSettings } from "../state/settingsSlice";
+import { useSuspenseAPIQuery } from "../api/api";
+import { WALReplayStatus } from "../api/responseTypes/walreplay";
+import { Progress, Stack, Title } from "@mantine/core";
+import { useSuspenseQuery } from "@tanstack/react-query";
+
+const ReadinessLoader: FC = () => {
+ const { pathPrefix } = useSettings();
+ const dispatch = useAppDispatch();
+
+ // Query key is incremented every second to retrigger the status fetching.
+ const [queryKey, setQueryKey] = useState(0);
+
+ // Query readiness status.
+ const { data: ready } = useSuspenseQuery({
+ queryKey: ["ready", queryKey],
+ retry: false,
+ refetchOnWindowFocus: false,
+ gcTime: 0,
+ queryFn: async ({ signal }: { signal: AbortSignal }) => {
+ try {
+ const res = await fetch(`${pathPrefix}/-/ready`, {
+ cache: "no-store",
+ credentials: "same-origin",
+ signal,
+ });
+ switch (res.status) {
+ case 200:
+ return true;
+ case 503:
+ return false;
+ default:
+ throw new Error(res.statusText);
+ }
+ } catch (error) {
+ throw new Error("Unexpected error while fetching ready status");
+ }
+ },
+ });
+
+ // Query WAL replay status.
+ const {
+ data: {
+ data: { min, max, current },
+ },
+ } = useSuspenseAPIQuery({
+ path: "/status/walreplay",
+ key: ["walreplay", queryKey],
+ });
+
+ useEffect(() => {
+ if (ready) {
+ dispatch(updateSettings({ ready: ready }));
+ }
+ }, [ready, dispatch]);
+
+ useEffect(() => {
+ const interval = setInterval(() => setQueryKey((v) => v + 1), 1000);
+ return () => clearInterval(interval);
+ }, []);
+
+ return (
+
+ Starting up...
+ {max > 0 && (
+ <>
+
+ Replaying WAL ({current}/{max})
+
+
+ >
+ )}
+
+ );
+};
+
+export const ReadinessWrapper: FC = ({ children }) => {
+ const { ready } = useSettings();
+
+ if (ready) {
+ return <>{children}>;
+ }
+
+ return ;
+};
+
+export default ReadinessWrapper;
diff --git a/web/ui/mantine-ui/src/components/RuleDefinition.module.css b/web/ui/mantine-ui/src/components/RuleDefinition.module.css
new file mode 100644
index 000000000..15a8d4a42
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/RuleDefinition.module.css
@@ -0,0 +1,15 @@
+.codebox {
+ background-color: light-dark(
+ var(--mantine-color-gray-1),
+ var(--mantine-color-gray-9)
+ );
+}
+
+.queryButton {
+ opacity: 0;
+ transition: opacity 0.1s ease-in-out;
+}
+
+.codebox:hover .queryButton {
+ opacity: 1;
+}
diff --git a/web/ui/mantine-ui/src/components/RuleDefinition.tsx b/web/ui/mantine-ui/src/components/RuleDefinition.tsx
new file mode 100644
index 000000000..10944f6e7
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/RuleDefinition.tsx
@@ -0,0 +1,116 @@
+import {
+ ActionIcon,
+ Badge,
+ Box,
+ Card,
+ Group,
+ rem,
+ Table,
+ Tooltip,
+ useComputedColorScheme,
+} from "@mantine/core";
+import { IconClockPause, IconClockPlay, IconSearch } from "@tabler/icons-react";
+import { FC } from "react";
+import { formatPrometheusDuration } from "../lib/formatTime";
+import codeboxClasses from "./RuleDefinition.module.css";
+import { Rule } from "../api/responseTypes/rules";
+import CodeMirror, { EditorView } from "@uiw/react-codemirror";
+import { syntaxHighlighting } from "@codemirror/language";
+import {
+ baseTheme,
+ darkPromqlHighlighter,
+ lightTheme,
+ promqlHighlighter,
+} from "../codemirror/theme";
+import { PromQLExtension } from "@prometheus-io/codemirror-promql";
+import { LabelBadges } from "./LabelBadges";
+import { useSettings } from "../state/settingsSlice";
+
+const promqlExtension = new PromQLExtension();
+
+const RuleDefinition: FC<{ rule: Rule }> = ({ rule }) => {
+ const theme = useComputedColorScheme();
+ const { pathPrefix } = useSettings();
+
+ return (
+ <>
+
+
+
+
+ {
+ window.open(
+ `${pathPrefix}/query?g0.expr=${encodeURIComponent(rule.query)}&g0.tab=1`,
+ "_blank"
+ );
+ }}
+ className={codeboxClasses.queryButton}
+ >
+
+
+
+
+ {rule.type === "alerting" && (
+
+ {rule.duration && (
+ }
+ >
+ for: {formatPrometheusDuration(rule.duration * 1000)}
+
+ )}
+ {rule.keepFiringFor && (
+ }
+ >
+ keep_firing_for: {formatPrometheusDuration(rule.duration * 1000)}
+
+ )}
+
+ )}
+ {rule.labels && Object.keys(rule.labels).length > 0 && (
+
+
+
+ )}
+ {rule.type === "alerting" && Object.keys(rule.annotations).length > 0 && (
+
+
+ {Object.entries(rule.annotations).map(([k, v]) => (
+
+
+ {k}
+
+ {v}
+
+ ))}
+
+
+ )}
+ >
+ );
+};
+
+export default RuleDefinition;
diff --git a/web/ui/mantine-ui/src/components/SettingsMenu.tsx b/web/ui/mantine-ui/src/components/SettingsMenu.tsx
new file mode 100644
index 000000000..0d004bd4b
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/SettingsMenu.tsx
@@ -0,0 +1,107 @@
+import { Popover, ActionIcon, Fieldset, Checkbox, Stack } from "@mantine/core";
+import { IconSettings } from "@tabler/icons-react";
+import { FC } from "react";
+import { useAppDispatch } from "../state/hooks";
+import { updateSettings, useSettings } from "../state/settingsSlice";
+
+const SettingsMenu: FC = () => {
+ const {
+ useLocalTime,
+ enableQueryHistory,
+ enableAutocomplete,
+ enableSyntaxHighlighting,
+ enableLinter,
+ showAnnotations,
+ } = useSettings();
+ const dispatch = useAppDispatch();
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+};
+
+export default SettingsMenu;
diff --git a/web/ui/mantine-ui/src/components/StateMultiSelect.tsx b/web/ui/mantine-ui/src/components/StateMultiSelect.tsx
new file mode 100644
index 000000000..b986d7aa5
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/StateMultiSelect.tsx
@@ -0,0 +1,142 @@
+import { FC } from "react";
+import {
+ CheckIcon,
+ Combobox,
+ ComboboxChevron,
+ ComboboxClearButton,
+ Group,
+ Pill,
+ PillsInput,
+ useCombobox,
+} from "@mantine/core";
+import { IconHeartRateMonitor } from "@tabler/icons-react";
+
+interface StatePillProps extends React.ComponentPropsWithoutRef<"div"> {
+ value: string;
+ onRemove?: () => void;
+}
+
+export function StatePill({ value, onRemove, ...others }: StatePillProps) {
+ return (
+
+ {value}
+
+ );
+}
+
+interface StateMultiSelectProps {
+ options: string[];
+ optionClass: (option: string) => string;
+ optionCount?: (option: string) => number;
+ placeholder: string;
+ values: string[];
+ onChange: (values: string[]) => void;
+}
+
+export const StateMultiSelect: FC = ({
+ options,
+ optionClass,
+ optionCount,
+ placeholder,
+ values,
+ onChange,
+}) => {
+ const combobox = useCombobox({
+ onDropdownClose: () => combobox.resetSelectedOption(),
+ onDropdownOpen: () => combobox.updateSelectedOptionIndex("active"),
+ });
+
+ const handleValueSelect = (val: string) =>
+ onChange(
+ values.includes(val) ? values.filter((v) => v !== val) : [...values, val]
+ );
+
+ const handleValueRemove = (val: string) =>
+ onChange(values.filter((v) => v !== val));
+
+ const renderedValues = values.map((item) => (
+ handleValueRemove(item)}
+ key={item}
+ />
+ ));
+
+ return (
+
+
+ combobox.toggleDropdown()}
+ miw={200}
+ leftSection={ }
+ rightSection={
+ values.length > 0 ? (
+ onChange([])} />
+ ) : (
+
+ )
+ }
+ >
+
+ {renderedValues.length > 0 ? (
+ renderedValues
+ ) : (
+
+ )}
+
+
+ combobox.closeDropdown()}
+ onKeyDown={(event) => {
+ if (event.key === "Backspace") {
+ event.preventDefault();
+ handleValueRemove(values[values.length - 1]);
+ }
+ }}
+ />
+
+
+
+
+
+
+
+ {options.map((value) => {
+ return (
+
+
+ {values.includes(value) ? (
+
+ ) : null}
+
+
+
+ );
+ })}
+
+
+
+ );
+};
diff --git a/web/ui/mantine-ui/src/components/ThemeSelector.tsx b/web/ui/mantine-ui/src/components/ThemeSelector.tsx
new file mode 100644
index 000000000..a7f5d1dca
--- /dev/null
+++ b/web/ui/mantine-ui/src/components/ThemeSelector.tsx
@@ -0,0 +1,64 @@
+import {
+ useMantineColorScheme,
+ SegmentedControl,
+ rem,
+ MantineColorScheme,
+ Tooltip,
+} from "@mantine/core";
+import {
+ IconMoonFilled,
+ IconSunFilled,
+ IconUserFilled,
+} from "@tabler/icons-react";
+import { FC } from "react";
+
+export const ThemeSelector: FC = () => {
+ const { colorScheme, setColorScheme } = useMantineColorScheme();
+ const iconProps = {
+ style: { width: rem(20), height: rem(20), display: "block" },
+ stroke: 1.5,
+ };
+
+ return (
+ setColorScheme(v as MantineColorScheme)}
+ data={[
+ {
+ value: "light",
+ label: (
+
+
+
+ ),
+ },
+ {
+ value: "dark",
+ label: (
+
+
+
+ ),
+ },
+ {
+ value: "auto",
+ label: (
+
+
+
+ ),
+ },
+ ]}
+ />
+ );
+};
diff --git a/web/ui/mantine-ui/src/fonts/codicon.ttf b/web/ui/mantine-ui/src/fonts/codicon.ttf
new file mode 100644
index 000000000..82acc8995
Binary files /dev/null and b/web/ui/mantine-ui/src/fonts/codicon.ttf differ
diff --git a/web/ui/mantine-ui/src/images/prometheus-logo.svg b/web/ui/mantine-ui/src/images/prometheus-logo.svg
new file mode 100644
index 000000000..b914095ec
--- /dev/null
+++ b/web/ui/mantine-ui/src/images/prometheus-logo.svg
@@ -0,0 +1,19 @@
+
+
+
+
diff --git a/web/ui/mantine-ui/src/lib/escapeString.ts b/web/ui/mantine-ui/src/lib/escapeString.ts
new file mode 100644
index 000000000..5fc1955de
--- /dev/null
+++ b/web/ui/mantine-ui/src/lib/escapeString.ts
@@ -0,0 +1,4 @@
+// Used for escaping escape sequences and double quotes in double-quoted strings.
+export const escapeString = (str: string) => {
+ return str.replace(/([\\"])/g, "\\$1");
+};
diff --git a/web/ui/mantine-ui/src/lib/formatFloatValue.ts b/web/ui/mantine-ui/src/lib/formatFloatValue.ts
new file mode 100644
index 000000000..6b45e626f
--- /dev/null
+++ b/web/ui/mantine-ui/src/lib/formatFloatValue.ts
@@ -0,0 +1,21 @@
+export const parsePrometheusFloat = (str: string): number => {
+ switch (str) {
+ case "+Inf":
+ return Infinity;
+ case "-Inf":
+ return -Infinity;
+ default:
+ return parseFloat(str);
+ }
+};
+
+export const formatPrometheusFloat = (num: number): string => {
+ switch (num) {
+ case Infinity:
+ return "+Inf";
+ case -Infinity:
+ return "-Inf";
+ default:
+ return num.toString();
+ }
+};
diff --git a/web/ui/mantine-ui/src/lib/formatSeries.ts b/web/ui/mantine-ui/src/lib/formatSeries.ts
new file mode 100644
index 000000000..b79c40076
--- /dev/null
+++ b/web/ui/mantine-ui/src/lib/formatSeries.ts
@@ -0,0 +1,12 @@
+import { escapeString } from "./escapeString";
+
+export const formatSeries = (labels: { [key: string]: string }): string => {
+ if (labels === null) {
+ return "scalar";
+ }
+
+ return `${labels.__name__ || ""}{${Object.entries(labels)
+ .filter(([k]) => k !== "__name__")
+ .map(([k, v]) => `${k}="${escapeString(v)}"`)
+ .join(", ")}}`;
+};
diff --git a/web/ui/mantine-ui/src/lib/formatTime.ts b/web/ui/mantine-ui/src/lib/formatTime.ts
new file mode 100644
index 000000000..c92b564f4
--- /dev/null
+++ b/web/ui/mantine-ui/src/lib/formatTime.ts
@@ -0,0 +1,136 @@
+import dayjs from "dayjs";
+import duration from "dayjs/plugin/duration";
+dayjs.extend(duration);
+import utc from "dayjs/plugin/utc";
+dayjs.extend(utc);
+
+// Parse Prometheus-specific duration strings such as "5m" or "1d2h3m4s" into milliseconds.
+export const parsePrometheusDuration = (durationStr: string): number | null => {
+ if (durationStr === "") {
+ return null;
+ }
+ if (durationStr === "0") {
+ // Allow 0 without a unit.
+ return 0;
+ }
+
+ const durationRE = new RegExp(
+ "^(([0-9]+)y)?(([0-9]+)w)?(([0-9]+)d)?(([0-9]+)h)?(([0-9]+)m)?(([0-9]+)s)?(([0-9]+)ms)?$"
+ );
+ const matches = durationStr.match(durationRE);
+ if (!matches) {
+ return null;
+ }
+
+ let dur = 0;
+
+ // Parse the match at pos `pos` in the regex and use `mult` to turn that
+ // into ms, then add that value to the total parsed duration.
+ const m = (pos: number, mult: number) => {
+ if (matches[pos] === undefined) {
+ return;
+ }
+ const n = parseInt(matches[pos]);
+ dur += n * mult;
+ };
+
+ m(2, 1000 * 60 * 60 * 24 * 365); // y
+ m(4, 1000 * 60 * 60 * 24 * 7); // w
+ m(6, 1000 * 60 * 60 * 24); // d
+ m(8, 1000 * 60 * 60); // h
+ m(10, 1000 * 60); // m
+ m(12, 1000); // s
+ m(14, 1); // ms
+
+ return dur;
+};
+
+// Format a duration in milliseconds into a Prometheus duration string like "1d2h3m4s".
+export const formatPrometheusDuration = (d: number): string => {
+ let ms = d;
+ let r = "";
+ if (ms === 0) {
+ return "0s";
+ }
+
+ const f = (unit: string, mult: number, exact: boolean) => {
+ if (exact && ms % mult !== 0) {
+ return;
+ }
+ const v = Math.floor(ms / mult);
+ if (v > 0) {
+ r += `${v}${unit}`;
+ ms -= v * mult;
+ }
+ };
+
+ // Only format years and weeks if the remainder is zero, as it is often
+ // easier to read 90d than 12w6d.
+ f("y", 1000 * 60 * 60 * 24 * 365, true);
+ f("w", 1000 * 60 * 60 * 24 * 7, true);
+
+ f("d", 1000 * 60 * 60 * 24, false);
+ f("h", 1000 * 60 * 60, false);
+ f("m", 1000 * 60, false);
+ f("s", 1000, false);
+ f("ms", 1, false);
+
+ return r;
+};
+
+export function parseTime(timeText: string): number {
+ return dayjs.utc(timeText).valueOf();
+}
+
+export const now = (): number => dayjs().valueOf();
+
+export const humanizeDuration = (milliseconds: number): string => {
+ if (milliseconds === 0) {
+ return "0s";
+ }
+
+ const sign = milliseconds < 0 ? "-" : "";
+ const duration = dayjs.duration(Math.abs(milliseconds), "ms");
+ const ms = Math.floor(duration.milliseconds());
+ const s = Math.floor(duration.seconds());
+ const m = Math.floor(duration.minutes());
+ const h = Math.floor(duration.hours());
+ const d = Math.floor(duration.asDays());
+ const parts: string[] = [];
+ if (d !== 0) {
+ parts.push(`${d}d`);
+ }
+ if (h !== 0) {
+ parts.push(`${h}h`);
+ }
+ if (m !== 0) {
+ parts.push(`${m}m`);
+ }
+ if (s !== 0) {
+ if (ms !== 0) {
+ parts.push(`${s}.${ms}s`);
+ } else {
+ parts.push(`${s}s`);
+ }
+ } else if (milliseconds !== 0) {
+ parts.push(`${milliseconds.toFixed(3)}ms`);
+ }
+ return sign + parts.join(" ");
+};
+
+export const humanizeDurationRelative = (
+ startStr: string,
+ end: number,
+ suffix: string = " ago"
+): string => {
+ const start = parseTime(startStr);
+ if (start < 0) {
+ return "never";
+ }
+ return humanizeDuration(end - start) + suffix;
+};
+
+export const formatTimestamp = (t: number, useLocalTime: boolean) =>
+ useLocalTime
+ ? dayjs.unix(t).tz(dayjs.tz.guess()).format()
+ : dayjs.unix(t).utc().format();
diff --git a/web/ui/mantine-ui/src/main.tsx b/web/ui/mantine-ui/src/main.tsx
new file mode 100644
index 000000000..71b3cf937
--- /dev/null
+++ b/web/ui/mantine-ui/src/main.tsx
@@ -0,0 +1,15 @@
+import React from "react";
+import ReactDOM from "react-dom/client";
+import App from "./App.tsx";
+import store from "./state/store.ts";
+import { Provider } from "react-redux";
+import "./fonts/codicon.ttf";
+import "./promql.css";
+
+ReactDOM.createRoot(document.getElementById("root")!).render(
+
+
+
+
+
+);
diff --git a/web/ui/mantine-ui/src/pages/AgentPage.tsx b/web/ui/mantine-ui/src/pages/AgentPage.tsx
new file mode 100644
index 000000000..34c0a6708
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/AgentPage.tsx
@@ -0,0 +1,27 @@
+import { Card, Group, Text } from "@mantine/core";
+import { IconSpy } from "@tabler/icons-react";
+import { FC } from "react";
+
+const AgentPage: FC = () => {
+ return (
+
+
+
+
+ Prometheus Agent
+
+
+
+ This Prometheus instance is running in agent mode. In
+ this mode, Prometheus is only used to scrape discovered targets and
+ forward the scraped metrics to remote write endpoints.
+
+
+ Some features are not available in this mode, such as querying and
+ alerting.
+
+
+ );
+};
+
+export default AgentPage;
diff --git a/web/ui/mantine-ui/src/pages/AlertmanagerDiscoveryPage.tsx b/web/ui/mantine-ui/src/pages/AlertmanagerDiscoveryPage.tsx
new file mode 100644
index 000000000..2ea248117
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/AlertmanagerDiscoveryPage.tsx
@@ -0,0 +1,80 @@
+import { Alert, Card, Group, Stack, Table, Text } from "@mantine/core";
+import { IconBell, IconBellOff, IconInfoCircle } from "@tabler/icons-react";
+
+import { useSuspenseAPIQuery } from "../api/api";
+import { AlertmanagersResult } from "../api/responseTypes/alertmanagers";
+import EndpointLink from "../components/EndpointLink";
+
+export const targetPoolDisplayLimit = 20;
+
+export default function AlertmanagerDiscoveryPage() {
+  // Load the lists of active and dropped Alertmanagers.
+ const {
+ data: {
+ data: { activeAlertmanagers, droppedAlertmanagers },
+ },
+ } = useSuspenseAPIQuery({
+ path: `/alertmanagers`,
+ });
+
+ return (
+
+
+
+
+
+ Active Alertmanagers
+
+
+ {activeAlertmanagers.length === 0 ? (
+ }>
+ No active alertmanagers found.
+
+ ) : (
+
+
+ {activeAlertmanagers.map((alertmanager) => (
+
+
+
+
+
+ ))}
+
+
+ )}
+
+
+
+
+
+ Dropped Alertmanagers
+
+
+ {droppedAlertmanagers.length === 0 ? (
+ }>
+ No dropped alertmanagers found.
+
+ ) : (
+
+
+ {droppedAlertmanagers.map((alertmanager) => (
+
+
+
+
+
+ ))}
+
+
+ )}
+
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/AlertsPage.tsx b/web/ui/mantine-ui/src/pages/AlertsPage.tsx
new file mode 100644
index 000000000..852236f86
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/AlertsPage.tsx
@@ -0,0 +1,413 @@
+import {
+ Card,
+ Group,
+ Table,
+ Text,
+ Accordion,
+ Badge,
+ Tooltip,
+ Box,
+ Stack,
+ Alert,
+ TextInput,
+ Anchor,
+} from "@mantine/core";
+import { useSuspenseAPIQuery } from "../api/api";
+import { AlertingRule, AlertingRulesResult } from "../api/responseTypes/rules";
+import badgeClasses from "../Badge.module.css";
+import panelClasses from "../Panel.module.css";
+import RuleDefinition from "../components/RuleDefinition";
+import { humanizeDurationRelative, now } from "../lib/formatTime";
+import { Fragment, useMemo } from "react";
+import { StateMultiSelect } from "../components/StateMultiSelect";
+import { IconInfoCircle, IconSearch } from "@tabler/icons-react";
+import { LabelBadges } from "../components/LabelBadges";
+import { useSettings } from "../state/settingsSlice";
+import {
+ ArrayParam,
+ BooleanParam,
+ StringParam,
+ useQueryParam,
+ withDefault,
+} from "use-query-params";
+import { useDebouncedValue } from "@mantine/hooks";
+import { KVSearch } from "@nexucis/kvsearch";
+
+type AlertsPageData = {
+ // How many rules are in each state across all groups.
+ globalCounts: {
+ inactive: number;
+ pending: number;
+ firing: number;
+ };
+ groups: {
+ name: string;
+ file: string;
+ // How many rules are in each state for this group.
+ counts: {
+ total: number;
+ inactive: number;
+ pending: number;
+ firing: number;
+ };
+ rules: {
+ rule: AlertingRule;
+ // How many alerts are in each state for this rule.
+ counts: {
+ firing: number;
+ pending: number;
+ };
+ }[];
+ }[];
+};
+
+const kvSearch = new KVSearch({
+ shouldSort: true,
+ indexedKeys: ["name", "labels", ["labels", /.*/]],
+});
+
+const buildAlertsPageData = (
+ data: AlertingRulesResult,
+ search: string,
+ stateFilter: (string | null)[]
+) => {
+ const pageData: AlertsPageData = {
+ globalCounts: {
+ inactive: 0,
+ pending: 0,
+ firing: 0,
+ },
+ groups: [],
+ };
+
+ for (const group of data.groups) {
+ const groupCounts = {
+ total: 0,
+ inactive: 0,
+ pending: 0,
+ firing: 0,
+ };
+
+ for (const r of group.rules) {
+ groupCounts.total++;
+ switch (r.state) {
+ case "inactive":
+ pageData.globalCounts.inactive++;
+ groupCounts.inactive++;
+ break;
+ case "firing":
+ pageData.globalCounts.firing++;
+ groupCounts.firing++;
+ break;
+ case "pending":
+ pageData.globalCounts.pending++;
+ groupCounts.pending++;
+ break;
+ default:
+ throw new Error(`Unknown rule state: ${r.state}`);
+ }
+ }
+
+ const filteredRules: AlertingRule[] = (
+ search === ""
+ ? group.rules
+ : kvSearch.filter(search, group.rules).map((value) => value.original)
+ ).filter((r) => stateFilter.length === 0 || stateFilter.includes(r.state));
+
+ pageData.groups.push({
+ name: group.name,
+ file: group.file,
+ counts: groupCounts,
+ rules: filteredRules.map((r) => ({
+ rule: r,
+ counts: {
+ firing: r.alerts.filter((a) => a.state === "firing").length,
+ pending: r.alerts.filter((a) => a.state === "pending").length,
+ },
+ })),
+ });
+ }
+
+ return pageData;
+};
+
+export default function AlertsPage() {
+ // Fetch the alerting rules data.
+ const { data } = useSuspenseAPIQuery({
+ path: `/rules`,
+ params: {
+ type: "alert",
+ },
+ });
+
+ const { showAnnotations } = useSettings();
+
+ // Define URL query params.
+ const [stateFilter, setStateFilter] = useQueryParam(
+ "state",
+ withDefault(ArrayParam, [])
+ );
+ const [searchFilter, setSearchFilter] = useQueryParam(
+ "search",
+ withDefault(StringParam, "")
+ );
+ const [debouncedSearch] = useDebouncedValue(searchFilter.trim(), 250);
+ const [showEmptyGroups, setShowEmptyGroups] = useQueryParam(
+ "showEmptyGroups",
+ withDefault(BooleanParam, true)
+ );
+
+ // Update the page data whenever the fetched data or filters change.
+ const alertsPageData: AlertsPageData = useMemo(
+ () => buildAlertsPageData(data.data, debouncedSearch, stateFilter),
+ [data, stateFilter, debouncedSearch]
+ );
+
+ const shownGroups = showEmptyGroups
+ ? alertsPageData.groups
+ : alertsPageData.groups.filter((g) => g.rules.length > 0);
+
+ return (
+
+
+
+ o === "inactive"
+ ? badgeClasses.healthOk
+ : o === "pending"
+ ? badgeClasses.healthWarn
+ : badgeClasses.healthErr
+ }
+ optionCount={(o) =>
+ alertsPageData.globalCounts[
+ o as keyof typeof alertsPageData.globalCounts
+ ]
+ }
+ placeholder="Filter by rule state"
+ values={(stateFilter?.filter((v) => v !== null) as string[]) || []}
+ onChange={(values) => setStateFilter(values)}
+ />
+ }
+ placeholder="Filter by rule name or labels"
+ value={searchFilter || ""}
+ onChange={(event) =>
+ setSearchFilter(event.currentTarget.value || null)
+ }
+ >
+
+ {alertsPageData.groups.length === 0 ? (
+ }>
+ No rules found.
+
+ ) : (
+ !showEmptyGroups &&
+ alertsPageData.groups.length !== shownGroups.length && (
+ }
+ >
+ Hiding {alertsPageData.groups.length - shownGroups.length} empty
+ groups due to filters or no rules.
+ setShowEmptyGroups(true)}>
+ Show empty groups
+
+
+ )
+ )}
+
+ {shownGroups.map((g, i) => {
+ return (
+
+
+
+
+ {g.name}
+
+
+ {g.file}
+
+
+
+ {g.counts.firing > 0 && (
+
+ firing ({g.counts.firing})
+
+ )}
+ {g.counts.pending > 0 && (
+
+ pending ({g.counts.pending})
+
+ )}
+ {g.counts.inactive > 0 && (
+
+ inactive ({g.counts.inactive})
+
+ )}
+
+
+ {g.counts.total === 0 ? (
+ }>
+ No rules in this group.
+ setShowEmptyGroups(false)}
+ >
+ Hide empty groups
+
+
+ ) : g.rules.length === 0 ? (
+ }>
+ No rules in this group match your filter criteria (omitted{" "}
+ {g.counts.total} filtered rules).
+ setShowEmptyGroups(false)}
+ >
+ Hide empty groups
+
+
+ ) : (
+
+ {g.rules.map((r, j) => {
+ return (
+ 0
+ ? panelClasses.panelHealthErr
+ : r.counts.pending > 0
+ ? panelClasses.panelHealthWarn
+ : panelClasses.panelHealthOk
+ }
+ >
+
+
+ {r.rule.name}
+
+ {r.counts.firing > 0 && (
+
+ firing ({r.counts.firing})
+
+ )}
+ {r.counts.pending > 0 && (
+
+ pending ({r.counts.pending})
+
+ )}
+
+
+
+
+
+ {r.rule.alerts.length > 0 && (
+
+
+
+ Alert labels
+ State
+ Active Since
+ Value
+
+
+
+ {r.rule.type === "alerting" &&
+ r.rule.alerts.map((a, k) => (
+
+
+
+
+
+
+
+ {a.state}
+
+
+
+
+
+ {humanizeDurationRelative(
+ a.activeAt,
+ now(),
+ ""
+ )}
+
+
+
+
+ {isNaN(Number(a.value))
+ ? a.value
+ : Number(a.value)}
+
+
+ {showAnnotations && (
+
+
+
+
+ {Object.entries(
+ a.annotations
+ ).map(([k, v]) => (
+
+
+ {k}
+
+ {v}
+
+ ))}
+
+
+
+
+ )}
+
+ ))}
+
+
+ )}
+
+
+ );
+ })}
+
+ )}
+
+ );
+ })}
+
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/ConfigPage.tsx b/web/ui/mantine-ui/src/pages/ConfigPage.tsx
new file mode 100644
index 000000000..a8419a8db
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/ConfigPage.tsx
@@ -0,0 +1,23 @@
+import { CodeHighlight } from "@mantine/code-highlight";
+import { useSuspenseAPIQuery } from "../api/api";
+import ConfigResult from "../api/responseTypes/config";
+
+export default function ConfigPage() {
+ const {
+ data: {
+ data: { yaml },
+ },
+ } = useSuspenseAPIQuery({ path: `/status/config` });
+
+ return (
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/FlagsPage.module.css b/web/ui/mantine-ui/src/pages/FlagsPage.module.css
new file mode 100644
index 000000000..221b2de02
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/FlagsPage.module.css
@@ -0,0 +1,21 @@
+.th {
+ padding: 0;
+}
+
+.control {
+ width: 100%;
+ padding: var(--mantine-spacing-xs) var(--mantine-spacing-md);
+
+ @mixin hover {
+ background-color: light-dark(
+ var(--mantine-color-gray-0),
+ var(--mantine-color-dark-6)
+ );
+ }
+}
+
+.icon {
+ width: rem(21px);
+ height: rem(21px);
+ border-radius: rem(21px);
+}
diff --git a/web/ui/mantine-ui/src/pages/FlagsPage.tsx b/web/ui/mantine-ui/src/pages/FlagsPage.tsx
new file mode 100644
index 000000000..d90328f96
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/FlagsPage.tsx
@@ -0,0 +1,182 @@
+import { useState } from "react";
+import {
+ Table,
+ UnstyledButton,
+ Group,
+ Text,
+ Center,
+ TextInput,
+ rem,
+ keys,
+ Card,
+} from "@mantine/core";
+import {
+ IconSelector,
+ IconChevronDown,
+ IconChevronUp,
+ IconSearch,
+} from "@tabler/icons-react";
+import classes from "./FlagsPage.module.css";
+import { useSuspenseAPIQuery } from "../api/api";
+
+interface RowData {
+ flag: string;
+ value: string;
+}
+
+interface ThProps {
+ children: React.ReactNode;
+ reversed: boolean;
+ sorted: boolean;
+ onSort(): void;
+}
+
+function Th({ children, reversed, sorted, onSort }: ThProps) {
+ const Icon = sorted
+ ? reversed
+ ? IconChevronUp
+ : IconChevronDown
+ : IconSelector;
+ return (
+
+
+
+
+ {children}
+
+
+
+
+
+
+
+ );
+}
+
+function filterData(data: RowData[], search: string) {
+ const query = search.toLowerCase().trim();
+ return data.filter((item) =>
+ keys(data[0]).some((key) => item[key].toLowerCase().includes(query))
+ );
+}
+
+function sortData(
+ data: RowData[],
+ payload: { sortBy: keyof RowData | null; reversed: boolean; search: string }
+) {
+ const { sortBy } = payload;
+
+ if (!sortBy) {
+ return filterData(data, payload.search);
+ }
+
+ return filterData(
+ [...data].sort((a, b) => {
+ if (payload.reversed) {
+ return b[sortBy].localeCompare(a[sortBy]);
+ }
+
+ return a[sortBy].localeCompare(b[sortBy]);
+ }),
+ payload.search
+ );
+}
+
+export default function FlagsPage() {
+ const { data } = useSuspenseAPIQuery>({
+ path: `/status/flags`,
+ });
+
+ const flags = Object.entries(data.data).map(([flag, value]) => ({
+ flag,
+ value,
+ }));
+
+ const [search, setSearch] = useState("");
+ const [sortedData, setSortedData] = useState(flags);
+ const [sortBy, setSortBy] = useState(null);
+ const [reverseSortDirection, setReverseSortDirection] = useState(false);
+
+ const setSorting = (field: keyof RowData) => {
+ const reversed = field === sortBy ? !reverseSortDirection : false;
+ setReverseSortDirection(reversed);
+ setSortBy(field);
+ setSortedData(sortData(flags, { sortBy: field, reversed, search }));
+ };
+
+ const handleSearchChange = (event: React.ChangeEvent) => {
+ const { value } = event.currentTarget;
+ setSearch(value);
+ setSortedData(
+ sortData(flags, { sortBy, reversed: reverseSortDirection, search: value })
+ );
+ };
+
+ const rows = sortedData.map((row) => (
+
+
+ --{row.flag}
+
+
+ {row.value}
+
+
+ ));
+
+ return (
+
+
+ }
+ value={search}
+ onChange={handleSearchChange}
+ />
+
+
+
+ setSorting("flag")}
+ >
+ Flag
+
+
+ setSorting("value")}
+ >
+ Value
+
+
+
+
+ {rows.length > 0 ? (
+ rows
+ ) : (
+
+
+
+ Nothing found
+
+
+
+ )}
+
+
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/RulesPage.tsx b/web/ui/mantine-ui/src/pages/RulesPage.tsx
new file mode 100644
index 000000000..a335228f3
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/RulesPage.tsx
@@ -0,0 +1,201 @@
+import {
+ Accordion,
+ Alert,
+ Badge,
+ Card,
+ Group,
+ Stack,
+ Text,
+ Tooltip,
+} from "@mantine/core";
+// import { useQuery } from "react-query";
+import {
+ humanizeDurationRelative,
+ humanizeDuration,
+ now,
+} from "../lib/formatTime";
+import {
+ IconAlertTriangle,
+ IconBell,
+ IconHourglass,
+ IconInfoCircle,
+ IconRefresh,
+ IconRepeat,
+ IconTimeline,
+} from "@tabler/icons-react";
+import { useSuspenseAPIQuery } from "../api/api";
+import { RulesResult } from "../api/responseTypes/rules";
+import badgeClasses from "../Badge.module.css";
+import RuleDefinition from "../components/RuleDefinition";
+
+const healthBadgeClass = (state: string) => {
+ switch (state) {
+ case "ok":
+ return badgeClasses.healthOk;
+ case "err":
+ return badgeClasses.healthErr;
+ case "unknown":
+ return badgeClasses.healthUnknown;
+ default:
+ throw new Error("Unknown rule health state");
+ }
+};
+
+export default function RulesPage() {
+ const { data } = useSuspenseAPIQuery({ path: `/rules` });
+
+ return (
+
+ {data.data.groups.length === 0 && (
+ }>
+ No rule groups configured.
+
+ )}
+ {data.data.groups.map((g, i) => (
+
+
+
+
+ {g.name}
+
+
+ {g.file}
+
+
+
+
+ }
+ >
+ last run {humanizeDurationRelative(g.lastEvaluation, now())}
+
+
+
+ }
+ >
+ took {humanizeDuration(parseFloat(g.evaluationTime) * 1000)}
+
+
+
+ }
+ >
+ every {humanizeDuration(parseFloat(g.interval) * 1000)}{" "}
+
+
+
+
+ {g.rules.length === 0 && (
+ }>
+ No rules in rule group.
+
+ )}
+
+ {g.rules.map((r, j) => (
+
+
+
+
+ {r.type === "alerting" ? (
+
+
+
+ ) : (
+
+
+
+ )}
+ {r.name}
+
+
+
+
+ }
+ >
+ {humanizeDurationRelative(r.lastEvaluation, now())}
+
+
+
+
+ }
+ >
+ {humanizeDuration(
+ parseFloat(r.evaluationTime) * 1000
+ )}
+
+
+
+
+ {r.health}
+
+
+
+
+
+
+ {r.lastError && (
+ }
+ >
+ Error: {r.lastError}
+
+ )}
+
+
+ ))}
+
+
+ ))}
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/StatusPage.tsx b/web/ui/mantine-ui/src/pages/StatusPage.tsx
new file mode 100644
index 000000000..04d215cf5
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/StatusPage.tsx
@@ -0,0 +1,90 @@
+import { Card, Group, Stack, Table, Text } from "@mantine/core";
+import { useSuspenseAPIQuery } from "../api/api";
+import { IconRun, IconWall } from "@tabler/icons-react";
+import { formatTimestamp } from "../lib/formatTime";
+import { useSettings } from "../state/settingsSlice";
+
+export default function StatusPage() {
+ const { data: buildinfo } = useSuspenseAPIQuery>({
+ path: `/status/buildinfo`,
+ });
+ const { data: runtimeinfo } = useSuspenseAPIQuery>({
+ path: `/status/runtimeinfo`,
+ });
+
+ const { useLocalTime } = useSettings();
+
+ const statusConfig: Record<
+ string,
+ {
+ title?: string;
+ formatValue?: (v: string | boolean) => string;
+ }
+ > = {
+ startTime: {
+ title: "Start time",
+ formatValue: (v: string | boolean) =>
+ formatTimestamp(new Date(v as string).valueOf() / 1000, useLocalTime),
+ },
+ CWD: { title: "Working directory" },
+ reloadConfigSuccess: {
+ title: "Configuration reload",
+ formatValue: (v: string | boolean) => (v ? "Successful" : "Unsuccessful"),
+ },
+ lastConfigTime: {
+ title: "Last successful configuration reload",
+ formatValue: (v: string | boolean) =>
+ formatTimestamp(new Date(v as string).valueOf() / 1000, useLocalTime),
+ },
+ corruptionCount: { title: "WAL corruptions" },
+ goroutineCount: { title: "Goroutines" },
+ storageRetention: { title: "Storage retention" },
+ };
+
+ return (
+
+
+
+
+
+ Build information
+
+
+
+
+ {Object.entries(buildinfo.data).map(([k, v]) => (
+
+ {k}
+ {v}
+
+ ))}
+
+
+
+
+
+
+
+ Runtime information
+
+
+
+
+ {Object.entries(runtimeinfo.data).map(([k, v]) => {
+ const { title = k, formatValue = (val: string) => val } =
+ statusConfig[k] || {};
+ return (
+
+
+ {title}
+
+ {formatValue(v)}
+
+ );
+ })}
+
+
+
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/TSDBStatusPage.tsx b/web/ui/mantine-ui/src/pages/TSDBStatusPage.tsx
new file mode 100644
index 000000000..721ac6904
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/TSDBStatusPage.tsx
@@ -0,0 +1,105 @@
+import { Stack, Card, Table, Text } from "@mantine/core";
+import { useSuspenseAPIQuery } from "../api/api";
+import { TSDBStatusResult } from "../api/responseTypes/tsdbStatus";
+import { formatTimestamp } from "../lib/formatTime";
+import { useSettings } from "../state/settingsSlice";
+
+export default function TSDBStatusPage() {
+ const {
+ data: {
+ data: {
+ headStats,
+ labelValueCountByLabelName,
+ seriesCountByMetricName,
+ memoryInBytesByLabelName,
+ seriesCountByLabelValuePair,
+ },
+ },
+ } = useSuspenseAPIQuery({ path: `/status/tsdb` });
+
+ const { useLocalTime } = useSettings();
+
+ const unixToTime = (unix: number): string => {
+ const formatted = formatTimestamp(unix, useLocalTime);
+ if (formatted === "Invalid Date") {
+ if (numSeries === 0) {
+ return "No datapoints yet";
+ }
+ return `Error parsing time (${unix})`;
+ }
+
+ return formatted;
+ };
+
+ const { chunkCount, numSeries, numLabelPairs, minTime, maxTime } = headStats;
+ const stats = [
+ { name: "Number of Series", value: numSeries },
+ { name: "Number of Chunks", value: chunkCount },
+ { name: "Number of Label Pairs", value: numLabelPairs },
+ { name: "Current Min Time", value: `${unixToTime(minTime / 1000)}` },
+ { name: "Current Max Time", value: `${unixToTime(maxTime / 1000)}` },
+ ];
+
+ return (
+
+ {[
+ {
+ title: "TSDB Head Status",
+ stats,
+ formatAsCode: false,
+ },
+ {
+ title: "Top 10 label names with value count",
+ stats: labelValueCountByLabelName,
+ formatAsCode: true,
+ },
+ {
+ title: "Top 10 series count by metric names",
+ stats: seriesCountByMetricName,
+ formatAsCode: true,
+ },
+ {
+ title: "Top 10 label names with high memory usage",
+ unit: "Bytes",
+ stats: memoryInBytesByLabelName,
+ formatAsCode: true,
+ },
+ {
+ title: "Top 10 series count by label value pairs",
+ stats: seriesCountByLabelValuePair,
+ formatAsCode: true,
+ },
+ ].map(({ title, unit = "Count", stats, formatAsCode }) => (
+
+
+ {title}
+
+
+
+
+ Name
+ {unit}
+
+
+
+ {stats.map(({ name, value }) => {
+ return (
+
+
+ {formatAsCode ? {name}
: name}
+
+ {value}
+
+ );
+ })}
+
+
+
+ ))}
+
+ );
+}
diff --git a/web/ui/mantine-ui/src/pages/query/DataTable.module.css b/web/ui/mantine-ui/src/pages/query/DataTable.module.css
new file mode 100644
index 000000000..255173d9b
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/DataTable.module.css
@@ -0,0 +1,10 @@
+.tableWrapper {
+ border: 1px solid
+ light-dark(var(--mantine-color-gray-3), var(--mantine-color-dark-5));
+ border-radius: var(--mantine-radius-default);
+}
+
+.numberCell {
+ text-align: right;
+ font-variant-numeric: tabular-nums;
+}
diff --git a/web/ui/mantine-ui/src/pages/query/DataTable.tsx b/web/ui/mantine-ui/src/pages/query/DataTable.tsx
new file mode 100644
index 000000000..5ccc8cb95
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/DataTable.tsx
@@ -0,0 +1,214 @@
+import { FC, ReactNode, useState } from "react";
+import {
+ Table,
+ Alert,
+ Box,
+ SegmentedControl,
+ ScrollArea,
+ Group,
+ Stack,
+ Text,
+ Anchor,
+} from "@mantine/core";
+import { IconAlertTriangle, IconInfoCircle } from "@tabler/icons-react";
+import {
+ InstantQueryResult,
+ InstantSample,
+ RangeSamples,
+} from "../../api/responseTypes/query";
+import SeriesName from "./SeriesName";
+import classes from "./DataTable.module.css";
+import dayjs from "dayjs";
+import timezone from "dayjs/plugin/timezone";
+import { formatTimestamp } from "../../lib/formatTime";
+import HistogramChart from "./HistogramChart";
+import { Histogram } from "../../types/types";
+import { bucketRangeString } from "./HistogramHelpers";
+import { useSettings } from "../../state/settingsSlice";
+dayjs.extend(timezone);
+
+const maxFormattableSeries = 1000;
+const maxDisplayableSeries = 10000;
+
+const limitSeries = (
+ series: S[],
+ limit: boolean
+): S[] => {
+ if (limit && series.length > maxDisplayableSeries) {
+ return series.slice(0, maxDisplayableSeries);
+ }
+ return series;
+};
+
+export interface DataTableProps {
+ data: InstantQueryResult;
+ limitResults: boolean;
+ setLimitResults: (limit: boolean) => void;
+}
+
+const DataTable: FC = ({
+ data,
+ limitResults,
+ setLimitResults,
+}) => {
+ const [scale, setScale] = useState("exponential");
+ const { useLocalTime } = useSettings();
+
+ const { result, resultType } = data;
+ const doFormat = result.length <= maxFormattableSeries;
+
+ return (
+
+ {limitResults &&
+ ["vector", "matrix"].includes(resultType) &&
+ result.length > maxDisplayableSeries && (
+ }
+ title="Showing limited results"
+ >
+ Fetched {data.result.length} metrics, only displaying first{" "}
+ {maxDisplayableSeries} for performance reasons.
+ setLimitResults(false)}>
+ Show all results
+
+
+ )}
+
+ {!doFormat && (
+ }
+ >
+ Showing more than {maxFormattableSeries} series, turning off label
+ formatting to improve rendering performance.
+
+ )}
+
+
+
+
+ {resultType === "vector" ? (
+ limitSeries(result, limitResults).map((s, idx) => (
+
+
+
+
+
+ {s.value && s.value[1]}
+ {s.histogram && (
+
+
+
+
+
+ Count: {s.histogram[1].count}
+
+
+ Sum: {s.histogram[1].sum}
+
+
+
+ x-axis scale:
+
+
+
+ {histogramTable(s.histogram[1])}
+
+ )}
+
+
+ ))
+ ) : resultType === "matrix" ? (
+ limitSeries(result, limitResults).map((s, idx) => (
+
+
+
+
+
+ {s.values &&
+ s.values.map((v, idx) => (
+
+ {v[1]}{" "}
+
+ @ {v[0]}
+
+
+ ))}
+
+
+ ))
+ ) : resultType === "scalar" ? (
+
+ Scalar value
+ {result[1]}
+
+ ) : resultType === "string" ? (
+
+ String value
+ {result[1]}
+
+ ) : (
+ }
+ >
+ Invalid result value type
+
+ )}
+
+
+
+
+ );
+};
+
+const histogramTable = (h: Histogram): ReactNode => (
+
+
+
+ Bucket range
+ Count
+
+
+ {h.buckets?.map((b, i) => (
+
+
+ {bucketRangeString(b)}
+
+ {b[3]}
+
+ ))}
+
+
+
+);
+
+export default DataTable;
diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/Aggregation.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/Aggregation.tsx
new file mode 100644
index 000000000..3db2d4c0f
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/Aggregation.tsx
@@ -0,0 +1,109 @@
+import { FC } from "react";
+import ASTNode, { Aggregation, aggregationType } from "../../../promql/ast";
+import { labelNameList } from "../../../promql/format";
+import { parsePrometheusFloat } from "../../../lib/formatFloatValue";
+import { Card, Text } from "@mantine/core";
+
+const describeAggregationType = (
+ aggrType: aggregationType,
+ param: ASTNode | null
+) => {
+ switch (aggrType) {
+ case "sum":
+ return "sums over the sample values of the input series";
+ case "min":
+ return "takes the minimum of the sample values of the input series";
+ case "max":
+ return "takes the maximum of the sample values of the input series";
+ case "avg":
+ return "calculates the average of the sample values of the input series";
+ case "stddev":
+ return "calculates the population standard deviation of the sample values of the input series";
+ case "stdvar":
+ return "calculates the population standard variation of the sample values of the input series";
+ case "count":
+ return "counts the number of input series";
+ case "group":
+ return "groups the input series by the supplied grouping labels, while setting the sample value to 1";
+ case "count_values":
+ if (param === null) {
+ throw new Error(
+ "encountered count_values() node without label parameter"
+ );
+ }
+ if (param.type !== "stringLiteral") {
+ throw new Error(
+ "encountered count_values() node without string literal label parameter"
+ );
+ }
+ return (
+ <>
+ outputs one time series for each unique sample value in the input
+ series (each counting the number of occurrences of that value and
+ indicating the original value in the {labelNameList([param.val])}{" "}
+ label)
+ >
+ );
+ case "bottomk":
+ return "returns the bottom K series by value";
+ case "topk":
+ return "returns the top K series by value";
+ case "quantile":
+ if (param === null) {
+ throw new Error(
+ "encountered quantile() node without quantile parameter"
+ );
+ }
+ if (param.type === "numberLiteral") {
+ return `calculates the ${param.val}th quantile (${
+ parsePrometheusFloat(param.val) * 100
+ }th percentile) over the sample values of the input series`;
+ }
+ return "calculates a quantile over the sample values of the input series";
+
+ case "limitk":
+ return "limits the output to K series";
+ case "limit_ratio":
+ return "limits the output to a ratio of the input series";
+ default:
+ throw new Error(`invalid aggregation type ${aggrType}`);
+ }
+};
+
+const describeAggregationGrouping = (grouping: string[], without: boolean) => {
+ if (without) {
+ return (
+ <>aggregating away the [{labelNameList(grouping)}] label dimensions>
+ );
+ }
+
+ if (grouping.length === 1) {
+ return <>grouped by their {labelNameList(grouping)} label dimension>;
+ }
+
+ if (grouping.length > 1) {
+ return <>grouped by their [{labelNameList(grouping)}] label dimensions>;
+ }
+
+ return "aggregating away any label dimensions";
+};
+
+interface AggregationExplainViewProps {
+ node: Aggregation;
+}
+
+const AggregationExplainView: FC = ({ node }) => {
+ return (
+
+
+ Aggregation
+
+
+ This node {describeAggregationType(node.op, node.param)},{" "}
+ {describeAggregationGrouping(node.grouping, node.without)}.
+
+
+ );
+};
+
+export default AggregationExplainView;
diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/BinaryExpr.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/BinaryExpr.tsx
new file mode 100644
index 000000000..837b84da3
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/BinaryExpr.tsx
@@ -0,0 +1,106 @@
+import { FC } from "react";
+import { BinaryExpr } from "../../../../promql/ast";
+import serializeNode from "../../../../promql/serialize";
+import VectorScalarBinaryExprExplainView from "./VectorScalar";
+import VectorVectorBinaryExprExplainView from "./VectorVector";
+import ScalarScalarBinaryExprExplainView from "./ScalarScalar";
+import { nodeValueType } from "../../../../promql/utils";
+import { useSuspenseAPIQuery } from "../../../../api/api";
+import { InstantQueryResult } from "../../../../api/responseTypes/query";
+import { Card, Text } from "@mantine/core";
+
+interface BinaryExprExplainViewProps {
+ node: BinaryExpr;
+}
+
+const BinaryExprExplainView: FC = ({ node }) => {
+ const { data: lhs } = useSuspenseAPIQuery({
+ path: `/query`,
+ params: {
+ query: serializeNode(node.lhs),
+ },
+ });
+ const { data: rhs } = useSuspenseAPIQuery({
+ path: `/query`,
+ params: {
+ query: serializeNode(node.rhs),
+ },
+ });
+
+ if (
+ lhs.data.resultType !== nodeValueType(node.lhs) ||
+ rhs.data.resultType !== nodeValueType(node.rhs)
+ ) {
+ // This can happen for a brief transitionary render when "node" has changed, but "lhs" and "rhs"
+ // haven't switched back to loading yet (leading to a crash in e.g. the vector-vector explain view).
+ return null;
+ }
+
+ // Scalar-scalar binops.
+ if (lhs.data.resultType === "scalar" && rhs.data.resultType === "scalar") {
+ return (
+
+
+ Scalar-to-scalar binary operation
+
+
+
+ );
+ }
+
+ // Vector-scalar binops.
+ if (lhs.data.resultType === "scalar" && rhs.data.resultType === "vector") {
+ return (
+
+
+ Scalar-to-vector binary operation
+
+
+
+ );
+ }
+ if (lhs.data.resultType === "vector" && rhs.data.resultType === "scalar") {
+ return (
+
+
+ Vector-to-scalar binary operation
+
+
+
+ );
+ }
+
+ // Vector-vector binops.
+ if (lhs.data.resultType === "vector" && rhs.data.resultType === "vector") {
+ return (
+
+
+ Vector-to-vector binary operation
+
+
+
+ );
+ }
+
+ throw new Error("invalid binary operator argument types");
+};
+
+export default BinaryExprExplainView;
diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/ScalarScalar.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/ScalarScalar.tsx
new file mode 100644
index 000000000..874d82448
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/ScalarScalar.tsx
@@ -0,0 +1,54 @@
+import { FC } from "react";
+import { BinaryExpr } from "../../../../promql/ast";
+import { scalarBinOp } from "../../../../promql/binOp";
+import { Table } from "@mantine/core";
+import { SampleValue } from "../../../../api/responseTypes/query";
+import {
+ formatPrometheusFloat,
+ parsePrometheusFloat,
+} from "../../../../lib/formatFloatValue";
+
+interface ScalarScalarBinaryExprExplainViewProps {
+ node: BinaryExpr;
+ lhs: SampleValue;
+ rhs: SampleValue;
+}
+
+const ScalarScalarBinaryExprExplainView: FC<
+ ScalarScalarBinaryExprExplainViewProps
+> = ({ node, lhs, rhs }) => {
+ const [lhsVal, rhsVal] = [
+ parsePrometheusFloat(lhs[1]),
+ parsePrometheusFloat(rhs[1]),
+ ];
+
+ return (
+
+
+
+ Left value
+ Operator
+ Right value
+
+ Result
+
+
+
+
+ {lhs[1]}
+
+ {node.op}
+ {node.bool && " bool"}
+
+ {rhs[1]}
+ =
+
+ {formatPrometheusFloat(scalarBinOp(node.op, lhsVal, rhsVal))}
+
+
+
+
+ );
+};
+
+export default ScalarScalarBinaryExprExplainView;
diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorScalar.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorScalar.tsx
new file mode 100644
index 000000000..3a0652f81
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorScalar.tsx
@@ -0,0 +1,104 @@
+import { FC } from "react";
+import { BinaryExpr } from "../../../../promql/ast";
+// import SeriesName from '../../../../utils/SeriesName';
+import { isComparisonOperator } from "../../../../promql/utils";
+import { vectorElemBinop } from "../../../../promql/binOp";
+import {
+ InstantSample,
+ SampleValue,
+} from "../../../../api/responseTypes/query";
+import { Alert, Table, Text } from "@mantine/core";
+import {
+ formatPrometheusFloat,
+ parsePrometheusFloat,
+} from "../../../../lib/formatFloatValue";
+import SeriesName from "../../SeriesName";
+
+interface VectorScalarBinaryExprExplainViewProps {
+ node: BinaryExpr;
+ scalar: SampleValue;
+ vector: InstantSample[];
+ scalarLeft: boolean;
+}
+
+const VectorScalarBinaryExprExplainView: FC<
+ VectorScalarBinaryExprExplainViewProps
+> = ({ node, scalar, vector, scalarLeft }) => {
+ if (vector.length === 0) {
+ return (
+
+ One side of the binary operation produces 0 results, no matching
+ information shown.
+
+ );
+ }
+
+ return (
+
+
+
+ {!scalarLeft && Left labels }
+ Left value
+ Operator
+ {scalarLeft && Right labels }
+ Right value
+
+ Result
+
+
+
+ {vector.map((sample: InstantSample, idx) => {
+ if (!sample.value) {
+ // TODO: Handle native histograms or show a better error message.
+ throw new Error("Native histograms are not supported yet");
+ }
+
+ const vecVal = parsePrometheusFloat(sample.value[1]);
+ const scalVal = parsePrometheusFloat(scalar[1]);
+
+ let { value, keep } = scalarLeft
+ ? vectorElemBinop(node.op, scalVal, vecVal)
+ : vectorElemBinop(node.op, vecVal, scalVal);
+ if (isComparisonOperator(node.op) && scalarLeft) {
+ value = vecVal;
+ }
+ if (node.bool) {
+ value = Number(keep);
+ keep = true;
+ }
+
+ const scalarCell = {scalar[1]} ;
+ const vectorCells = (
+ <>
+
+
+
+ {sample.value[1]}
+ >
+ );
+
+ return (
+
+ {scalarLeft ? scalarCell : vectorCells}
+
+ {node.op}
+ {node.bool && " bool"}
+
+ {scalarLeft ? vectorCells : scalarCell}
+ =
+
+ {keep ? (
+ formatPrometheusFloat(value)
+ ) : (
+ dropped
+ )}
+
+
+ );
+ })}
+
+
+ );
+};
+
+export default VectorScalarBinaryExprExplainView;
diff --git a/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx
new file mode 100644
index 000000000..6735d1320
--- /dev/null
+++ b/web/ui/mantine-ui/src/pages/query/ExplainViews/BinaryExpr/VectorVector.tsx
@@ -0,0 +1,679 @@
+import React, { FC, useState } from "react";
+import { BinaryExpr, vectorMatchCardinality } from "../../../../promql/ast";
+import { InstantSample, Metric } from "../../../../api/responseTypes/query";
+import { isComparisonOperator, isSetOperator } from "../../../../promql/utils";
+import {
+ VectorMatchError,
+ BinOpMatchGroup,
+ MatchErrorType,
+ computeVectorVectorBinOp,
+ filteredSampleValue,
+} from "../../../../promql/binOp";
+import { formatNode, labelNameList } from "../../../../promql/format";
+import {
+ Alert,
+ Anchor,
+ Box,
+ Group,
+ List,
+ Switch,
+ Table,
+ Text,
+} from "@mantine/core";
+import { useLocalStorage } from "@mantine/hooks";
+import { IconAlertTriangle } from "@tabler/icons-react";
+import SeriesName from "../../SeriesName";
+
+// We use this color pool for two purposes:
+//
+// 1. To distinguish different match groups from each other.
+// 2. To distinguish multiple series within one match group from each other.
+//
+// NOTE(review): several colors repeat later in the pool; since lookups wrap
+// with modulo anyway, the duplicates only affect how soon colors are reused.
+const colorPool = [
+  "#1f77b4",
+  "#ff7f0e",
+  "#2ca02c",
+  "#d62728",
+  "#9467bd",
+  "#8c564b",
+  "#e377c2",
+  "#7f7f7f",
+  "#bcbd22",
+  "#17becf",
+  "#393b79",
+  "#637939",
+  "#8c6d31",
+  "#843c39",
+  "#d6616b",
+  "#7b4173",
+  "#ce6dbd",
+  "#9c9ede",
+  "#c5b0d5",
+  "#c49c94",
+  "#f7b6d2",
+  "#c7c7c7",
+  "#dbdb8d",
+  "#9edae5",
+  "#393b79",
+  "#637939",
+  "#8c6d31",
+  "#843c39",
+  "#d6616b",
+  "#7b4173",
+  "#ce6dbd",
+  "#9c9ede",
+  "#c5b0d5",
+  "#c49c94",
+  "#f7b6d2",
+  "#c7c7c7",
+  "#dbdb8d",
+  "#9edae5",
+  "#17becf",
+  "#393b79",
+  "#637939",
+  "#8c6d31",
+  "#843c39",
+  "#d6616b",
+  "#7b4173",
+  "#ce6dbd",
+  "#9c9ede",
+  "#c5b0d5",
+  "#c49c94",
+  "#f7b6d2",
+];
+
+// Offset applied for right-hand-side series so that RHS colors start from a
+// different region of the pool than LHS colors.
+const rhsColorOffset = colorPool.length / 2 + 3;
+// Pool color for the given index (plus optional offset), wrapped via modulo,
+// with an "80" hex suffix appended for ~50% alpha.
+const colorForIndex = (idx: number, offset?: number) =>
+  `${colorPool[(idx + (offset || 0)) % colorPool.length]}80`;
+
+// Renders a small color swatch used to key a series to its match-group color
+// — presumably an inline box element; NOTE(review): the element markup was
+// lost when this patch was extracted, so only the wrapper remains. Verify
+// against the upstream file.
+const seriesSwatch = (color: string) => (
+
+);
+// Props for the vector-to-vector binary-operation explain view.
+interface VectorVectorBinaryExprExplainViewProps {
+  // The binary expression AST node being explained; node.matching must be
+  // non-null (the component throws otherwise).
+  node: BinaryExpr;
+  // Samples from the left-hand-side vector.
+  lhs: InstantSample[];
+  // Samples from the right-hand-side vector.
+  rhs: InstantSample[];
+}
+
+// Returns the subset of a metric's labels that are NOT used for matching:
+// with on(...) semantics (on === true) the listed labels are dropped; with
+// ignoring(...) semantics (on === false) only the listed labels (plus
+// __name__) are kept.
+const noMatchLabels = (
+  metric: Metric,
+  on: boolean,
+  labels: string[]
+): Metric => {
+  const result: Metric = {};
+  for (const name in metric) {
+    // A label belongs to the match group when its membership in `labels`
+    // agrees with `on`; __name__ never participates in ignoring(...) matching.
+    if (!(labels.includes(name) === on && (on || name !== "__name__"))) {
+      result[name] = metric[name];
+    }
+  }
+  return result;
+};
+
+// Produces the prose explanation of how a vector-to-vector binary operation
+// matches series, based on the node's matching modifiers (on/ignoring,
+// group_left/group_right cardinality, and the bool modifier).
+// NOTE(review): JSX element tags appear to have been stripped from this patch
+// during extraction — verify the markup against the upstream file.
+const explanationText = (node: BinaryExpr): React.ReactNode => {
+  // Non-null assertion: callers must only pass vector-vector binops that have
+  // their matching parameters filled in.
+  const matching = node.matching!;
+  // Which operand is the "one" side vs. the "many" side under the configured
+  // match cardinality.
+  const [oneSide, manySide] =
+    matching.card === vectorMatchCardinality.oneToMany
+      ? ["left", "right"]
+      : ["right", "left"];
+
+  return (
+    <>
+
+      {isComparisonOperator(node.op) ? (
+        <>
+          This node filters the series from the left-hand side based on the
+          result of a "
+          {node.op}"
+          comparison with matching series from the right-hand side.
+        >
+      ) : (
+        <>
+          This node calculates the result of applying the "
+          {node.op}"
+          operator between the sample values of matching series from two sets
+          of time series.
+        >
+      )}
+
+
+      {(matching.labels.length > 0 || matching.on) &&
+        (matching.on ? (
+
+            on(
+            {labelNameList(matching.labels)}):{" "}
+            {matching.labels.length > 0 ? (
+              <>
+                series on both sides are matched on the labels{" "}
+                {labelNameList(matching.labels)}
+              >
+            ) : (
+              <>
+                all series from one side are matched to all series on the
+                other side.
+              >
+            )}
+
+        ) : (
+
+            ignoring(
+            {labelNameList(matching.labels)}): series on both sides are
+            matched on all of their labels, except{" "}
+            {labelNameList(matching.labels)}.
+
+        ))}
+      {matching.card === vectorMatchCardinality.oneToOne ? (
+
+          One-to-one match. Each series from the left-hand side is allowed to
+          match with at most one series on the right-hand side, and vice
+          versa.
+
+      ) : (
+
+
+            group_{manySide}({labelNameList(matching.include)})
+
+          : {matching.card} match. Each series from the {oneSide}-hand side is
+          allowed to match with multiple series from the {manySide}-hand side.
+          {matching.include.length !== 0 && (
+            <>
+              {" "}
+              Any {labelNameList(matching.include)} labels found on the{" "}
+              {oneSide}-hand side are propagated into the result, in addition
+              to the match group's labels.
+            >
+          )}
+
+      )}
+      {node.bool && (
+
+          bool: Instead of
+          filtering series based on the outcome of the comparison for matched
+          series, keep all series, but return the comparison outcome as a
+          boolean 0 or{" "}
+          1 sample value.
+
+      )}
+
+    >
+  );
+};
+
+// Renders a human-readable explanation, plus suggested fixes, for a vector
+// matching error in one match group (the group itself is currently unused,
+// hence the underscore-prefixed parameter).
+// NOTE(review): JSX element tags appear to have been stripped from this patch
+// during extraction — verify the markup against the upstream file.
+const explainError = (
+  binOp: BinaryExpr,
+  _mg: BinOpMatchGroup,
+  err: VectorMatchError
+) => {
+  // Shared "Possible fixes" list appended to every error explanation below.
+  const fixes = (
+    <>
+
+      Possible fixes:
+
+
+      {err.type === MatchErrorType.multipleMatchesForOneToOneMatching && (
+
+
+
+              Allow {err.dupeSide === "left" ? "many-to-one" : "one-to-many"}{" "}
+              matching
+
+            : If you want to allow{" "}
+            {err.dupeSide === "left" ? "many-to-one" : "one-to-many"}{" "}
+            matching, you need to explicitly request it by adding a{" "}
+
+              group_{err.dupeSide}()
+            {" "}
+            modifier to the operator:
+
+
+            {formatNode(
+              {
+                ...binOp,
+                matching: {
+                  ...(binOp.matching
+                    ? binOp.matching
+                    : { labels: [], on: false, include: [] }),
+                  card:
+                    err.dupeSide === "left"
+                      ? vectorMatchCardinality.manyToOne
+                      : vectorMatchCardinality.oneToMany,
+                },
+              },
+              true,
+              1
+            )}
+
+
+      )}
+
+        Update your matching parameters: Consider including
+        more differentiating labels in your matching modifiers (via{" "}
+        on() /{" "}
+        ignoring()) to
+        split multiple series into distinct match groups.
+
+
+        Aggregate the input: Consider aggregating away the
+        extra labels that create multiple series per group before applying the
+        binary operation.
+
+
+    >
+  );
+
+  // Pick the explanation that matches the specific error type.
+  switch (err.type) {
+    case MatchErrorType.multipleMatchesForOneToOneMatching:
+      return (
+        <>
+
+            Binary operators only allow one-to-one matching by
+            default, but we found{" "}
+            multiple series on the {err.dupeSide} side for this
+            match group.
+
+          {fixes}
+        >
+      );
+    case MatchErrorType.multipleMatchesOnBothSides:
+      return (
+        <>
+
+            We found multiple series on both sides for this
+            match group. Since many-to-many matching is not
+            supported, you need to ensure that at least one of the sides only
+            yields a single series.
+
+          {fixes}
+        >
+      );
+    case MatchErrorType.multipleMatchesOnOneSide: {
+      // Which side must stay unique under the requested cardinality.
+      const [oneSide, manySide] =
+        binOp.matching!.card === vectorMatchCardinality.oneToMany
+          ? ["left", "right"]
+          : ["right", "left"];
+      return (
+        <>
+
+            You requested{" "}
+
+              {oneSide === "right" ? "many-to-one" : "one-to-many"} matching
+            {" "}
+            via{" "}
+
+              group_{manySide}()
+
+            , but we also found{" "}
+            multiple series on the {oneSide} side of the match
+            group. Make sure that the {oneSide} side only contains a single
+            series.
+
+          {fixes}
+        >
+      );
+    }
+    default:
+      throw new Error("unknown match error");
+  }
+};
+
+const VectorVectorBinaryExprExplainView: FC<
+ VectorVectorBinaryExprExplainViewProps
+> = ({ node, lhs, rhs }) => {
+ // TODO: Don't use Mantine's local storage as a one-off here. Decide whether we
+ // want to keep Redux, and then do it only via one or the other everywhere.
+ const [showSampleValues, setShowSampleValues] = useLocalStorage({
+ key: "queryPage.explain.binaryOperators.showSampleValues",
+ defaultValue: false,
+ });
+
+ const [maxGroups, setMaxGroups] = useState(100);
+ const [maxSeriesPerGroup, setMaxSeriesPerGroup] = useState<
+ number | undefined
+ >(100);
+
+ const { matching } = node;
+ if (matching === null) {
+ // The parent should make sure to only pass in vector-vector binops that have their "matching" field filled out.
+ throw new Error("missing matching parameters in vector-to-vector binop");
+ }
+
+ const { groups: matchGroups, numGroups } = computeVectorVectorBinOp(
+ node.op,
+ matching,
+ node.bool,
+ lhs,
+ rhs,
+ {
+ maxGroups: maxGroups,
+ maxSeriesPerGroup: maxSeriesPerGroup,
+ }
+ );
+ const errCount = Object.values(matchGroups).filter((mg) => mg.error).length;
+
+ return (
+ <>
+ {explanationText(node)}
+
+ {!isSetOperator(node.op) && (
+ <>
+
+ {/*
+ setAllowLineBreaks(event.currentTarget.checked)
+ }
+ /> */}
+
+ setShowSampleValues(event.currentTarget.checked)
+ }
+ />
+
+
+ {numGroups > Object.keys(matchGroups).length && (
+ }
+ >
+ Too many match groups to display, only showing{" "}
+ {Object.keys(matchGroups).length} out of {numGroups} groups.
+
+
+ setMaxGroups(undefined)}>
+ Show all groups
+
+
+ )}
+
+ {errCount > 0 && (
+ }
+ >
+ Found matching issues in {errCount} match group
+ {errCount > 1 ? "s" : ""}. See below for per-group error details.
+
+ )}
+
+
+
+ {Object.values(matchGroups).map((mg, mgIdx) => {
+ const {
+ groupLabels,
+ lhs,
+ lhsCount,
+ rhs,
+ rhsCount,
+ result,
+ error,
+ } = mg;
+
+ const matchGroupTitleRow = (color: string) => (
+
+
+
+
+
+ );
+
+ const matchGroupTable = (
+ series: InstantSample[],
+ seriesCount: number,
+ color: string,
+ colorOffset?: number
+ ) => (
+
+
+
+ {series.length === 0 ? (
+
+
+ no matching series
+
+
+ ) : (
+ <>
+ {matchGroupTitleRow(color)}
+ {series.map((s, sIdx) => {
+ if (s.value === undefined) {
+ // TODO: Figure out how to handle native histograms.
+ throw new Error(
+ "Native histograms are not supported yet"
+ );
+ }
+
+ return (
+
+
+