
Add a limit to the number of in-flight requests (#1166)

To keep stuck collectors from using up all system resources, add a
limit to the number of parallel in-flight scrape requests. Once the
limit is reached, additional requests are rejected with a 503 error.

Default to 40 requests; this seems like a reasonable number based on:
* Two Prometheus servers scraping every 15 seconds.
* Scrapes failing after being stuck for 5 minutes.
In that worst case, 2 servers × (300 s / 15 s) = 40 scrapes can be in flight at once.

Signed-off-by: Ben Kochie <superq@gmail.com>
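
For context on the mechanism: the limit is enforced by promhttp itself. When HandlerOpts.MaxRequestsInFlight is greater than zero, requests that arrive while that many scrapes are already being served are answered with HTTP 503; a value of 0 or less leaves scrapes unlimited. A minimal sketch outside of node_exporter (the registered collector, port, and literal limit are illustrative, not taken from this commit):

package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	reg := prometheus.NewRegistry()
	reg.MustRegister(prometheus.NewGoCollector())

	// With MaxRequestsInFlight set, promhttp serves at most that many
	// scrapes concurrently; additional requests receive a 503 response.
	// A value <= 0 disables the limit.
	http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{
		ErrorHandling:       promhttp.ContinueOnError,
		MaxRequestsInFlight: 40, // illustrative; matches the flag's default below
	}))

	log.Fatal(http.ListenAndServe(":9100", nil))
}
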
pull/1173/head
Ben Kochie 6 years ago committed by Johannes 'fish' Ziemke
commit ffefc8e74d
1 changed file: node_exporter.go (15 changed lines)
@@ -36,12 +36,14 @@ type handler struct {
 	// the exporter itself.
 	exporterMetricsRegistry *prometheus.Registry
 	includeExporterMetrics  bool
+	maxRequests             int
 }
 
-func newHandler(includeExporterMetrics bool) *handler {
+func newHandler(includeExporterMetrics bool, maxRequests int) *handler {
 	h := &handler{
 		exporterMetricsRegistry: prometheus.NewRegistry(),
 		includeExporterMetrics:  includeExporterMetrics,
+		maxRequests:             maxRequests,
 	}
 	if h.includeExporterMetrics {
 		h.exporterMetricsRegistry.MustRegister(
@@ -111,8 +113,9 @@ func (h *handler) innerHandler(filters ...string) (http.Handler, error) {
 	handler := promhttp.HandlerFor(
 		prometheus.Gatherers{h.exporterMetricsRegistry, r},
 		promhttp.HandlerOpts{
 			ErrorLog:      log.NewErrorLogger(),
 			ErrorHandling: promhttp.ContinueOnError,
+			MaxRequestsInFlight: h.maxRequests,
 		},
 	)
 	if h.includeExporterMetrics {
@@ -139,6 +142,10 @@ func main() {
 			"web.disable-exporter-metrics",
 			"Exclude metrics about the exporter itself (promhttp_*, process_*, go_*).",
 		).Bool()
+		maxRequests = kingpin.Flag(
+			"web.max-requests",
+			"Maximum number of parallel scrape requests. Use 0 to disable.",
+		).Default("40").Int()
 	)
 
 	log.AddFlags(kingpin.CommandLine)
@@ -149,7 +156,7 @@ func main() {
 	log.Infoln("Starting node_exporter", version.Info())
 	log.Infoln("Build context", version.BuildContext())
 
-	http.Handle(*metricsPath, newHandler(!*disableExporterMetrics))
+	http.Handle(*metricsPath, newHandler(!*disableExporterMetrics, *maxRequests))
 	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
 		w.Write([]byte(`<html>
 			<head><title>Node Exporter</title></head>
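
In node_exporter terms, with the exporter started using the new flag (e.g. --web.max-requests=40, the default), concurrent scrapes beyond the limit should come back as 503. A rough, hypothetical probe, not part of the commit; the target URL and request count are assumptions:

package main

import (
	"fmt"
	"net/http"
	"sync"
)

func main() {
	const target = "http://localhost:9100/metrics" // assumed local exporter
	const concurrent = 50                          // above the default limit of 40

	var (
		mu    sync.Mutex
		codes = map[int]int{}
		wg    sync.WaitGroup
	)
	for i := 0; i < concurrent; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			resp, err := http.Get(target)
			if err != nil {
				return
			}
			resp.Body.Close()
			mu.Lock()
			codes[resp.StatusCode]++
			mu.Unlock()
		}()
	}
	wg.Wait()
	// 503s only show up when scrapes are slow enough to overlap,
	// e.g. map[200:40 503:10]; fast scrapes may all return 200.
	fmt.Println(codes)
}
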
