
textfile: Allow specifying multiple directory globs. (#3135)

We already support reading from multiple directories, but only via glob matching. Now multiple directories can be specified outright by repeating the flag.

An example use case is exporting both static info baked into a read-only filesystem during image building and traditional textfile uses (e.g. read-write service metrics files), without scripting a file copy.
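
With this change the flag can simply be repeated, for example (paths purely illustrative): --collector.textfile.directory=/etc/node_exporter/static-metrics --collector.textfile.directory=/var/lib/node_exporter/textfile_collector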

* keep flag name for compatibility
* clarify flag help text
* add test case (replicating the glob one)

Signed-off-by: eduarrrd <eduarrrd@users.noreply.github.com>
Commit 11f93d3da1 (pull/3138/head) by eduarrrd

collector/textfile.go (22 lines changed)

@@ -32,8 +32,8 @@ import (
 )
 
 var (
-    textFileDirectory = kingpin.Flag("collector.textfile.directory", "Directory to read text files with metrics from.").Default("").String()
-    mtimeDesc         = prometheus.NewDesc(
+    textFileDirectories = kingpin.Flag("collector.textfile.directory", "Directory to read text files with metrics from, supports glob matching. (repeatable)").Default("").Strings()
+    mtimeDesc           = prometheus.NewDesc(
         "node_textfile_mtime_seconds",
         "Unixtime mtime of textfiles successfully read.",
         []string{"file"},
@@ -42,7 +42,7 @@ var (
 )
 
 type textFileCollector struct {
-    path string
+    paths []string
     // Only set for testing to get predictable output.
     mtime  *float64
     logger *slog.Logger
@@ -56,7 +56,7 @@ func init() {
 // in the given textfile directory.
 func NewTextFileCollector(logger *slog.Logger) (Collector, error) {
     c := &textFileCollector{
-        path:   *textFileDirectory,
+        paths:  *textFileDirectories,
         logger: logger,
     }
     return c, nil
@@ -194,11 +194,15 @@ func (c *textFileCollector) Update(ch chan<- prometheus.Metric) error {
     metricsNamesToFiles := map[string][]string{}
     metricsNamesToHelpTexts := map[string][2]string{}
 
-    paths, err := filepath.Glob(c.path)
-    if err != nil || len(paths) == 0 {
-        // not glob or not accessible path either way assume single
-        // directory and let os.ReadDir handle it
-        paths = []string{c.path}
+    paths := []string{}
+    for _, glob := range c.paths {
+        ps, err := filepath.Glob(glob)
+        if err != nil || len(ps) == 0 {
+            // not glob or not accessible path either way assume single
+            // directory and let os.ReadDir handle it
+            ps = []string{glob}
+        }
+        paths = append(paths, ps...)
     }
 
     mtimes := make(map[string]time.Time)
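
For illustration, a minimal standalone sketch (not the collector code itself) of the expansion behaviour the hunk above introduces; the example inputs in main are only illustrative:

package main

import (
    "fmt"
    "path/filepath"
)

// expandDirectories mirrors the new Update() logic: each configured value is
// glob-expanded; if the glob fails or matches nothing, the value is kept as a
// literal directory and any error is left for os.ReadDir to surface later.
func expandDirectories(configured []string) []string {
    paths := []string{}
    for _, glob := range configured {
        ps, err := filepath.Glob(glob)
        if err != nil || len(ps) == 0 {
            ps = []string{glob}
        }
        paths = append(paths, ps...)
    }
    return paths
}

func main() {
    // Illustrative inputs only; in the collector these come from the
    // repeated --collector.textfile.directory flag.
    fmt.Println(expandDirectories([]string{
        "fixtures/textfile/*_extra_dimension",
        "fixtures/textfile/two_metric_files",
    }))
}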

collector/textfile_test.go (75 lines changed)

@@ -52,75 +52,82 @@ func (a collectorAdapter) Collect(ch chan<- prometheus.Metric) {
 
 func TestTextfileCollector(t *testing.T) {
     tests := []struct {
-        path string
-        out  string
+        paths []string
+        out   string
     }{
         {
-            path: "fixtures/textfile/no_metric_files",
-            out:  "fixtures/textfile/no_metric_files.out",
+            paths: []string{"fixtures/textfile/no_metric_files"},
+            out:   "fixtures/textfile/no_metric_files.out",
         },
         {
-            path: "fixtures/textfile/two_metric_files",
-            out:  "fixtures/textfile/two_metric_files.out",
+            paths: []string{"fixtures/textfile/two_metric_files"},
+            out:   "fixtures/textfile/two_metric_files.out",
         },
         {
-            path: "fixtures/textfile/nonexistent_path",
-            out:  "fixtures/textfile/nonexistent_path.out",
+            paths: []string{"fixtures/textfile/nonexistent_path"},
+            out:   "fixtures/textfile/nonexistent_path.out",
         },
         {
-            path: "fixtures/textfile/client_side_timestamp",
-            out:  "fixtures/textfile/client_side_timestamp.out",
+            paths: []string{"fixtures/textfile/client_side_timestamp"},
+            out:   "fixtures/textfile/client_side_timestamp.out",
         },
         {
-            path: "fixtures/textfile/different_metric_types",
-            out:  "fixtures/textfile/different_metric_types.out",
+            paths: []string{"fixtures/textfile/different_metric_types"},
+            out:   "fixtures/textfile/different_metric_types.out",
         },
         {
-            path: "fixtures/textfile/inconsistent_metrics",
-            out:  "fixtures/textfile/inconsistent_metrics.out",
+            paths: []string{"fixtures/textfile/inconsistent_metrics"},
+            out:   "fixtures/textfile/inconsistent_metrics.out",
         },
         {
-            path: "fixtures/textfile/histogram",
-            out:  "fixtures/textfile/histogram.out",
+            paths: []string{"fixtures/textfile/histogram"},
+            out:   "fixtures/textfile/histogram.out",
         },
         {
-            path: "fixtures/textfile/histogram_extra_dimension",
-            out:  "fixtures/textfile/histogram_extra_dimension.out",
+            paths: []string{"fixtures/textfile/histogram_extra_dimension"},
+            out:   "fixtures/textfile/histogram_extra_dimension.out",
         },
         {
-            path: "fixtures/textfile/summary",
-            out:  "fixtures/textfile/summary.out",
+            paths: []string{"fixtures/textfile/summary"},
+            out:   "fixtures/textfile/summary.out",
         },
         {
-            path: "fixtures/textfile/summary_extra_dimension",
-            out:  "fixtures/textfile/summary_extra_dimension.out",
+            paths: []string{"fixtures/textfile/summary_extra_dimension"},
+            out:   "fixtures/textfile/summary_extra_dimension.out",
         },
         {
-            path: "fixtures/textfile/*_extra_dimension",
-            out:  "fixtures/textfile/glob_extra_dimension.out",
+            paths: []string{
+                "fixtures/textfile/histogram_extra_dimension",
+                "fixtures/textfile/summary_extra_dimension",
+            },
+            out: "fixtures/textfile/glob_extra_dimension.out",
+        },
+        {
+            paths: []string{"fixtures/textfile/*_extra_dimension"},
+            out:   "fixtures/textfile/glob_extra_dimension.out",
         },
         {
-            path: "fixtures/textfile/metrics_merge_empty_help",
-            out:  "fixtures/textfile/metrics_merge_empty_help.out",
+            paths: []string{"fixtures/textfile/metrics_merge_empty_help"},
+            out:   "fixtures/textfile/metrics_merge_empty_help.out",
         },
         {
-            path: "fixtures/textfile/metrics_merge_no_help",
-            out:  "fixtures/textfile/metrics_merge_no_help.out",
+            paths: []string{"fixtures/textfile/metrics_merge_no_help"},
+            out:   "fixtures/textfile/metrics_merge_no_help.out",
         },
         {
-            path: "fixtures/textfile/metrics_merge_same_help",
-            out:  "fixtures/textfile/metrics_merge_same_help.out",
+            paths: []string{"fixtures/textfile/metrics_merge_same_help"},
+            out:   "fixtures/textfile/metrics_merge_same_help.out",
         },
         {
-            path: "fixtures/textfile/metrics_merge_different_help",
-            out:  "fixtures/textfile/metrics_merge_different_help.out",
+            paths: []string{"fixtures/textfile/metrics_merge_different_help"},
+            out:   "fixtures/textfile/metrics_merge_different_help.out",
         },
     }
 
     for i, test := range tests {
         mtime := 1.0
         c := &textFileCollector{
-            path:   test.path,
+            paths:  test.paths,
             mtime:  &mtime,
             logger: slog.New(slog.NewTextHandler(io.Discard, nil)),
         }
@@ -146,7 +153,7 @@ func TestTextfileCollector(t *testing.T) {
         }
 
         if string(want) != got {
-            t.Fatalf("%d.%q want:\n\n%s\n\ngot:\n\n%s", i, test.path, string(want), got)
+            t.Fatalf("%d.%q want:\n\n%s\n\ngot:\n\n%s", i, test.paths, string(want), got)
         }
     }
 }
