expand tests for protobuf and fix problems

Signed-off-by: Jeanette Tan <jeanette.tan@grafana.com>
pull/14978/head
Jeanette Tan authored 5 months ago, committed by György Krajcsovits
parent cd498964e6
commit e3899187da

@@ -48,6 +48,7 @@ type NhcbParser struct
 	// Caches the entry itself if we are inserting a converted NHCB
 	// halfway through.
 	entry Entry
+	err error
 	justInsertedNhcb bool
 	// Caches the values and metric for the inserted converted NHCB.
 	bytesNhcb []byte
@@ -131,12 +132,13 @@ func (p *NhcbParser) Next() (Entry, error) {
 				return p.Next()
 			}
 		}
-		return p.entry, nil
+		return p.entry, p.err
 	}
 	et, err := p.parser.Next()
 	if err != nil {
 		if errors.Is(err, io.EOF) && p.processNhcb() {
 			p.entry = et
+			p.err = err
 			return EntryHistogram, nil
 		}
 		return EntryInvalid, err
@@ -236,10 +238,9 @@ func (p *NhcbParser) processNhcb() bool {
 		p.hNhcb = nil
 		p.fhNhcb = fh
 	}
-	buf := make([]byte, 0, 1024)
-	p.bytesNhcb = p.tempLsetNhcb.Bytes(buf)
+	p.metricStringNhcb = p.tempLsetNhcb.Get(labels.MetricName) + strings.ReplaceAll(p.tempLsetNhcb.DropMetricName().String(), ", ", ",")
+	p.bytesNhcb = []byte(p.metricStringNhcb)
 	p.lsetNhcb = p.tempLsetNhcb
-	p.metricStringNhcb = p.tempLsetNhcb.String()
 	p.tempNhcb = convertnhcb.NewTempHistogram()
 	p.isCollationInProgress = false
 	p.justInsertedNhcb = true

@@ -29,6 +29,7 @@ import (
 	"strings"
 	"sync"
 	"testing"
+	"text/template"
 	"time"
 
 	"github.com/go-kit/log"
@@ -3371,120 +3372,187 @@ test_summary_count 199
 
 // Testing whether we can automatically convert scraped classic histograms into native histograms with custom buckets.
 func TestConvertClassicHistograms(t *testing.T) {
-	metricsTexts := map[string]string{
-		"normal": `
-# HELP test_metric_1 some help text
-# TYPE test_metric_1 counter
-test_metric_1 1
-# HELP test_histogram_1 This is a histogram with default buckets
-# TYPE test_histogram_1 histogram
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_1_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_1_count{address="0.0.0.0",port="5001"} 1
-# HELP test_metric_2 some help text
-# TYPE test_metric_2 counter
-test_metric_2 1
-# HELP test_histogram_2 This is a histogram with default buckets
-# TYPE test_histogram_2 histogram
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_2_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_2_count{address="0.0.0.0",port="5001"} 1
-# HELP test_metric_3 some help text
-# TYPE test_metric_3 counter
-test_metric_3 1
-# HELP test_histogram_3 This is a histogram with default buckets
-# TYPE test_histogram_3 histogram
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_3_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_3_count{address="0.0.0.0",port="5001"} 1
-`,
-		"no metadata and different order": `
-test_metric_1 1
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_1_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_1_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_1_count{address="0.0.0.0",port="5001"} 1
-test_metric_2 1
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_2_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_2_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_2_count{address="0.0.0.0",port="5001"} 1
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="1"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="5"} 0
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="10"} 1
-test_histogram_3_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
-test_histogram_3_sum{address="0.0.0.0",port="5001"} 10
-test_histogram_3_count{address="0.0.0.0",port="5001"} 1
-test_metric_3 1
-`,
+	genTestCounterText := func(name string, value int, withMetadata bool) string {
+		if withMetadata {
+			return fmt.Sprintf(`
+# HELP %s some help text
+# TYPE %s counter
+%s %d
+`, name, name, name, value)
+		} else {
+			return fmt.Sprintf(`
+%s %d
+`, name, value)
+		}
+	}
+	genTestHistText := func(name string, withMetadata bool) string {
+		data := map[string]interface{}{
+			"name": name,
+		}
+		b := &bytes.Buffer{}
+		if withMetadata {
+			template.Must(template.New("").Parse(`
+# HELP {{.name}} This is a histogram with default buckets
+# TYPE {{.name}} histogram
+`)).Execute(b, data)
+		}
+		template.Must(template.New("").Parse(`
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.005"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.01"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.025"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.05"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.1"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.25"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="0.5"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="1"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="2.5"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="5"} 0
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="10"} 1
+{{.name}}_bucket{address="0.0.0.0",port="5001",le="+Inf"} 1
+{{.name}}_sum{address="0.0.0.0",port="5001"} 10
+{{.name}}_count{address="0.0.0.0",port="5001"} 1
+`)).Execute(b, data)
+		return b.String()
+	}
+	genTestCounterProto := func(name string, value int) string {
+		return fmt.Sprintf(`
+name: "%s"
+help: "some help text"
+type: COUNTER
+metric: <
+counter: <
+value: %d
+>
+>
+`, name, value)
+	}
+	genTestHistProto := func(name string) string {
+		return fmt.Sprintf(`
+name: "%s"
+help: "This is a histogram with default buckets"
+type: HISTOGRAM
+metric: <
+label: <
+name: "address"
+value: "0.0.0.0"
+>
+label: <
+name: "port"
+value: "5001"
+>
+histogram: <
+sample_count: 1
+sample_sum: 10
+bucket: <
+cumulative_count: 0
+upper_bound: 0.005
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.01
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.025
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.05
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.1
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.25
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 0.5
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 1
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 2.5
+>
+bucket: <
+cumulative_count: 0
+upper_bound: 5
+>
+bucket: <
+cumulative_count: 1
+upper_bound: 10
+>
+>
+timestamp_ms: 1234568
+>
+`, name)
 	}
 
-	// The expected "le" values do not have the trailing ".0".
-	expectedLeValuesCorrect := []string{"0.005", "0.01", "0.025", "0.05", "0.1", "0.25", "0.5", "1", "2.5", "5", "10", "+Inf"}
-	expectedLeValuesNone := []string{}
+	metricsTexts := map[string]struct {
+		text        []string
+		contentType string
+	}{
+		"text": {
+			text: []string{
+				genTestCounterText("test_metric_1", 1, true),
+				genTestHistText("test_histogram_1", true),
+				genTestCounterText("test_metric_2", 1, true),
+				genTestHistText("test_histogram_2", true),
+				genTestCounterText("test_metric_3", 1, true),
+				genTestHistText("test_histogram_3", true),
+			},
+		},
+		"text, no metadata, in different order": {
+			text: []string{
+				genTestCounterText("test_metric_1", 1, false),
+				genTestHistText("test_histogram_1", false),
+				genTestCounterText("test_metric_2", 1, false),
+				genTestHistText("test_histogram_2", false),
+				genTestHistText("test_histogram_3", false),
+				genTestCounterText("test_metric_3", 1, false),
+			},
+		},
+		"protobuf": {
+			text: []string{
+				genTestCounterProto("test_metric_1", 1),
+				genTestHistProto("test_histogram_1"),
+				genTestCounterProto("test_metric_2", 1),
+				genTestHistProto("test_histogram_2"),
+				genTestCounterProto("test_metric_3", 1),
+				genTestHistProto("test_histogram_3"),
+			},
+			contentType: "application/vnd.google.protobuf",
+		},
+		"protobuf, in different order": {
+			text: []string{
+				genTestHistProto("test_histogram_1"),
+				genTestCounterProto("test_metric_1", 1),
+				genTestHistProto("test_histogram_2"),
+				genTestCounterProto("test_metric_2", 1),
+				genTestHistProto("test_histogram_3"),
+				genTestCounterProto("test_metric_3", 1),
+			},
+			contentType: "application/vnd.google.protobuf",
+		},
+	}
 
-	checkValues := func(labelName string, expectedValues []string, series storage.SeriesSet) {
+	checkBucketValues := func(expectedCount int, contentType string, series storage.SeriesSet) {
+		labelName := "le"
+		var expectedValues []string
+		if expectedCount > 0 {
+			if contentType == "application/vnd.google.protobuf" {
+				// The expected "le" values have the trailing ".0".
+				expectedValues = []string{"0.005", "0.01", "0.025", "0.05", "0.1", "0.25", "0.5", "1.0", "2.5", "5.0", "10.0", "+Inf"}
+			} else {
+				// The expected "le" values do not have the trailing ".0".
+				expectedValues = []string{"0.005", "0.01", "0.025", "0.05", "0.1", "0.25", "0.5", "1", "2.5", "5", "10", "+Inf"}
+			}
+		}
 		foundLeValues := map[string]bool{}
 
 		for series.Next() {
@@ -3494,64 +3562,98 @@ test_metric_3 1
 			foundLeValues[v] = true
 		}
 
-		require.Equal(t, len(expectedValues), len(foundLeValues), "number of label values not as expected")
+		require.Equal(t, len(expectedValues), len(foundLeValues), "unexpected number of label values, expected %v but found %v", expectedValues, foundLeValues)
 		for _, v := range expectedValues {
 			require.Contains(t, foundLeValues, v, "label value not found")
 		}
 	}
 
-	// Checks that the expected series is present and runs a basic sanity check of the values.
-	checkSeries := func(series storage.SeriesSet, encType chunkenc.ValueType, expectedCount int) {
-		count := 0
-		for series.Next() {
-			i := series.At().Iterator(nil)
-			switch encType {
-			case chunkenc.ValFloat:
-				for i.Next() == encType {
-					_, f := i.At()
-					require.Equal(t, 1., f)
-				}
-			case chunkenc.ValHistogram:
-				for i.Next() == encType {
-					_, h := i.AtHistogram(nil)
-					require.Equal(t, uint64(1), h.Count)
-					require.Equal(t, 10.0, h.Sum)
-				}
-			}
-			count++
-		}
-		require.Equal(t, expectedCount, count, "number of series not as expected")
-	}
+	// Checks that the expected series is present and runs a basic sanity check of the float values.
+	checkFloatSeries := func(series storage.SeriesSet, expectedCount int, expectedFloat float64) {
+		count := 0
+		for series.Next() {
+			i := series.At().Iterator(nil)
+		loop:
+			for {
+				switch i.Next() {
+				case chunkenc.ValNone:
+					break loop
+				case chunkenc.ValFloat:
+					_, f := i.At()
+					require.Equal(t, expectedFloat, f)
+				case chunkenc.ValHistogram:
+					panic("unexpected value type: histogram")
+				case chunkenc.ValFloatHistogram:
+					panic("unexpected value type: float histogram")
+				default:
+					panic("unexpected value type")
+				}
+			}
+			count++
+		}
+		require.Equal(t, expectedCount, count, "number of float series not as expected")
+	}
+
+	// Checks that the expected series is present and runs a basic sanity check of the histogram values.
+	checkHistSeries := func(series storage.SeriesSet, expectedCount int, expectedSchema int32) {
+		count := 0
+		for series.Next() {
+			i := series.At().Iterator(nil)
+		loop:
+			for {
+				switch i.Next() {
+				case chunkenc.ValNone:
+					break loop
+				case chunkenc.ValFloat:
+					panic("unexpected value type: float")
+				case chunkenc.ValHistogram:
+					_, h := i.AtHistogram(nil)
+					require.Equal(t, expectedSchema, h.Schema)
+					require.Equal(t, uint64(1), h.Count)
+					require.Equal(t, 10.0, h.Sum)
+				case chunkenc.ValFloatHistogram:
+					_, h := i.AtFloatHistogram(nil)
+					require.Equal(t, expectedSchema, h.Schema)
+					require.Equal(t, uint64(1), h.Count)
+					require.Equal(t, 10.0, h.Sum)
+				default:
+					panic("unexpected value type")
+				}
+			}
+			count++
+		}
+		require.Equal(t, expectedCount, count, "number of histogram series not as expected")
+	}
 
 	for metricsTextName, metricsText := range metricsTexts {
 		for name, tc := range map[string]struct {
 			scrapeClassicHistograms bool
 			convertClassicHistograms bool
-			expectedLeValues []string
+			expectedClassicHistCount int
 			expectedNhcbCount int
 		}{
 			"convert with scrape": {
 				scrapeClassicHistograms: true,
 				convertClassicHistograms: true,
-				expectedLeValues: expectedLeValuesCorrect,
+				expectedClassicHistCount: 1,
 				expectedNhcbCount: 1,
 			},
 			"convert without scrape": {
 				scrapeClassicHistograms: false,
 				convertClassicHistograms: true,
-				expectedLeValues: expectedLeValuesNone,
+				expectedClassicHistCount: 0,
 				expectedNhcbCount: 1,
 			},
 			"scrape without convert": {
 				scrapeClassicHistograms: true,
 				convertClassicHistograms: false,
-				expectedLeValues: expectedLeValuesCorrect,
+				expectedClassicHistCount: 1,
 				expectedNhcbCount: 0,
 			},
 			"neither scrape nor convert": {
 				scrapeClassicHistograms: false,
 				convertClassicHistograms: false,
-				expectedLeValues: expectedLeValuesCorrect, // since these are sent without native histograms
+				expectedClassicHistCount: 1, // since these are sent without native histograms
 				expectedNhcbCount: 0,
 			},
 		} {
@@ -3573,7 +3675,29 @@ test_metric_3 1
 				scraped := make(chan bool)
 				ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
-					fmt.Fprint(w, metricsText)
+					if metricsText.contentType != "" {
+						w.Header().Set("Content-Type", `application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited`)
+						for _, text := range metricsText.text {
+							buf := &bytes.Buffer{}
+							// In case of protobuf, we have to create the binary representation.
+							pb := &dto.MetricFamily{}
+							// From text to proto message.
+							require.NoError(t, proto.UnmarshalText(text, pb))
+							// From proto message to binary protobuf.
+							protoBuf, err := proto.Marshal(pb)
+							require.NoError(t, err)
+							// Write first length, then binary protobuf.
+							varintBuf := binary.AppendUvarint(nil, uint64(len(protoBuf)))
+							buf.Write(varintBuf)
+							buf.Write(protoBuf)
+							w.Write(buf.Bytes())
+						}
+					} else {
+						for _, text := range metricsText.text {
+							fmt.Fprint(w, text)
+						}
+					}
 
 					scrapeCount++
 					if scrapeCount > 2 {
 						close(scraped)
@@ -3581,7 +3705,7 @@ test_metric_3 1
 				}))
 				defer ts.Close()
 
-				sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
+				sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{EnableNativeHistogramsIngestion: true}, newTestScrapeMetrics(t))
 				require.NoError(t, err)
 				defer sp.stop()
@@ -3606,32 +3730,24 @@ test_metric_3 1
 				require.NoError(t, err)
 				defer q.Close()
 
-				series := q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_metric_1"))
-				checkSeries(series, chunkenc.ValFloat, 1)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_1_bucket"))
-				checkValues("le", tc.expectedLeValues, series)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_1"))
-				checkSeries(series, chunkenc.ValHistogram, tc.expectedNhcbCount)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_metric_2"))
-				checkSeries(series, chunkenc.ValFloat, 1)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_2_bucket"))
-				checkValues("le", tc.expectedLeValues, series)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_2"))
-				checkSeries(series, chunkenc.ValHistogram, tc.expectedNhcbCount)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_metric_3"))
-				checkSeries(series, chunkenc.ValFloat, 1)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_3_bucket"))
-				checkValues("le", tc.expectedLeValues, series)
-
-				series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", "test_histogram_3"))
-				checkSeries(series, chunkenc.ValHistogram, tc.expectedNhcbCount)
+				var series storage.SeriesSet
+
+				for i := 1; i <= 3; i++ {
+					series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", fmt.Sprintf("test_metric_%d", i)))
+					checkFloatSeries(series, 1, 1.)
+
+					series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", fmt.Sprintf("test_histogram_%d_sum", i)))
+					checkFloatSeries(series, tc.expectedClassicHistCount, 10.)
+
+					series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", fmt.Sprintf("test_histogram_%d_count", i)))
+					checkFloatSeries(series, tc.expectedClassicHistCount, 1.)
+
+					series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", fmt.Sprintf("test_histogram_%d_bucket", i)))
+					checkBucketValues(tc.expectedClassicHistCount, metricsText.contentType, series)
+
+					series = q.Select(ctx, false, nil, labels.MustNewMatcher(labels.MatchRegexp, "__name__", fmt.Sprintf("test_histogram_%d", i)))
+					checkHistSeries(series, tc.expectedNhcbCount, histogram.CustomBucketsSchema)
+				}
 			})
 		}
 	}
