@@ -112,7 +112,7 @@ type scrapeLoopOptions struct {
 	trackTimestampsStaleness bool
 	interval                 time.Duration
 	timeout                  time.Duration
-	scrapeClassicHistograms  bool
+	alwaysScrapeClassicHist  bool
 	validationScheme         model.ValidationScheme
 
 	mrc               []*relabel.Config
@@ -179,7 +179,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
 			opts.labelLimits,
 			opts.interval,
 			opts.timeout,
-			opts.scrapeClassicHistograms,
+			opts.alwaysScrapeClassicHist,
 			options.EnableNativeHistogramsIngestion,
 			options.EnableCreatedTimestampZeroIngestion,
 			options.ExtraMetrics,
@@ -480,7 +480,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 		enableCompression        = sp.config.EnableCompression
 		trackTimestampsStaleness = sp.config.TrackTimestampsStaleness
 		mrc                      = sp.config.MetricRelabelConfigs
-		scrapeClassicHistograms  = sp.config.ScrapeClassicHistograms
+		alwaysScrapeClassicHist  = sp.config.AlwaysScrapeClassicHistograms
 	)
 
 	validationScheme := model.UTF8Validation
@@ -521,7 +521,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 				mrc:                      mrc,
 				interval:                 interval,
 				timeout:                  timeout,
-				scrapeClassicHistograms:  scrapeClassicHistograms,
+				alwaysScrapeClassicHist:  alwaysScrapeClassicHist,
 				validationScheme:         validationScheme,
 			})
 			if err != nil {
@@ -883,7 +883,7 @@ type scrapeLoop struct {
 	labelLimits              *labelLimits
 	interval                 time.Duration
 	timeout                  time.Duration
-	scrapeClassicHistograms  bool
+	alwaysScrapeClassicHist  bool
 	validationScheme         model.ValidationScheme
 
 	// Feature flagged options.
@@ -1183,7 +1183,7 @@ func newScrapeLoop(ctx context.Context,
 	labelLimits *labelLimits,
 	interval time.Duration,
 	timeout time.Duration,
-	scrapeClassicHistograms bool,
+	alwaysScrapeClassicHist bool,
 	enableNativeHistogramIngestion bool,
 	enableCTZeroIngestion bool,
 	reportExtraMetrics bool,
@@ -1237,7 +1237,7 @@ func newScrapeLoop(ctx context.Context,
 		labelLimits:                    labelLimits,
 		interval:                       interval,
 		timeout:                        timeout,
-		scrapeClassicHistograms:        scrapeClassicHistograms,
+		alwaysScrapeClassicHist:        alwaysScrapeClassicHist,
 		enableNativeHistogramIngestion: enableNativeHistogramIngestion,
 		enableCTZeroIngestion:          enableCTZeroIngestion,
 		reportExtraMetrics:             reportExtraMetrics,
@@ -1537,7 +1537,7 @@ type appendErrors struct {
 }
 
 func (sl *scrapeLoop) append(app storage.Appender, b []byte, contentType string, ts time.Time) (total, added, seriesAdded int, err error) {
-	p, err := textparse.New(b, contentType, sl.scrapeClassicHistograms, sl.enableCTZeroIngestion, sl.symbolTable)
+	p, err := textparse.New(b, contentType, sl.alwaysScrapeClassicHist, sl.enableCTZeroIngestion, sl.symbolTable)
 	if err != nil {
 		sl.l.Debug(
 			"Invalid content type on scrape, using prometheus parser as fallback.",