@@ -799,10 +799,10 @@ load 10s
 		{
 			Query:        "metricWith1HistogramEvery10Seconds",
 			Start:        time.Unix(21, 0),
-			PeakSamples:  12,
-			TotalSamples: 12, // 1 histogram sample of size 12 / 10 seconds
+			PeakSamples:  13,
+			TotalSamples: 13, // 1 histogram HPoint of size 13 / 10 seconds
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
-				21000: 12,
+				21000: 13,
 			},
 		},
 		{
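The numbers above all follow one rule: a float sample (FPoint) counts as 1, while a native-histogram point (HPoint) counts as the histogram's size, which for this test fixture changes from 12 to 13. A minimal, self-contained sketch of that accounting, with per-point costs taken from the test comments rather than from the engine's real code:

```go
package main

import "fmt"

// Illustrative only: these costs come from the test comments above,
// not from the engine's actual accounting code.
const (
	fPointCost = 1  // a float sample counts as 1
	hPointCost = 13 // the fixture histogram's HPoint counts as 13 (previously 12)
)

// totalSamples sketches TotalSamples for a selector returning
// histogramPoints HPoints and floatPoints FPoints.
func totalSamples(histogramPoints, floatPoints int) int {
	return histogramPoints*hPointCost + floatPoints*fPointCost
}

func main() {
	// Instant query at t=21s: one histogram sample falls in the lookback window.
	fmt.Println(totalSamples(1, 0)) // 13, matching the expected TotalSamples above
}
```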
@@ -818,7 +818,7 @@ load 10s
 		{
 			Query:        "timestamp(metricWith1HistogramEvery10Seconds)",
 			Start:        time.Unix(21, 0),
-			PeakSamples:  13, // histogram size 12 + 1 extra because of timestamp
+			PeakSamples:  15, // histogram size 13 + 1 extra because of the Sample overhead + 1 float result
 			TotalSamples: 1, // 1 float sample (because of timestamp) / 10 seconds
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
 				21000: 1,
@@ -899,10 +899,10 @@ load 10s
 		{
 			Query:        "metricWith1HistogramEvery10Seconds[60s]",
 			Start:        time.Unix(201, 0),
-			PeakSamples:  72,
-			TotalSamples: 72, // 1 histogram (size 12) / 10 seconds * 60 seconds
+			PeakSamples:  78,
+			TotalSamples: 78, // 1 histogram (size 13 HPoint) / 10 seconds * 60 seconds
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
-				201000: 72,
+				201000: 78,
 			},
 		},
 		{
@@ -929,11 +929,11 @@ load 10s
 		{
 			Query:        "max_over_time(metricWith1HistogramEvery10Seconds[60s])[20s:5s]",
 			Start:        time.Unix(201, 0),
-			PeakSamples:  72,
-			TotalSamples: 312, // (1 histogram (size 12) / 10 seconds * 60 seconds) * 4 + 2 * 12 as
+			PeakSamples:  78,
+			TotalSamples: 338, // (1 histogram (size 13 HPoint) / 10 seconds * 60 seconds) * 4 + 2 * 13 as
 			// max_over_time(metricWith1SampleEvery10Seconds[60s]) @ 190 and 200 will return 7 samples.
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
-				201000: 312,
+				201000: 338,
 			},
 		},
 		{
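The 338 above is the least obvious total. Assuming, per the comment, that two of the four subquery steps pick up one extra point from the inner 60s selector, the arithmetic works out as in this sketch (again using the fixture's histogram size of 13):

```go
package main

import "fmt"

func main() {
	const hPointCost = 13 // fixture histogram size, taken from the comments above

	// max_over_time(...[60s])[20s:5s] at t=201s evaluates the inner 60s range
	// selector at 4 subquery steps. With one histogram every 10 seconds that
	// is 6 HPoints per step, plus one extra HPoint at two of the steps.
	perStep := 60 / 10 * hPointCost   // 78
	total := perStep*4 + 2*hPointCost // 312 + 26 = 338
	fmt.Println(perStep, total)       // 78 338
}
```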
@@ -948,10 +948,10 @@ load 10s
 		{
 			Query:        "metricWith1HistogramEvery10Seconds[60s] @ 30",
 			Start:        time.Unix(201, 0),
-			PeakSamples:  48,
-			TotalSamples: 48, // @ modifier force the evaluation to at 30 seconds - So it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
+			PeakSamples:  52,
+			TotalSamples: 52, // the @ modifier forces the evaluation at 30 seconds, so it brings 4 datapoints (0, 10, 20, 30 seconds) * 1 series
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
-				201000: 48,
+				201000: 52,
 			},
 		},
 		{
@@ -1086,13 +1086,13 @@ load 10s
 			Start:        time.Unix(204, 0),
 			End:          time.Unix(223, 0),
 			Interval:     5 * time.Second,
-			PeakSamples:  48,
-			TotalSamples: 48, // 1 histogram (size 12) per query * 4 steps
+			PeakSamples:  52,
+			TotalSamples: 52, // 1 histogram (size 13 HPoint) per query * 4 steps
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
-				204000: 12, // aligned to the step time, not the sample time
-				209000: 12,
-				214000: 12,
-				219000: 12,
+				204000: 13, // aligned to the step time, not the sample time
+				209000: 13,
+				214000: 13,
+				219000: 13,
 			},
 		},
 		{
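For the range-query case, each of the four steps reads one histogram HPoint, and the per-step totals are keyed by the step timestamp in milliseconds, not by the sample's own timestamp. A small sketch reproducing those expected values, with the histogram size of 13 assumed from the comments:

```go
package main

import "fmt"

func main() {
	const hPointCost = 13 // fixture histogram size, taken from the comments above

	// Range query from 204s to 223s at a 5s interval: 4 steps, each reading
	// one histogram HPoint, attributed to the step time in milliseconds.
	perStep := map[int64]int{}
	total := 0
	for ts := int64(204); ts <= 223; ts += 5 {
		perStep[ts*1000] = hPointCost
		total += hPointCost
	}
	fmt.Println(total)   // 52
	fmt.Println(perStep) // map[204000:13 209000:13 214000:13 219000:13]
}
```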
@@ -1116,8 +1116,8 @@ load 10s
 			Start:        time.Unix(201, 0),
 			End:          time.Unix(220, 0),
 			Interval:     5 * time.Second,
-			PeakSamples:  16,
-			TotalSamples: 4, // 1 sample per query * 4 steps
+			PeakSamples:  18, // histogram size 13 + 1 extra because of the Sample overhead + 4 float results
+			TotalSamples: 4, // 1 sample per query * 4 steps
 			TotalSamplesPerStep: stats.TotalSamplesPerStep{
 				201000: 1,
 				206000: 1,
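The new PeakSamples of 18 follows the same breakdown as the instant timestamp() case above: histogram size, plus one for the Sample overhead, plus one float result per step. A tiny arithmetic sketch using the figures from the comment rather than the engine's internals:

```go
package main

import "fmt"

func main() {
	// Breakdown of the PeakSamples value above, per the comment: one histogram
	// held in memory (13), one extra for the Sample overhead, and the 4 float
	// results of timestamp(), one per range-query step.
	const (
		histogramSize  = 13
		sampleOverhead = 1
		steps          = 4
	)
	fmt.Println(histogramSize + sampleOverhead + steps) // 18
}
```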