tsdb: Rework iterators (#9877)

- Pick At... method via return value of Next/Seek.
- Do not clobber returned buckets.
- Add partial FloatHistogram support.

Note that the promql package is now _only_ dealing with
FloatHistograms, following the idea that PromQL only knows float
values.

As a byproduct, I have removed the histogramSeries metric. In my
understanding, series can have both float and histogram samples, so
that metric doesn't make sense anymore.

As another byproduct, I have converged the sampleBuf and the
histogramSampleBuf in memSeries into one. The sample type stored in
the sampleBuf has been extended to also contain histograms even before
this commit.

Signed-off-by: beorn7 <beorn@grafana.com>
pull/9889/head
Björn Rabenstein 2021-11-29 08:54:23 +01:00 committed by GitHub
parent 26c0a433f5
commit 7e42acd3b1
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 1327 additions and 978 deletions

View File

@ -27,6 +27,7 @@ import (
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb" "github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
type backfillSample struct { type backfillSample struct {
@ -52,7 +53,7 @@ func queryAllSeries(t testing.TB, q storage.Querier, expectedMinTime, expectedMa
series := ss.At() series := ss.At()
it := series.Iterator() it := series.Iterator()
require.NoError(t, it.Err()) require.NoError(t, it.Err())
for it.Next() { for it.Next() == chunkenc.ValFloat {
ts, v := it.At() ts, v := it.At()
samples = append(samples, backfillSample{Timestamp: ts, Value: v, Labels: series.Labels()}) samples = append(samples, backfillSample{Timestamp: ts, Value: v, Labels: series.Labels()})
} }

View File

@ -29,6 +29,7 @@ import (
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/tsdb" "github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
type mockQueryRangeAPI struct { type mockQueryRangeAPI struct {
@ -148,7 +149,7 @@ func TestBackfillRuleIntegration(t *testing.T) {
require.Equal(t, 3, len(series.Labels())) require.Equal(t, 3, len(series.Labels()))
} }
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
samplesCount++ samplesCount++
ts, v := it.At() ts, v := it.At()
if v == testValue { if v == testValue {

View File

@ -32,6 +32,7 @@ import (
"time" "time"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/tsdb/index" "github.com/prometheus/prometheus/tsdb/index"
"github.com/alecthomas/units" "github.com/alecthomas/units"
@ -646,7 +647,7 @@ func dumpSamples(path string, mint, maxt int64) (err error) {
series := ss.At() series := ss.At()
lbs := series.Labels() lbs := series.Labels()
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
ts, val := it.At() ts, val := it.At()
fmt.Printf("%s %g %d\n", lbs, val, ts) fmt.Printf("%s %g %d\n", lbs, val, ts)
} }

View File

@ -150,9 +150,9 @@ type FloatBucketIterator interface {
} }
// FloatBucket represents a bucket with lower and upper limit and the count of // FloatBucket represents a bucket with lower and upper limit and the count of
// samples in the bucket. It also specifies if each limit is inclusive or // samples in the bucket as a float64. It also specifies if each limit is
// not. (Mathematically, inclusive limits create a closed interval, and // inclusive or not. (Mathematically, inclusive limits create a closed interval,
// non-inclusive limits an open interval.) // and non-inclusive limits an open interval.)
// //
// To represent cumulative buckets, Lower is set to -Inf, and the Count is then // To represent cumulative buckets, Lower is set to -Inf, and the Count is then
// cumulative (including the counts of all buckets for smaller values). // cumulative (including the counts of all buckets for smaller values).

View File

@ -70,19 +70,19 @@ type Span struct {
func (h *Histogram) Copy() *Histogram { func (h *Histogram) Copy() *Histogram {
c := *h c := *h
if h.PositiveSpans != nil { if len(h.PositiveSpans) != 0 {
c.PositiveSpans = make([]Span, len(h.PositiveSpans)) c.PositiveSpans = make([]Span, len(h.PositiveSpans))
copy(c.PositiveSpans, h.PositiveSpans) copy(c.PositiveSpans, h.PositiveSpans)
} }
if h.NegativeSpans != nil { if len(h.NegativeSpans) != 0 {
c.NegativeSpans = make([]Span, len(h.NegativeSpans)) c.NegativeSpans = make([]Span, len(h.NegativeSpans))
copy(c.NegativeSpans, h.NegativeSpans) copy(c.NegativeSpans, h.NegativeSpans)
} }
if h.PositiveBuckets != nil { if len(h.PositiveBuckets) != 0 {
c.PositiveBuckets = make([]int64, len(h.PositiveBuckets)) c.PositiveBuckets = make([]int64, len(h.PositiveBuckets))
copy(c.PositiveBuckets, h.PositiveBuckets) copy(c.PositiveBuckets, h.PositiveBuckets)
} }
if h.NegativeBuckets != nil { if len(h.NegativeBuckets) != 0 {
c.NegativeBuckets = make([]int64, len(h.NegativeBuckets)) c.NegativeBuckets = make([]int64, len(h.NegativeBuckets))
copy(c.NegativeBuckets, h.NegativeBuckets) copy(c.NegativeBuckets, h.NegativeBuckets)
} }
@ -162,15 +162,15 @@ func (h *Histogram) ToFloat() *FloatHistogram {
positiveSpans, negativeSpans []Span positiveSpans, negativeSpans []Span
positiveBuckets, negativeBuckets []float64 positiveBuckets, negativeBuckets []float64
) )
if h.PositiveSpans != nil { if len(h.PositiveSpans) != 0 {
positiveSpans = make([]Span, len(h.PositiveSpans)) positiveSpans = make([]Span, len(h.PositiveSpans))
copy(positiveSpans, h.PositiveSpans) copy(positiveSpans, h.PositiveSpans)
} }
if h.NegativeSpans != nil { if len(h.NegativeSpans) != 0 {
negativeSpans = make([]Span, len(h.NegativeSpans)) negativeSpans = make([]Span, len(h.NegativeSpans))
copy(negativeSpans, h.NegativeSpans) copy(negativeSpans, h.NegativeSpans)
} }
if h.PositiveBuckets != nil { if len(h.PositiveBuckets) != 0 {
positiveBuckets = make([]float64, len(h.PositiveBuckets)) positiveBuckets = make([]float64, len(h.PositiveBuckets))
var current float64 var current float64
for i, b := range h.PositiveBuckets { for i, b := range h.PositiveBuckets {
@ -178,7 +178,7 @@ func (h *Histogram) ToFloat() *FloatHistogram {
positiveBuckets[i] = current positiveBuckets[i] = current
} }
} }
if h.NegativeBuckets != nil { if len(h.NegativeBuckets) != 0 {
negativeBuckets = make([]float64, len(h.NegativeBuckets)) negativeBuckets = make([]float64, len(h.NegativeBuckets))
var current float64 var current float64
for i, b := range h.NegativeBuckets { for i, b := range h.NegativeBuckets {

View File

@ -1634,28 +1634,30 @@ func (ev *evaluator) vectorSelector(node *parser.VectorSelector, ts int64) (Vect
} }
// vectorSelectorSingle evaluates an instant vector for the iterator of one time series. // vectorSelectorSingle evaluates an instant vector for the iterator of one time series.
func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) (int64, float64, *histogram.Histogram, bool) { func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) (
int64, float64, *histogram.FloatHistogram, bool,
) {
refTime := ts - durationMilliseconds(node.Offset) refTime := ts - durationMilliseconds(node.Offset)
var t int64 var t int64
var v float64 var v float64
var h *histogram.Histogram var h *histogram.FloatHistogram
valueType := it.Seek(refTime) valueType := it.Seek(refTime)
switch valueType { switch valueType {
case storage.ValNone: case chunkenc.ValNone:
if it.Err() != nil { if it.Err() != nil {
ev.error(it.Err()) ev.error(it.Err())
} }
case storage.ValFloat: case chunkenc.ValFloat:
t, v = it.Values() t, v = it.Values()
case storage.ValHistogram: case chunkenc.ValHistogram, chunkenc.ValFloatHistogram:
t, h = it.HistogramValues() t, h = it.FloatHistogramValues()
default: default:
panic(fmt.Errorf("unknown value type %v", valueType)) panic(fmt.Errorf("unknown value type %v", valueType))
} }
if valueType == storage.ValNone || t > refTime { if valueType == chunkenc.ValNone || t > refTime {
var ok bool var ok bool
t, v, h, ok = it.PeekPrev() t, v, _, h, ok = it.PeekPrev()
if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) { if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) {
return 0, 0, nil, false return 0, 0, nil, false
} }
@ -1747,19 +1749,23 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
out = out[:0] out = out[:0]
} }
ok := it.Seek(maxt) soughtValueType := it.Seek(maxt)
if !ok { if soughtValueType == chunkenc.ValNone {
if it.Err() != nil { if it.Err() != nil {
ev.error(it.Err()) ev.error(it.Err())
} }
} }
buf := it.Buffer() buf := it.Buffer()
for buf.Next() { loop:
if buf.ChunkEncoding() == chunkenc.EncHistogram { for {
t, h := buf.AtHistogram() switch buf.Next() {
case chunkenc.ValNone:
break loop
case chunkenc.ValFloatHistogram, chunkenc.ValHistogram:
t, h := buf.AtFloatHistogram()
if value.IsStaleNaN(h.Sum) { if value.IsStaleNaN(h.Sum) {
continue continue loop
} }
// Values in the buffer are guaranteed to be smaller than maxt. // Values in the buffer are guaranteed to be smaller than maxt.
if t >= mint { if t >= mint {
@ -1769,10 +1775,10 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
ev.currentSamples++ ev.currentSamples++
out = append(out, Point{T: t, H: h}) out = append(out, Point{T: t, H: h})
} }
} else { case chunkenc.ValFloat:
t, v := buf.At() t, v := buf.At()
if value.IsStaleNaN(v) { if value.IsStaleNaN(v) {
continue continue loop
} }
// Values in the buffer are guaranteed to be smaller than maxt. // Values in the buffer are guaranteed to be smaller than maxt.
if t >= mint { if t >= mint {
@ -1785,9 +1791,9 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
} }
} }
// The sought sample might also be in the range. // The sought sample might also be in the range.
if ok { switch soughtValueType {
if it.ChunkEncoding() == chunkenc.EncHistogram { case chunkenc.ValFloatHistogram, chunkenc.ValHistogram:
t, h := it.HistogramValues() t, h := it.FloatHistogramValues()
if t == maxt && !value.IsStaleNaN(h.Sum) { if t == maxt && !value.IsStaleNaN(h.Sum) {
if ev.currentSamples >= ev.maxSamples { if ev.currentSamples >= ev.maxSamples {
ev.error(ErrTooManySamples(env)) ev.error(ErrTooManySamples(env))
@ -1795,7 +1801,7 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
out = append(out, Point{T: t, H: h}) out = append(out, Point{T: t, H: h})
ev.currentSamples++ ev.currentSamples++
} }
} else { case chunkenc.ValFloat:
t, v := it.Values() t, v := it.Values()
if t == maxt && !value.IsStaleNaN(v) { if t == maxt && !value.IsStaleNaN(v) {
if ev.currentSamples >= ev.maxSamples { if ev.currentSamples >= ev.maxSamples {
@ -1805,7 +1811,6 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m
ev.currentSamples++ ev.currentSamples++
} }
} }
}
return out return out
} }

View File

@ -21,6 +21,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
func TestLazyLoader_WithSamplesTill(t *testing.T) { func TestLazyLoader_WithSamplesTill(t *testing.T) {
@ -143,7 +144,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
Metric: storageSeries.Labels(), Metric: storageSeries.Labels(),
} }
it := storageSeries.Iterator() it := storageSeries.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
got.Points = append(got.Points, Point{T: t, V: v}) got.Points = append(got.Points, Point{T: t, V: v})
} }

View File

@ -83,7 +83,7 @@ func (s Series) String() string {
type Point struct { type Point struct {
T int64 T int64
V float64 V float64
H *histogram.Histogram H *histogram.FloatHistogram
} }
func (p Point) String() string { func (p Point) String() string {
@ -98,6 +98,7 @@ func (p Point) String() string {
// MarshalJSON implements json.Marshaler. // MarshalJSON implements json.Marshaler.
func (p Point) MarshalJSON() ([]byte, error) { func (p Point) MarshalJSON() ([]byte, error) {
// TODO(beorn7): Support histogram.
v := strconv.FormatFloat(p.V, 'f', -1, 64) v := strconv.FormatFloat(p.V, 'f', -1, 64)
return json.Marshal([...]interface{}{float64(p.T) / 1000, v}) return json.Marshal([...]interface{}{float64(p.T) / 1000, v})
} }
@ -284,19 +285,23 @@ func newStorageSeriesIterator(series Series) *storageSeriesIterator {
} }
} }
func (ssi *storageSeriesIterator) Seek(t int64) bool { func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType {
i := ssi.curr i := ssi.curr
if i < 0 { if i < 0 {
i = 0 i = 0
} }
for ; i < len(ssi.points); i++ { for ; i < len(ssi.points); i++ {
if ssi.points[i].T >= t { p := ssi.points[i]
if p.T >= t {
ssi.curr = i ssi.curr = i
return true if p.H != nil {
return chunkenc.ValFloatHistogram
}
return chunkenc.ValFloat
} }
} }
ssi.curr = len(ssi.points) - 1 ssi.curr = len(ssi.points) - 1
return false return chunkenc.ValNone
} }
func (ssi *storageSeriesIterator) At() (t int64, v float64) { func (ssi *storageSeriesIterator) At() (t int64, v float64) {
@ -305,17 +310,29 @@ func (ssi *storageSeriesIterator) At() (t int64, v float64) {
} }
func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
panic(errors.New("storageSeriesIterator: AtHistogram not supported"))
}
func (ssi *storageSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
p := ssi.points[ssi.curr] p := ssi.points[ssi.curr]
return p.T, p.H return p.T, p.H
} }
func (ssi *storageSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (ssi *storageSeriesIterator) AtT() int64 {
return chunkenc.EncXOR p := ssi.points[ssi.curr]
return p.T
} }
func (ssi *storageSeriesIterator) Next() bool { func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
ssi.curr++ ssi.curr++
return ssi.curr < len(ssi.points) if ssi.curr >= len(ssi.points) {
return chunkenc.ValNone
}
p := ssi.points[ssi.curr]
if p.H != nil {
return chunkenc.ValFloatHistogram
}
return chunkenc.ValFloat
} }
func (ssi *storageSeriesIterator) Err() error { func (ssi *storageSeriesIterator) Err() error {

View File

@ -36,6 +36,7 @@ import (
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
// RuleHealth describes the health state of a rule. // RuleHealth describes the health state of a rule.
@ -787,7 +788,7 @@ func (g *Group) RestoreForState(ts time.Time) {
var t int64 var t int64
var v float64 var v float64
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
t, v = it.At() t, v = it.At()
} }
if it.Err() != nil { if it.Err() != nil {

View File

@ -37,6 +37,7 @@ import (
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/util/teststorage" "github.com/prometheus/prometheus/util/teststorage"
) )
@ -597,7 +598,7 @@ func readSeriesSet(ss storage.SeriesSet) (map[string][]promql.Point, error) {
points := []promql.Point{} points := []promql.Point{}
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
points = append(points, promql.Point{T: t, V: v}) points = append(points, promql.Point{T: t, V: v})
} }

View File

@ -45,6 +45,7 @@ import (
"github.com/prometheus/prometheus/model/timestamp" "github.com/prometheus/prometheus/model/timestamp"
"github.com/prometheus/prometheus/model/value" "github.com/prometheus/prometheus/model/value"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/util/teststorage" "github.com/prometheus/prometheus/util/teststorage"
"github.com/prometheus/prometheus/util/testutil" "github.com/prometheus/prometheus/util/testutil"
) )
@ -2755,7 +2756,7 @@ func TestScrapeReportSingleAppender(t *testing.T) {
c := 0 c := 0
for series.Next() { for series.Next() {
i := series.At().Iterator() i := series.At().Iterator()
for i.Next() { for i.Next() != chunkenc.ValNone {
c++ c++
} }
} }

View File

@ -14,6 +14,7 @@
package storage package storage
import ( import (
"fmt"
"math" "math"
"github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/histogram"
@ -27,7 +28,7 @@ type BufferedSeriesIterator struct {
delta int64 delta int64
lastTime int64 lastTime int64
ok bool valueType chunkenc.ValueType
} }
// NewBuffer returns a new iterator that buffers the values within the time range // NewBuffer returns a new iterator that buffers the values within the time range
@ -42,7 +43,7 @@ func NewBuffer(delta int64) *BufferedSeriesIterator {
func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator { func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator {
// TODO(codesome): based on encoding, allocate different buffer. // TODO(codesome): based on encoding, allocate different buffer.
bit := &BufferedSeriesIterator{ bit := &BufferedSeriesIterator{
buf: newSampleRing(delta, 16, it.ChunkEncoding()), buf: newSampleRing(delta, 16),
delta: delta, delta: delta,
} }
bit.Reset(it) bit.Reset(it)
@ -55,10 +56,9 @@ func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterato
func (b *BufferedSeriesIterator) Reset(it chunkenc.Iterator) { func (b *BufferedSeriesIterator) Reset(it chunkenc.Iterator) {
b.it = it b.it = it
b.lastTime = math.MinInt64 b.lastTime = math.MinInt64
b.ok = true
b.buf.reset() b.buf.reset()
b.buf.delta = b.delta b.buf.delta = b.delta
it.Next() b.valueType = it.Next()
} }
// ReduceDelta lowers the buffered time delta, for the current SeriesIterator only. // ReduceDelta lowers the buffered time delta, for the current SeriesIterator only.
@ -80,7 +80,7 @@ func (b *BufferedSeriesIterator) Buffer() chunkenc.Iterator {
} }
// Seek advances the iterator to the element at time t or greater. // Seek advances the iterator to the element at time t or greater.
func (b *BufferedSeriesIterator) Seek(t int64) bool { func (b *BufferedSeriesIterator) Seek(t int64) chunkenc.ValueType {
t0 := t - b.buf.delta t0 := t - b.buf.delta
// If the delta would cause us to seek backwards, preserve the buffer // If the delta would cause us to seek backwards, preserve the buffer
@ -88,54 +88,64 @@ func (b *BufferedSeriesIterator) Seek(t int64) bool {
if t0 > b.lastTime { if t0 > b.lastTime {
b.buf.reset() b.buf.reset()
b.ok = b.it.Seek(t0) b.valueType = b.it.Seek(t0)
if !b.ok { switch b.valueType {
return false case chunkenc.ValNone:
} return chunkenc.ValNone
if b.it.ChunkEncoding() == chunkenc.EncHistogram { case chunkenc.ValFloat:
b.lastTime, _ = b.HistogramValues()
} else {
b.lastTime, _ = b.Values() b.lastTime, _ = b.Values()
case chunkenc.ValHistogram:
b.lastTime, _ = b.HistogramValues()
case chunkenc.ValFloatHistogram:
b.lastTime, _ = b.FloatHistogramValues()
default:
panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType))
} }
} }
if b.lastTime >= t { if b.lastTime >= t {
return true return b.valueType
} }
for b.Next() { for {
if b.lastTime >= t { if b.valueType = b.Next(); b.valueType == chunkenc.ValNone || b.lastTime >= t {
return true return b.valueType
} }
} }
return false
} }
// Next advances the iterator to the next element. // Next advances the iterator to the next element.
func (b *BufferedSeriesIterator) Next() bool { func (b *BufferedSeriesIterator) Next() chunkenc.ValueType {
if !b.ok {
return false
}
// Add current element to buffer before advancing. // Add current element to buffer before advancing.
if b.it.ChunkEncoding() == chunkenc.EncHistogram { switch b.valueType {
t, h := b.it.AtHistogram() case chunkenc.ValNone:
b.buf.add(sample{t: t, h: h}) return chunkenc.ValNone
} else { case chunkenc.ValFloat:
t, v := b.it.At() t, v := b.it.At()
b.buf.add(sample{t: t, v: v}) b.buf.add(sample{t: t, v: v})
case chunkenc.ValHistogram:
t, h := b.it.AtHistogram()
b.buf.add(sample{t: t, h: h})
case chunkenc.ValFloatHistogram:
t, fh := b.it.AtFloatHistogram()
b.buf.add(sample{t: t, fh: fh})
default:
panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType))
} }
b.ok = b.it.Next() b.valueType = b.it.Next()
if b.ok { switch b.valueType {
if b.it.ChunkEncoding() == chunkenc.EncHistogram { case chunkenc.ValNone:
b.lastTime, _ = b.HistogramValues() // Do nothing.
} else { case chunkenc.ValFloat:
b.lastTime, _ = b.Values() b.lastTime, _ = b.Values()
case chunkenc.ValHistogram:
b.lastTime, _ = b.HistogramValues()
case chunkenc.ValFloatHistogram:
b.lastTime, _ = b.FloatHistogramValues()
default:
panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType))
} }
} return b.valueType
return b.ok
} }
// Values returns the current element of the iterator. // Values returns the current element of the iterator.
@ -148,9 +158,9 @@ func (b *BufferedSeriesIterator) HistogramValues() (int64, *histogram.Histogram)
return b.it.AtHistogram() return b.it.AtHistogram()
} }
// ChunkEncoding return the chunk encoding of the underlying iterator. // FloatHistogramValues returns the current float-histogram element of the iterator.
func (b *BufferedSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (b *BufferedSeriesIterator) FloatHistogramValues() (int64, *histogram.FloatHistogram) {
return b.it.ChunkEncoding() return b.it.AtFloatHistogram()
} }
// Err returns the last encountered error. // Err returns the last encountered error.
@ -158,10 +168,12 @@ func (b *BufferedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()
} }
// TODO(beorn7): Consider having different sample types for different value types.
type sample struct { type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram
} }
func (s sample) T() int64 { func (s sample) T() int64 {
@ -176,10 +188,24 @@ func (s sample) H() *histogram.Histogram {
return s.h return s.h
} }
func (s sample) FH() *histogram.FloatHistogram {
return s.fh
}
func (s sample) Type() chunkenc.ValueType {
switch {
case s.h != nil:
return chunkenc.ValHistogram
case s.fh != nil:
return chunkenc.ValFloatHistogram
default:
return chunkenc.ValFloat
}
}
type sampleRing struct { type sampleRing struct {
delta int64 delta int64
enc chunkenc.Encoding
buf []sample // lookback buffer buf []sample // lookback buffer
i int // position of most recent element in ring buffer i int // position of most recent element in ring buffer
f int // position of first element in ring buffer f int // position of first element in ring buffer
@ -188,8 +214,8 @@ type sampleRing struct {
it sampleRingIterator it sampleRingIterator
} }
func newSampleRing(delta int64, sz int, enc chunkenc.Encoding) *sampleRing { func newSampleRing(delta int64, sz int) *sampleRing {
r := &sampleRing{delta: delta, buf: make([]sample, sz), enc: enc} r := &sampleRing{delta: delta, buf: make([]sample, sz)}
r.reset() r.reset()
return r return r
@ -213,13 +239,24 @@ type sampleRingIterator struct {
i int i int
} }
func (it *sampleRingIterator) Next() bool { func (it *sampleRingIterator) Next() chunkenc.ValueType {
it.i++ it.i++
return it.i < it.r.l if it.i >= it.r.l {
return chunkenc.ValNone
}
s := it.r.at(it.i)
switch {
case s.h != nil:
return chunkenc.ValHistogram
case s.fh != nil:
return chunkenc.ValFloatHistogram
default:
return chunkenc.ValFloat
}
} }
func (it *sampleRingIterator) Seek(int64) bool { func (it *sampleRingIterator) Seek(int64) chunkenc.ValueType {
return false return chunkenc.ValNone
} }
func (it *sampleRingIterator) Err() error { func (it *sampleRingIterator) Err() error {
@ -227,30 +264,29 @@ func (it *sampleRingIterator) Err() error {
} }
func (it *sampleRingIterator) At() (int64, float64) { func (it *sampleRingIterator) At() (int64, float64) {
return it.r.at(it.i) s := it.r.at(it.i)
}
func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
return it.r.atHistogram(it.i)
}
func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding {
return it.r.enc
}
func (r *sampleRing) at(i int) (int64, float64) {
j := (r.f + i) % len(r.buf)
s := r.buf[j]
return s.t, s.v return s.t, s.v
} }
func (r *sampleRing) atHistogram(i int) (int64, *histogram.Histogram) { func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
j := (r.f + i) % len(r.buf) s := it.r.at(it.i)
s := r.buf[j]
return s.t, s.h return s.t, s.h
} }
func (r *sampleRing) atSample(i int) sample { func (it *sampleRingIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
s := it.r.at(it.i)
if s.fh == nil {
return s.t, s.h.ToFloat()
}
return s.t, s.fh
}
func (it *sampleRingIterator) AtT() int64 {
s := it.r.at(it.i)
return s.t
}
func (r *sampleRing) at(i int) sample {
j := (r.f + i) % len(r.buf) j := (r.f + i) % len(r.buf)
return r.buf[j] return r.buf[j]
} }
@ -320,7 +356,7 @@ func (r *sampleRing) nthLast(n int) (sample, bool) {
if n > r.l { if n > r.l {
return sample{}, false return sample{}, false
} }
return r.atSample(r.l - n), true return r.at(r.l - n), true
} }
func (r *sampleRing) samples() []sample { func (r *sampleRing) samples() []sample {

View File

@ -56,7 +56,7 @@ func TestSampleRing(t *testing.T) {
}, },
} }
for _, c := range cases { for _, c := range cases {
r := newSampleRing(c.delta, c.size, chunkenc.EncNone) r := newSampleRing(c.delta, c.size)
input := []sample{} input := []sample{}
for _, t := range c.input { for _, t := range c.input {
@ -95,7 +95,7 @@ func TestBufferedSeriesIterator(t *testing.T) {
bufferEq := func(exp []sample) { bufferEq := func(exp []sample) {
var b []sample var b []sample
bit := it.Buffer() bit := it.Buffer()
for bit.Next() { for bit.Next() == chunkenc.ValFloat {
t, v := bit.At() t, v := bit.At()
b = append(b, sample{t: t, v: v}) b = append(b, sample{t: t, v: v})
} }
@ -124,34 +124,34 @@ func TestBufferedSeriesIterator(t *testing.T) {
sample{t: 101, v: 10}, sample{t: 101, v: 10},
}), 2) }), 2)
require.True(t, it.Seek(-123), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed")
sampleEq(1, 2) sampleEq(1, 2)
prevSampleEq(0, 0, false) prevSampleEq(0, 0, false)
bufferEq(nil) bufferEq(nil)
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(2, 3) sampleEq(2, 3)
prevSampleEq(1, 2, true) prevSampleEq(1, 2, true)
bufferEq([]sample{{t: 1, v: 2}}) bufferEq([]sample{{t: 1, v: 2}})
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}})
require.True(t, it.Seek(5), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}})
require.True(t, it.Seek(101), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed")
sampleEq(101, 10) sampleEq(101, 10)
prevSampleEq(100, 9, true) prevSampleEq(100, 9, true)
bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}}) bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}})
require.False(t, it.Next(), "next succeeded unexpectedly") require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly")
} }
// At() should not be called once Next() returns false. // At() should not be called once Next() returns false.
@ -159,13 +159,18 @@ func TestBufferedSeriesIteratorNoBadAt(t *testing.T) {
done := false done := false
m := &mockSeriesIterator{ m := &mockSeriesIterator{
seek: func(int64) bool { return false }, seek: func(int64) chunkenc.ValueType { return chunkenc.ValNone },
at: func() (int64, float64) { at: func() (int64, float64) {
require.False(t, done, "unexpectedly done") require.False(t, done, "unexpectedly done")
done = true done = true
return 0, 0 return 0, 0
}, },
next: func() bool { return !done }, next: func() chunkenc.ValueType {
if done {
return chunkenc.ValNone
}
return chunkenc.ValFloat
},
err: func() error { return nil }, err: func() error { return nil },
} }
@ -182,30 +187,35 @@ func BenchmarkBufferedSeriesIterator(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
b.ResetTimer() b.ResetTimer()
for it.Next() { for it.Next() != chunkenc.ValNone {
// scan everything // scan everything
} }
require.NoError(b, it.Err()) require.NoError(b, it.Err())
} }
type mockSeriesIterator struct { type mockSeriesIterator struct {
seek func(int64) bool seek func(int64) chunkenc.ValueType
at func() (int64, float64) at func() (int64, float64)
next func() bool next func() chunkenc.ValueType
err func() error err func() error
} }
func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) } func (m *mockSeriesIterator) Seek(t int64) chunkenc.ValueType { return m.seek(t) }
func (m *mockSeriesIterator) At() (int64, float64) { return m.at() } func (m *mockSeriesIterator) At() (int64, float64) { return m.at() }
func (m *mockSeriesIterator) Next() chunkenc.ValueType { return m.next() }
func (m *mockSeriesIterator) Err() error { return m.err() }
func (m *mockSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (m *mockSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, nil return 0, nil // Not really mocked.
} }
func (m *mockSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (m *mockSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return chunkenc.EncXOR return 0, nil // Not really mocked.
}
func (m *mockSeriesIterator) AtT() int64 {
return 0 // Not really mocked.
} }
func (m *mockSeriesIterator) Next() bool { return m.next() }
func (m *mockSeriesIterator) Err() error { return m.err() }
type fakeSeriesIterator struct { type fakeSeriesIterator struct {
nsamples int64 nsamples int64
@ -225,18 +235,28 @@ func (it *fakeSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return it.idx * it.step, &histogram.Histogram{} // Value doesn't matter. return it.idx * it.step, &histogram.Histogram{} // Value doesn't matter.
} }
func (it *fakeSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *fakeSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return chunkenc.EncXOR return it.idx * it.step, &histogram.FloatHistogram{} // Value doesn't matter.
} }
func (it *fakeSeriesIterator) Next() bool { func (it *fakeSeriesIterator) AtT() int64 {
return it.idx * it.step
}
func (it *fakeSeriesIterator) Next() chunkenc.ValueType {
it.idx++ it.idx++
return it.idx < it.nsamples if it.idx >= it.nsamples {
return chunkenc.ValNone
}
return chunkenc.ValFloat
} }
func (it *fakeSeriesIterator) Seek(t int64) bool { func (it *fakeSeriesIterator) Seek(t int64) chunkenc.ValueType {
it.idx = t / it.step it.idx = t / it.step
return it.idx < it.nsamples if it.idx >= it.nsamples {
return chunkenc.ValNone
}
return chunkenc.ValFloat
} }
func (it *fakeSeriesIterator) Err() error { return nil } func (it *fakeSeriesIterator) Err() error { return nil }

View File

@ -23,6 +23,7 @@ import (
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/util/teststorage" "github.com/prometheus/prometheus/util/teststorage"
) )
@ -90,7 +91,7 @@ func TestFanout_SelectSorted(t *testing.T) {
seriesLabels := series.Labels() seriesLabels := series.Labels()
labelsResult = seriesLabels labelsResult = seriesLabels
iterator := series.Iterator() iterator := series.Iterator()
for iterator.Next() { for iterator.Next() == chunkenc.ValFloat {
timestamp, value := iterator.At() timestamp, value := iterator.At()
result[timestamp] = value result[timestamp] = value
} }
@ -116,7 +117,7 @@ func TestFanout_SelectSorted(t *testing.T) {
seriesLabels := series.Labels() seriesLabels := series.Labels()
labelsResult = seriesLabels labelsResult = seriesLabels
iterator := series.Iterator() iterator := series.Iterator()
for iterator.Next() { for iterator.Next() == chunkenc.ValFloat {
timestamp, value := iterator.At() timestamp, value := iterator.At()
result[timestamp] = value result[timestamp] = value
} }

View File

@ -20,27 +20,23 @@ import (
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
) )
// ValueType defines the type of a value in the storage.
type ValueType int
const (
ValNone ValueType = iota
ValFloat
ValHistogram
)
// MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element. // MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element.
type MemoizedSeriesIterator struct { type MemoizedSeriesIterator struct {
it chunkenc.Iterator it chunkenc.Iterator
delta int64 delta int64
lastTime int64 lastTime int64
valueType ValueType valueType chunkenc.ValueType
// Keep track of the previously returned value. // Keep track of the previously returned value.
prevTime int64 prevTime int64
prevValue float64 prevValue float64
prevHistogram *histogram.Histogram prevHistogram *histogram.Histogram
prevFloatHistogram *histogram.FloatHistogram
// TODO(beorn7): MemoizedSeriesIterator is currently only used by the
// PromQL engine, which only works with FloatHistograms. For better
// performance, we could change MemoizedSeriesIterator to also only
// handle FloatHistograms.
} }
// NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator. // NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator.
@ -65,25 +61,20 @@ func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) {
b.it = it b.it = it
b.lastTime = math.MinInt64 b.lastTime = math.MinInt64
b.prevTime = math.MinInt64 b.prevTime = math.MinInt64
it.Next() b.valueType = it.Next()
if it.ChunkEncoding() == chunkenc.EncHistogram {
b.valueType = ValHistogram
} else {
b.valueType = ValFloat
}
} }
// PeekPrev returns the previous element of the iterator. If there is none buffered, // PeekPrev returns the previous element of the iterator. If there is none buffered,
// ok is false. // ok is false.
func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, h *histogram.Histogram, ok bool) { func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ok bool) {
if b.prevTime == math.MinInt64 { if b.prevTime == math.MinInt64 {
return 0, 0, nil, false return 0, 0, nil, nil, false
} }
return b.prevTime, b.prevValue, b.prevHistogram, true return b.prevTime, b.prevValue, b.prevHistogram, b.prevFloatHistogram, true
} }
// Seek advances the iterator to the element at time t or greater. // Seek advances the iterator to the element at time t or greater.
func (b *MemoizedSeriesIterator) Seek(t int64) ValueType { func (b *MemoizedSeriesIterator) Seek(t int64) chunkenc.ValueType {
t0 := t - b.delta t0 := t - b.delta
if t0 > b.lastTime { if t0 > b.lastTime {
@ -91,59 +82,47 @@ func (b *MemoizedSeriesIterator) Seek(t int64) ValueType {
// more than the delta. // more than the delta.
b.prevTime = math.MinInt64 b.prevTime = math.MinInt64
ok := b.it.Seek(t0) b.valueType = b.it.Seek(t0)
if !ok { if b.valueType == chunkenc.ValNone {
b.valueType = ValNone return chunkenc.ValNone
return ValNone
} }
if b.it.ChunkEncoding() == chunkenc.EncHistogram { b.lastTime = b.it.AtT()
b.valueType = ValHistogram
b.lastTime, _ = b.it.AtHistogram()
} else {
b.valueType = ValFloat
b.lastTime, _ = b.it.At()
} }
}
if b.lastTime >= t { if b.lastTime >= t {
return b.valueType return b.valueType
} }
for b.Next() != ValNone { for b.Next() != chunkenc.ValNone {
if b.lastTime >= t { if b.lastTime >= t {
return b.valueType return b.valueType
} }
} }
return ValNone return chunkenc.ValNone
} }
// Next advances the iterator to the next element. // Next advances the iterator to the next element.
func (b *MemoizedSeriesIterator) Next() ValueType { func (b *MemoizedSeriesIterator) Next() chunkenc.ValueType {
if b.valueType == ValNone {
return ValNone
}
// Keep track of the previous element. // Keep track of the previous element.
if b.it.ChunkEncoding() == chunkenc.EncHistogram { switch b.valueType {
b.prevTime, b.prevHistogram = b.it.AtHistogram() case chunkenc.ValNone:
b.prevValue = 0 return chunkenc.ValNone
} else { case chunkenc.ValFloat:
b.prevTime, b.prevValue = b.it.At() b.prevTime, b.prevValue = b.it.At()
b.prevHistogram = nil b.prevHistogram = nil
b.prevFloatHistogram = nil
case chunkenc.ValHistogram:
b.prevValue = 0
b.prevTime, b.prevHistogram = b.it.AtHistogram()
_, b.prevFloatHistogram = b.it.AtFloatHistogram()
case chunkenc.ValFloatHistogram:
b.prevValue = 0
b.prevHistogram = nil
b.prevTime, b.prevFloatHistogram = b.it.AtFloatHistogram()
} }
ok := b.it.Next() b.valueType = b.it.Next()
if ok { if b.valueType != chunkenc.ValNone {
if b.it.ChunkEncoding() == chunkenc.EncHistogram { b.lastTime = b.it.AtT()
b.lastTime, _ = b.it.AtHistogram()
b.valueType = ValHistogram
} else {
b.lastTime, _ = b.it.At()
b.valueType = ValFloat
}
} else {
b.valueType = ValNone
} }
return b.valueType return b.valueType
} }
@ -158,6 +137,11 @@ func (b *MemoizedSeriesIterator) HistogramValues() (int64, *histogram.Histogram)
return b.it.AtHistogram() return b.it.AtHistogram()
} }
// Values returns the current element of the iterator.
func (b *MemoizedSeriesIterator) FloatHistogramValues() (int64, *histogram.FloatHistogram) {
return b.it.AtFloatHistogram()
}
// Err returns the last encountered error. // Err returns the last encountered error.
func (b *MemoizedSeriesIterator) Err() error { func (b *MemoizedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()

View File

@ -17,6 +17,8 @@ import (
"testing" "testing"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
func TestMemoizedSeriesIterator(t *testing.T) { func TestMemoizedSeriesIterator(t *testing.T) {
@ -29,7 +31,7 @@ func TestMemoizedSeriesIterator(t *testing.T) {
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
} }
prevSampleEq := func(ets int64, ev float64, eok bool) { prevSampleEq := func(ets int64, ev float64, eok bool) {
ts, v, _, ok := it.PeekPrev() ts, v, _, _, ok := it.PeekPrev()
require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ets, ts, "timestamp mismatch")
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
@ -46,29 +48,29 @@ func TestMemoizedSeriesIterator(t *testing.T) {
sample{t: 101, v: 10}, sample{t: 101, v: 10},
}), 2) }), 2)
require.Equal(t, it.Seek(-123), ValFloat, "seek failed") require.Equal(t, it.Seek(-123), chunkenc.ValFloat, "seek failed")
sampleEq(1, 2) sampleEq(1, 2)
prevSampleEq(0, 0, false) prevSampleEq(0, 0, false)
require.Equal(t, it.Next(), ValFloat, "next failed") require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed")
sampleEq(2, 3) sampleEq(2, 3)
prevSampleEq(1, 2, true) prevSampleEq(1, 2, true)
require.Equal(t, it.Next(), ValFloat, "next failed") require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed")
require.Equal(t, it.Next(), ValFloat, "next failed") require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed")
require.Equal(t, it.Next(), ValFloat, "next failed") require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
require.Equal(t, it.Seek(5), ValFloat, "seek failed") require.Equal(t, it.Seek(5), chunkenc.ValFloat, "seek failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
require.Equal(t, it.Seek(101), ValFloat, "seek failed") require.Equal(t, it.Seek(101), chunkenc.ValFloat, "seek failed")
sampleEq(101, 10) sampleEq(101, 10)
prevSampleEq(100, 9, true) prevSampleEq(100, 9, true)
require.Equal(t, it.Next(), ValNone, "next succeeded unexpectedly") require.Equal(t, it.Next(), chunkenc.ValNone, "next succeeded unexpectedly")
} }
func BenchmarkMemoizedSeriesIterator(b *testing.B) { func BenchmarkMemoizedSeriesIterator(b *testing.B) {
@ -79,7 +81,7 @@ func BenchmarkMemoizedSeriesIterator(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
b.ResetTimer() b.ResetTimer()
for it.Next() != ValNone { for it.Next() != chunkenc.ValNone {
// scan everything // scan everything
} }
require.NoError(b, it.Err()) require.NoError(b, it.Err())

View File

@ -443,7 +443,7 @@ type chainSampleIterator struct {
h samplesIteratorHeap h samplesIteratorHeap
curr chunkenc.Iterator curr chunkenc.Iterator
lastt int64 lastT int64
} }
// NewChainSampleIterator returns a single iterator that iterates over the samples from the given iterators in a sorted // NewChainSampleIterator returns a single iterator that iterates over the samples from the given iterators in a sorted
@ -453,77 +453,82 @@ func NewChainSampleIterator(iterators []chunkenc.Iterator) chunkenc.Iterator {
return &chainSampleIterator{ return &chainSampleIterator{
iterators: iterators, iterators: iterators,
h: nil, h: nil,
lastt: math.MinInt64, lastT: math.MinInt64,
} }
} }
func (c *chainSampleIterator) Seek(t int64) bool { func (c *chainSampleIterator) Seek(t int64) chunkenc.ValueType {
// No-op check.
if c.curr != nil && c.lastT >= t {
return c.curr.Seek(c.lastT)
}
c.h = samplesIteratorHeap{} c.h = samplesIteratorHeap{}
for _, iter := range c.iterators { for _, iter := range c.iterators {
if iter.Seek(t) { if iter.Seek(t) != chunkenc.ValNone {
heap.Push(&c.h, iter) heap.Push(&c.h, iter)
} }
} }
if len(c.h) > 0 { if len(c.h) > 0 {
c.curr = heap.Pop(&c.h).(chunkenc.Iterator) c.curr = heap.Pop(&c.h).(chunkenc.Iterator)
if c.curr.ChunkEncoding() == chunkenc.EncHistogram { c.lastT = c.curr.AtT()
c.lastt, _ = c.curr.AtHistogram() return c.curr.Seek(c.lastT)
} else {
c.lastt, _ = c.curr.At()
}
return true
} }
c.curr = nil c.curr = nil
return false return chunkenc.ValNone
} }
func (c *chainSampleIterator) At() (t int64, v float64) { func (c *chainSampleIterator) At() (t int64, v float64) {
if c.curr == nil { if c.curr == nil {
panic("chainSampleIterator.At() called before first .Next() or after .Next() returned false.") panic("chainSampleIterator.At called before first .Next or after .Next returned false.")
} }
return c.curr.At() return c.curr.At()
} }
func (c *chainSampleIterator) AtHistogram() (int64, *histogram.Histogram) { func (c *chainSampleIterator) AtHistogram() (int64, *histogram.Histogram) {
if c.curr == nil { if c.curr == nil {
panic("chainSampleIterator.AtHistogram() called before first .Next() or after .Next() returned false.") panic("chainSampleIterator.AtHistogram called before first .Next or after .Next returned false.")
} }
return c.curr.AtHistogram() return c.curr.AtHistogram()
} }
func (c *chainSampleIterator) ChunkEncoding() chunkenc.Encoding { func (c *chainSampleIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
if c.curr == nil { if c.curr == nil {
panic("chainSampleIterator.ChunkEncoding() called before first .Next() or after .Next() returned false.") panic("chainSampleIterator.AtFloatHistogram called before first .Next or after .Next returned false.")
} }
return c.curr.ChunkEncoding() return c.curr.AtFloatHistogram()
} }
func (c *chainSampleIterator) Next() bool { func (c *chainSampleIterator) AtT() int64 {
if c.curr == nil {
panic("chainSampleIterator.AtT called before first .Next or after .Next returned false.")
}
return c.curr.AtT()
}
func (c *chainSampleIterator) Next() chunkenc.ValueType {
if c.h == nil { if c.h == nil {
c.h = samplesIteratorHeap{} c.h = samplesIteratorHeap{}
// We call c.curr.Next() as the first thing below. // We call c.curr.Next() as the first thing below.
// So, we don't call Next() on it here. // So, we don't call Next() on it here.
c.curr = c.iterators[0] c.curr = c.iterators[0]
for _, iter := range c.iterators[1:] { for _, iter := range c.iterators[1:] {
if iter.Next() { if iter.Next() != chunkenc.ValNone {
heap.Push(&c.h, iter) heap.Push(&c.h, iter)
} }
} }
} }
if c.curr == nil { if c.curr == nil {
return false return chunkenc.ValNone
} }
var currt int64 var currT int64
var currValueType chunkenc.ValueType
for { for {
if c.curr.Next() { currValueType = c.curr.Next()
if c.curr.ChunkEncoding() == chunkenc.EncHistogram { if currValueType != chunkenc.ValNone {
currt, _ = c.curr.AtHistogram() currT = c.curr.AtT()
} else { if currT == c.lastT {
currt, _ = c.curr.At()
}
if currt == c.lastt {
// Ignoring sample for the same timestamp. // Ignoring sample for the same timestamp.
continue continue
} }
@ -534,13 +539,8 @@ func (c *chainSampleIterator) Next() bool {
} }
// Check current iterator with the top of the heap. // Check current iterator with the top of the heap.
var nextt int64 nextT := c.h[0].AtT()
if c.h[0].ChunkEncoding() == chunkenc.EncHistogram { if currT < nextT {
nextt, _ = c.h[0].AtHistogram()
} else {
nextt, _ = c.h[0].At()
}
if currt < nextt {
// Current iterator has smaller timestamp than the heap. // Current iterator has smaller timestamp than the heap.
break break
} }
@ -549,22 +549,19 @@ func (c *chainSampleIterator) Next() bool {
} else if len(c.h) == 0 { } else if len(c.h) == 0 {
// No iterator left to iterate. // No iterator left to iterate.
c.curr = nil c.curr = nil
return false return chunkenc.ValNone
} }
c.curr = heap.Pop(&c.h).(chunkenc.Iterator) c.curr = heap.Pop(&c.h).(chunkenc.Iterator)
if c.curr.ChunkEncoding() == chunkenc.EncHistogram { currT = c.curr.AtT()
currt, _ = c.curr.AtHistogram() currValueType = c.curr.Seek(currT)
} else { if currT != c.lastT {
currt, _ = c.curr.At()
}
if currt != c.lastt {
break break
} }
} }
c.lastt = currt c.lastT = currT
return true return currValueType
} }
func (c *chainSampleIterator) Err() error { func (c *chainSampleIterator) Err() error {
@ -581,18 +578,7 @@ func (h samplesIteratorHeap) Len() int { return len(h) }
func (h samplesIteratorHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] } func (h samplesIteratorHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
func (h samplesIteratorHeap) Less(i, j int) bool { func (h samplesIteratorHeap) Less(i, j int) bool {
var at, bt int64 return h[i].AtT() < h[j].AtT()
if h[i].ChunkEncoding() == chunkenc.EncHistogram {
at, _ = h[i].AtHistogram()
} else {
at, _ = h[i].At()
}
if h[j].ChunkEncoding() == chunkenc.EncHistogram {
bt, _ = h[j].AtHistogram()
} else {
bt, _ = h[j].At()
}
return at < bt
} }
func (h *samplesIteratorHeap) Push(x interface{}) { func (h *samplesIteratorHeap) Push(x interface{}) {

View File

@ -62,116 +62,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
), ),
}, },
{ {
name: "two queriers, one different series each", name: "two queriers, one different series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
), ),
}, },
{ {
name: "two time unsorted queriers, two series each", name: "two time unsorted queriers, two series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "five queriers, only two queriers have two time unsorted series each", name: "five queriers, only two queriers have two time unsorted series each",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together", name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}),
}, {}}, }, {}},
extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()}, extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "two queriers, with two series, one is overlapping", name: "two queriers, with two series, one is overlapping",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 22, nil}, sample{3, 32, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 22, nil, nil}, sample{3, 32, nil, nil}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}, sample{1, 1, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}, sample{1, 1, nil, nil}}),
), ),
}, },
} { } {
@ -245,108 +245,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
), ),
}, },
{ {
name: "two secondaries, one different series each", name: "two secondaries, one different series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
), ),
}, },
{ {
name: "two secondaries, two not in time order series each", name: "two secondaries, two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{5, 5, nil, nil}},
[]tsdbutil.Sample{sample{6, 6, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}},
[]tsdbutil.Sample{sample{2, 2, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{4, 4, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "five secondaries, only two have two not in time order series each", name: "five secondaries, only two have two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{}, {}, { chkQuerierSeries: [][]ChunkSeries{{}, {}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}),
}, {}}, }, {}},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{5, 5, nil, nil}},
[]tsdbutil.Sample{sample{6, 6, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}},
[]tsdbutil.Sample{sample{2, 2, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{4, 4, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together", name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}),
}}, }},
extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()}, extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{5, 5, nil, nil}},
[]tsdbutil.Sample{sample{6, 6, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}},
[]tsdbutil.Sample{sample{2, 2, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}},
[]tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{4, 4, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}, []tsdbutil.Sample{sample{1, 1, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}}),
), ),
}, },
} { } {
@ -399,9 +399,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "single series", name: "single series",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}),
}, },
{ {
name: "two empty series", name: "two empty series",
@ -414,55 +414,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "two non overlapping", name: "two non overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{5, 5, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}),
}, },
{ {
name: "two overlapping", name: "two overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{8, 8, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{8, 8, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{7, 7, nil}, sample{8, 8, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{7, 7, nil, nil}, sample{8, 8, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}),
}, },
{ {
name: "two duplicated", name: "two duplicated",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
}, },
{ {
name: "three overlapping", name: "three overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{6, 6, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{6, 6, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{4, 4, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{4, 4, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 6, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}),
}, },
{ {
name: "three in chained overlap", name: "three in chained overlap",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil}, sample{6, 66, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{6, 66, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil}, sample{10, 10, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil, nil}, sample{10, 10, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 66, nil}, sample{10, 10, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 66, nil, nil}, sample{10, 10, nil, nil}}),
}, },
{ {
name: "three in chained overlap complex", name: "three in chained overlap complex",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{10, 10, nil}, sample{15, 15, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}, sample{15, 15, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{20, 20, nil}}, []tsdbutil.Sample{sample{25, 25, nil}, sample{30, 30, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{20, 20, nil, nil}}, []tsdbutil.Sample{sample{25, 25, nil, nil}, sample{30, 30, nil, nil}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil}, sample{26, 26, nil}}, []tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil, nil}, sample{26, 26, nil, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{0, 0, nil}, sample{2, 2, nil}, sample{5, 5, nil}, sample{10, 10, nil}, sample{15, 15, nil}, sample{18, 18, nil}, sample{20, 20, nil}, sample{25, 25, nil}, sample{26, 26, nil}, sample{30, 30, nil}}, []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{5, 5, nil, nil}, sample{10, 10, nil, nil}, sample{15, 15, nil, nil}, sample{18, 18, nil, nil}, sample{20, 20, nil, nil}, sample{25, 25, nil, nil}, sample{26, 26, nil, nil}, sample{30, 30, nil, nil}},
[]tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}},
), ),
}, },
{ {
@ -598,38 +598,38 @@ func TestChainSampleIterator(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}),
NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}), NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}),
NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}), NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}),
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil},
}, },
}, },
// Overlap. // Overlap.
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}),
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}}),
NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
}, },
} { } {
merged := NewChainSampleIterator(tc.input) merged := NewChainSampleIterator(tc.input)
@ -647,42 +647,42 @@ func TestChainSampleIteratorSeek(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, },
seek: 1, seek: 1,
expected: []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, expected: []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}),
NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}}, expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}), NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}),
NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}), NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}}, expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}),
NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}),
}, },
seek: 0, seek: 0,
expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
}, },
} { } {
merged := NewChainSampleIterator(tc.input) merged := NewChainSampleIterator(tc.input)
actual := []tsdbutil.Sample{} actual := []tsdbutil.Sample{}
if merged.Seek(tc.seek) { if merged.Seek(tc.seek) == chunkenc.ValFloat {
t, v := merged.At() t, v := merged.At()
actual = append(actual, sample{t, v, nil}) actual = append(actual, sample{t, v, nil, nil})
} }
s, err := ExpandSamples(merged, nil) s, err := ExpandSamples(merged, nil)
require.NoError(t, err) require.NoError(t, err)

View File

@ -120,7 +120,8 @@ func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult,
iter := series.Iterator() iter := series.Iterator()
samples := []prompb.Sample{} samples := []prompb.Sample{}
for iter.Next() { for iter.Next() == chunkenc.ValFloat {
// TODO(beorn7): Add Histogram support.
numSamples++ numSamples++
if sampleLimit > 0 && numSamples > sampleLimit { if sampleLimit > 0 && numSamples > sampleLimit {
return nil, ss.Warnings(), HTTPError{ return nil, ss.Warnings(), HTTPError{
@ -357,14 +358,26 @@ func newConcreteSeriersIterator(series *concreteSeries) chunkenc.Iterator {
} }
// Seek implements storage.SeriesIterator. // Seek implements storage.SeriesIterator.
func (c *concreteSeriesIterator) Seek(t int64) bool { func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType {
c.cur = sort.Search(len(c.series.samples), func(n int) bool { if c.cur == -1 {
return c.series.samples[n].Timestamp >= t c.cur = 0
}
// No-op check.
if s := c.series.samples[c.cur]; s.Timestamp >= t {
return chunkenc.ValFloat
}
// Do binary search between current position and end.
c.cur += sort.Search(len(c.series.samples)-c.cur, func(n int) bool {
return c.series.samples[n+c.cur].Timestamp >= t
}) })
return c.cur < len(c.series.samples) if c.cur < len(c.series.samples) {
return chunkenc.ValFloat
}
return chunkenc.ValNone
// TODO(beorn7): Add histogram support.
} }
// At implements storage.SeriesIterator. // At implements chunkenc.Iterator.
func (c *concreteSeriesIterator) At() (t int64, v float64) { func (c *concreteSeriesIterator) At() (t int64, v float64) {
s := c.series.samples[c.cur] s := c.series.samples[c.cur]
return s.Timestamp, s.Value return s.Timestamp, s.Value
@ -377,17 +390,30 @@ func (c *concreteSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, nil return 0, nil
} }
func (c *concreteSeriesIterator) ChunkEncoding() chunkenc.Encoding { // AtFloatHistogram always returns (0, nil) because there is no support for histogram
return chunkenc.EncXOR // values yet.
// TODO(beorn7): Fix that for histogram support in remote storage.
func (c *concreteSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return 0, nil
} }
// Next implements storage.SeriesIterator. // AtT implements chunkenc.Iterator.
func (c *concreteSeriesIterator) Next() bool { func (c *concreteSeriesIterator) AtT() int64 {
s := c.series.samples[c.cur]
return s.Timestamp
}
// Next implements chunkenc.Iterator.
func (c *concreteSeriesIterator) Next() chunkenc.ValueType {
c.cur++ c.cur++
return c.cur < len(c.series.samples) if c.cur < len(c.series.samples) {
return chunkenc.ValFloat
}
return chunkenc.ValNone
// TODO(beorn7): Add histogram support.
} }
// Err implements storage.SeriesIterator. // Err implements chunkenc.Iterator.
func (c *concreteSeriesIterator) Err() error { func (c *concreteSeriesIterator) Err() error {
return nil return nil
} }

View File

@ -96,26 +96,42 @@ func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return s.T(), s.H() return s.T(), s.H()
} }
func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *listSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return chunkenc.EncXOR s := it.samples.Get(it.idx)
return s.T(), s.FH()
} }
func (it *listSeriesIterator) Next() bool { func (it *listSeriesIterator) AtT() int64 {
s := it.samples.Get(it.idx)
return s.T()
}
func (it *listSeriesIterator) Next() chunkenc.ValueType {
it.idx++ it.idx++
return it.idx < it.samples.Len() if it.idx >= it.samples.Len() {
return chunkenc.ValNone
}
return it.samples.Get(it.idx).Type()
} }
func (it *listSeriesIterator) Seek(t int64) bool { func (it *listSeriesIterator) Seek(t int64) chunkenc.ValueType {
if it.idx == -1 { if it.idx == -1 {
it.idx = 0 it.idx = 0
} }
// No-op check.
if s := it.samples.Get(it.idx); s.T() >= t {
return s.Type()
}
// Do binary search between current position and end. // Do binary search between current position and end.
it.idx = sort.Search(it.samples.Len()-it.idx, func(i int) bool { it.idx += sort.Search(it.samples.Len()-it.idx, func(i int) bool {
s := it.samples.Get(i + it.idx) s := it.samples.Get(i + it.idx)
return s.T() >= t return s.T() >= t
}) })
return it.idx < it.samples.Len() if it.idx >= it.samples.Len() {
return chunkenc.ValNone
}
return it.samples.Get(it.idx).Type()
} }
func (it *listSeriesIterator) Err() error { return nil } func (it *listSeriesIterator) Err() error { return nil }
@ -233,6 +249,7 @@ func NewSeriesToChunkEncoder(series Series) ChunkSeries {
} }
func (s *seriesToChunkEncoder) Iterator() chunks.Iterator { func (s *seriesToChunkEncoder) Iterator() chunks.Iterator {
// TODO(beorn7): Add Histogram support.
chk := chunkenc.NewXORChunk() chk := chunkenc.NewXORChunk()
app, err := chk.Appender() app, err := chk.Appender()
if err != nil { if err != nil {
@ -245,7 +262,7 @@ func (s *seriesToChunkEncoder) Iterator() chunks.Iterator {
i := 0 i := 0
seriesIter := s.Series.Iterator() seriesIter := s.Series.Iterator()
for seriesIter.Next() { for seriesIter.Next() == chunkenc.ValFloat {
// Create a new chunk if too many samples in the current one. // Create a new chunk if too many samples in the current one.
if i >= seriesToChunkEncoderSplit { if i >= seriesToChunkEncoderSplit {
chks = append(chks, chunks.Meta{ chks = append(chks, chunks.Meta{
@ -296,27 +313,34 @@ func (e errChunksIterator) Err() error { return e.err }
// ExpandSamples iterates over all samples in the iterator, buffering all in slice. // ExpandSamples iterates over all samples in the iterator, buffering all in slice.
// Optionally it takes samples constructor, useful when you want to compare sample slices with different // Optionally it takes samples constructor, useful when you want to compare sample slices with different
// sample implementations. if nil, sample type from this package will be used. // sample implementations. if nil, sample type from this package will be used.
func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) { func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) {
if newSampleFn == nil { if newSampleFn == nil {
newSampleFn = func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} } newSampleFn = func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample {
return sample{t, v, h, fh}
}
} }
var result []tsdbutil.Sample var result []tsdbutil.Sample
for iter.Next() { for {
// Only after Next() returned true, it is safe to ask for the ChunkEncoding. switch iter.Next() {
if iter.ChunkEncoding() == chunkenc.EncHistogram { case chunkenc.ValNone:
t, h := iter.AtHistogram() return result, iter.Err()
result = append(result, newSampleFn(t, 0, h)) case chunkenc.ValFloat:
} else {
t, v := iter.At() t, v := iter.At()
// NaNs can't be compared normally, so substitute for another value. // NaNs can't be compared normally, so substitute for another value.
if math.IsNaN(v) { if math.IsNaN(v) {
v = -42 v = -42
} }
result = append(result, newSampleFn(t, v, nil)) result = append(result, newSampleFn(t, v, nil, nil))
case chunkenc.ValHistogram:
t, h := iter.AtHistogram()
result = append(result, newSampleFn(t, 0, h, nil))
case chunkenc.ValFloatHistogram:
t, fh := iter.AtFloatHistogram()
result = append(result, newSampleFn(t, 0, nil, fh))
} }
} }
return result, iter.Err()
} }
// ExpandChunks iterates over all chunks in the iterator, buffering all in slice. // ExpandChunks iterates over all chunks in the iterator, buffering all in slice.

View File

@ -32,6 +32,7 @@ import (
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/chunks"
"github.com/prometheus/prometheus/tsdb/fileutil" "github.com/prometheus/prometheus/tsdb/fileutil"
"github.com/prometheus/prometheus/tsdb/tsdbutil" "github.com/prometheus/prometheus/tsdb/tsdbutil"
@ -179,7 +180,7 @@ func TestCorruptedChunk(t *testing.T) {
require.NoError(t, os.RemoveAll(tmpdir)) require.NoError(t, os.RemoveAll(tmpdir))
}() }()
series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1, nil}}) series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1, nil, nil}})
blockDir := createBlock(t, tmpdir, []storage.Series{series}) blockDir := createBlock(t, tmpdir, []storage.Series{series})
files, err := sequenceFiles(chunkDir(blockDir)) files, err := sequenceFiles(chunkDir(blockDir))
require.NoError(t, err) require.NoError(t, err)
@ -208,7 +209,7 @@ func TestCorruptedChunk(t *testing.T) {
// Check chunk errors during iter time. // Check chunk errors during iter time.
require.True(t, set.Next()) require.True(t, set.Next())
it := set.At().Iterator() it := set.At().Iterator()
require.Equal(t, false, it.Next()) require.Equal(t, chunkenc.ValNone, it.Next())
require.Equal(t, tc.iterErr.Error(), it.Err().Error()) require.Equal(t, tc.iterErr.Error(), it.Err().Error())
}) })
} }
@ -226,7 +227,7 @@ func TestLabelValuesWithMatchers(t *testing.T) {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
}, []tsdbutil.Sample{sample{100, 0, nil}})) }, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
} }
blockDir := createBlock(t, tmpdir, seriesEntries) blockDir := createBlock(t, tmpdir, seriesEntries)
@ -389,7 +390,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) {
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))}, {Name: "tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))},
{Name: "ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1" {Name: "ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1"
}, []tsdbutil.Sample{sample{100, 0, nil}})) }, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
} }
blockDir := createBlock(b, tmpdir, seriesEntries) blockDir := createBlock(b, tmpdir, seriesEntries)
@ -427,13 +428,13 @@ func TestLabelNamesWithMatchers(t *testing.T) {
for i := 0; i < 100; i++ { for i := 0; i < 100; i++ {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
}, []tsdbutil.Sample{sample{100, 0, nil}})) }, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
if i%10 == 0 { if i%10 == 0 {
seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
}, []tsdbutil.Sample{sample{100, 0, nil}})) }, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
} }
if i%20 == 0 { if i%20 == 0 {
@ -441,7 +442,7 @@ func TestLabelNamesWithMatchers(t *testing.T) {
{Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "unique", Value: fmt.Sprintf("value%d", i)},
{Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)},
{Name: "twenties", Value: fmt.Sprintf("value%d", i/20)}, {Name: "twenties", Value: fmt.Sprintf("value%d", i/20)},
}, []tsdbutil.Sample{sample{100, 0, nil}})) }, []tsdbutil.Sample{sample{100, 0, nil, nil}}))
} }
} }
@ -525,7 +526,8 @@ func createHead(tb testing.TB, w *wal.WAL, series []storage.Series, chunkDir str
ref := storage.SeriesRef(0) ref := storage.SeriesRef(0)
it := s.Iterator() it := s.Iterator()
lset := s.Labels() lset := s.Labels()
for it.Next() { for it.Next() == chunkenc.ValFloat {
// TODO(beorn7): Also treat histograms.
t, v := it.At() t, v := it.At()
ref, err = app.Append(ref, lset, t, v) ref, err = app.Append(ref, lset, t, v)
require.NoError(tb, err) require.NoError(tb, err)

View File

@ -25,6 +25,13 @@ import (
// Encoding is the identifier for a chunk encoding. // Encoding is the identifier for a chunk encoding.
type Encoding uint8 type Encoding uint8
// The different available chunk encodings.
const (
EncNone Encoding = iota
EncXOR
EncHistogram
)
func (e Encoding) String() string { func (e Encoding) String() string {
switch e { switch e {
case EncNone: case EncNone:
@ -46,13 +53,6 @@ func IsValidEncoding(e Encoding) bool {
return false return false
} }
// The different available chunk encodings.
const (
EncNone Encoding = iota
EncXOR
EncHistogram
)
// Chunk holds a sequence of sample pairs that can be iterated over and appended to. // Chunk holds a sequence of sample pairs that can be iterated over and appended to.
type Chunk interface { type Chunk interface {
// Bytes returns the underlying byte slice of the chunk. // Bytes returns the underlying byte slice of the chunk.
@ -89,26 +89,61 @@ type Appender interface {
// Iterator is a simple iterator that can only get the next value. // Iterator is a simple iterator that can only get the next value.
// Iterator iterates over the samples of a time series, in timestamp-increasing order. // Iterator iterates over the samples of a time series, in timestamp-increasing order.
type Iterator interface { type Iterator interface {
// Next advances the iterator by one. // Next advances the iterator by one and returns the type of the value
// TODO(beorn7): Perhaps this should return if the next value is a float or a histogram // at the new position (or ValNone if the iterator is exhausted).
// to make it easier calling the right method (At vs AtHistogram)? Next() ValueType
Next() bool // Seek advances the iterator forward to the first sample with a
// Seek advances the iterator forward to the first sample with the timestamp equal or greater than t. // timestamp equal or greater than t. If the current sample found by a
// If current sample found by previous `Next` or `Seek` operation already has this property, Seek has no effect. // previous `Next` or `Seek` operation already has this property, Seek
// Seek returns true, if such sample exists, false otherwise. // has no effect. If a sample has been found, Seek returns the type of
// Iterator is exhausted when the Seek returns false. // its value. Otherwise, it returns ValNone, after with the iterator is
Seek(t int64) bool // exhausted.
// At returns the current timestamp/value pair. Seek(t int64) ValueType
// Before the iterator has advanced At behaviour is unspecified. // At returns the current timestamp/value pair if the value is a float.
// Before the iterator has advanced, the behaviour is unspecified.
At() (int64, float64) At() (int64, float64)
// AtHistogram returns the current timestamp/histogram pair. // AtHistogram returns the current timestamp/value pair if the value is
// Before the iterator has advanced AtHistogram behaviour is unspecified. // a histogram with integer counts. Before the iterator has advanced,
// the behaviour is unspecified.
AtHistogram() (int64, *histogram.Histogram) AtHistogram() (int64, *histogram.Histogram)
// Err returns the current error. It should be used only after iterator is // AtFloatHistogram returns the current timestamp/value pair if the
// exhausted, that is `Next` or `Seek` returns false. // value is a histogram with floating-point counts. It also works if the
// value is a histogram with integer counts, in which case a
// FloatHistogram copy of the histogram is returned. Before the iterator
// has advanced, the behaviour is unspecified.
AtFloatHistogram() (int64, *histogram.FloatHistogram)
// AtT returns the current timestamp.
// Before the iterator has advanced, the behaviour is unspecified.
AtT() int64
// Err returns the current error. It should be used only after the
// iterator is exhausted, i.e. `Next` or `Seek` have returned ValNone.
Err() error Err() error
// ChunkEncoding returns the encoding of the chunk that it is iterating. }
ChunkEncoding() Encoding
// ValueType defines the type of a value an Iterator points to.
type ValueType uint8
// Possible values for ValueType.
const (
ValNone ValueType = iota // No value at the current position.
ValFloat // A simple float, retrieved with At.
ValHistogram // A histogram, retrieve with AtHistogram, but AtFloatHistogram works, too.
ValFloatHistogram // A floating-point histogram, retrive with AtFloatHistogram.
)
func (v ValueType) String() string {
switch v {
case ValNone:
return "none"
case ValFloat:
return "float"
case ValHistogram:
return "histogram"
case ValFloatHistogram:
return "floathistogram"
default:
return "unknown"
}
} }
// NewNopIterator returns a new chunk iterator that does not hold any data. // NewNopIterator returns a new chunk iterator that does not hold any data.
@ -118,14 +153,13 @@ func NewNopIterator() Iterator {
type nopIterator struct{} type nopIterator struct{}
func (nopIterator) Seek(int64) bool { return false } func (nopIterator) Next() ValueType { return ValNone }
func (nopIterator) Seek(int64) ValueType { return ValNone }
func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 } func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 }
func (nopIterator) AtHistogram() (int64, *histogram.Histogram) { func (nopIterator) AtHistogram() (int64, *histogram.Histogram) { return math.MinInt64, nil }
return math.MinInt64, nil func (nopIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { return math.MinInt64, nil }
} func (nopIterator) AtT() int64 { return math.MinInt64 }
func (nopIterator) Next() bool { return false }
func (nopIterator) Err() error { return nil } func (nopIterator) Err() error { return nil }
func (nopIterator) ChunkEncoding() Encoding { return EncNone }
// Pool is used to create and reuse chunk references to avoid allocations. // Pool is used to create and reuse chunk references to avoid allocations.
type Pool interface { type Pool interface {

View File

@ -71,7 +71,7 @@ func testChunk(t *testing.T, c Chunk) {
// 1. Expand iterator in simple case. // 1. Expand iterator in simple case.
it1 := c.Iterator(nil) it1 := c.Iterator(nil)
var res1 []pair var res1 []pair
for it1.Next() { for it1.Next() == ValFloat {
ts, v := it1.At() ts, v := it1.At()
res1 = append(res1, pair{t: ts, v: v}) res1 = append(res1, pair{t: ts, v: v})
} }
@ -81,7 +81,7 @@ func testChunk(t *testing.T, c Chunk) {
// 2. Expand second iterator while reusing first one. // 2. Expand second iterator while reusing first one.
it2 := c.Iterator(it1) it2 := c.Iterator(it1)
var res2 []pair var res2 []pair
for it2.Next() { for it2.Next() == ValFloat {
ts, v := it2.At() ts, v := it2.At()
res2 = append(res2, pair{t: ts, v: v}) res2 = append(res2, pair{t: ts, v: v})
} }
@ -93,20 +93,20 @@ func testChunk(t *testing.T, c Chunk) {
it3 := c.Iterator(nil) it3 := c.Iterator(nil)
var res3 []pair var res3 []pair
require.Equal(t, true, it3.Seek(exp[mid].t)) require.Equal(t, ValFloat, it3.Seek(exp[mid].t))
// Below ones should not matter. // Below ones should not matter.
require.Equal(t, true, it3.Seek(exp[mid].t)) require.Equal(t, ValFloat, it3.Seek(exp[mid].t))
require.Equal(t, true, it3.Seek(exp[mid].t)) require.Equal(t, ValFloat, it3.Seek(exp[mid].t))
ts, v = it3.At() ts, v = it3.At()
res3 = append(res3, pair{t: ts, v: v}) res3 = append(res3, pair{t: ts, v: v})
for it3.Next() { for it3.Next() == ValFloat {
ts, v := it3.At() ts, v := it3.At()
res3 = append(res3, pair{t: ts, v: v}) res3 = append(res3, pair{t: ts, v: v})
} }
require.NoError(t, it3.Err()) require.NoError(t, it3.Err())
require.Equal(t, exp[mid:], res3) require.Equal(t, exp[mid:], res3)
require.Equal(t, false, it3.Seek(exp[len(exp)-1].t+1)) require.Equal(t, ValNone, it3.Seek(exp[len(exp)-1].t+1))
} }
func benchmarkIterator(b *testing.B, newChunk func() Chunk) { func benchmarkIterator(b *testing.B, newChunk func() Chunk) {
@ -148,7 +148,7 @@ func benchmarkIterator(b *testing.B, newChunk func() Chunk) {
for i := 0; i < b.N; { for i := 0; i < b.N; {
it := chunk.Iterator(it) it := chunk.Iterator(it)
for it.Next() { for it.Next() == ValFloat {
_, v := it.At() _, v := it.At()
res = v res = v
i++ i++

View File

@ -121,7 +121,7 @@ func (c *HistogramChunk) Appender() (Appender, error) {
// To get an appender, we must know the state it would have if we had // To get an appender, we must know the state it would have if we had
// appended all existing data from scratch. We iterate through the end // appended all existing data from scratch. We iterate through the end
// and populate via the iterator's state. // and populate via the iterator's state.
for it.Next() { for it.Next() == ValHistogram {
} }
if err := it.Err(); err != nil { if err := it.Err(); err != nil {
return nil, err return nil, err
@ -385,16 +385,34 @@ func (a *HistogramAppender) AppendHistogram(t int64, h *histogram.Histogram) {
a.schema = h.Schema a.schema = h.Schema
a.zThreshold = h.ZeroThreshold a.zThreshold = h.ZeroThreshold
if len(h.PositiveSpans) > 0 {
a.pSpans = make([]histogram.Span, len(h.PositiveSpans)) a.pSpans = make([]histogram.Span, len(h.PositiveSpans))
copy(a.pSpans, h.PositiveSpans) copy(a.pSpans, h.PositiveSpans)
} else {
a.pSpans = nil
}
if len(h.NegativeSpans) > 0 {
a.nSpans = make([]histogram.Span, len(h.NegativeSpans)) a.nSpans = make([]histogram.Span, len(h.NegativeSpans))
copy(a.nSpans, h.NegativeSpans) copy(a.nSpans, h.NegativeSpans)
} else {
a.nSpans = nil
}
numPBuckets, numNBuckets := countSpans(h.PositiveSpans), countSpans(h.NegativeSpans) numPBuckets, numNBuckets := countSpans(h.PositiveSpans), countSpans(h.NegativeSpans)
if numPBuckets > 0 {
a.pBuckets = make([]int64, numPBuckets) a.pBuckets = make([]int64, numPBuckets)
a.nBuckets = make([]int64, numNBuckets)
a.pBucketsDelta = make([]int64, numPBuckets) a.pBucketsDelta = make([]int64, numPBuckets)
} else {
a.pBuckets = nil
a.pBucketsDelta = nil
}
if numNBuckets > 0 {
a.nBuckets = make([]int64, numNBuckets)
a.nBucketsDelta = make([]int64, numNBuckets) a.nBucketsDelta = make([]int64, numNBuckets)
} else {
a.nBuckets = nil
a.nBucketsDelta = nil
}
// Now store the actual data. // Now store the actual data.
putVarbitInt(a.b, t) putVarbitInt(a.b, t)
@ -505,15 +523,20 @@ func (a *HistogramAppender) Recode(
} }
numPositiveBuckets, numNegativeBuckets := countSpans(positiveSpans), countSpans(negativeSpans) numPositiveBuckets, numNegativeBuckets := countSpans(positiveSpans), countSpans(negativeSpans)
for it.Next() { for it.Next() == ValHistogram {
tOld, hOld := it.AtHistogram() tOld, hOld := it.AtHistogram()
// We have to newly allocate slices for the modified buckets // We have to newly allocate slices for the modified buckets
// here because they are kept by the appender until the next // here because they are kept by the appender until the next
// append. // append.
// TODO(beorn7): We might be able to optimize this. // TODO(beorn7): We might be able to optimize this.
positiveBuckets := make([]int64, numPositiveBuckets) var positiveBuckets, negativeBuckets []int64
negativeBuckets := make([]int64, numNegativeBuckets) if numPositiveBuckets > 0 {
positiveBuckets = make([]int64, numPositiveBuckets)
}
if numNegativeBuckets > 0 {
negativeBuckets = make([]int64, numNegativeBuckets)
}
// Save the modified histogram to the new chunk. // Save the modified histogram to the new chunk.
hOld.PositiveSpans, hOld.NegativeSpans = positiveSpans, negativeSpans hOld.PositiveSpans, hOld.NegativeSpans = positiveSpans, negativeSpans
@ -548,7 +571,8 @@ type histogramIterator struct {
t int64 t int64
cnt, zCnt uint64 cnt, zCnt uint64
tDelta, cntDelta, zCntDelta int64 tDelta, cntDelta, zCntDelta int64
pBuckets, nBuckets []int64 pBuckets, nBuckets []int64 // Delta between buckets.
pFloatBuckets, nFloatBuckets []float64 // Absolute counts.
pBucketsDelta, nBucketsDelta []int64 pBucketsDelta, nBucketsDelta []int64
// The sum is Gorilla xor encoded. // The sum is Gorilla xor encoded.
@ -556,34 +580,36 @@ type histogramIterator struct {
leading uint8 leading uint8
trailing uint8 trailing uint8
// Track calls to retrieve methods. Once they have been called, we
// cannot recycle the bucket slices anymore because we have returned
// them in the histogram.
atHistogramCalled, atFloatHistogramCalled bool
err error err error
} }
func (it *histogramIterator) Seek(t int64) bool { func (it *histogramIterator) Seek(t int64) ValueType {
if it.err != nil { if it.err != nil {
return false return ValNone
} }
for t > it.t || it.numRead == 0 { for t > it.t || it.numRead == 0 {
if !it.Next() { if it.Next() == ValNone {
return false return ValNone
} }
} }
return true return ValHistogram
} }
func (it *histogramIterator) At() (int64, float64) { func (it *histogramIterator) At() (int64, float64) {
panic("cannot call histogramIterator.At") panic("cannot call histogramIterator.At")
} }
func (it *histogramIterator) ChunkEncoding() Encoding {
return EncHistogram
}
func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) { func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) {
if value.IsStaleNaN(it.sum) { if value.IsStaleNaN(it.sum) {
return it.t, &histogram.Histogram{Sum: it.sum} return it.t, &histogram.Histogram{Sum: it.sum}
} }
it.atHistogramCalled = true
return it.t, &histogram.Histogram{ return it.t, &histogram.Histogram{
Count: it.cnt, Count: it.cnt,
ZeroCount: it.zCnt, ZeroCount: it.zCnt,
@ -597,6 +623,28 @@ func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) {
} }
} }
func (it *histogramIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
if value.IsStaleNaN(it.sum) {
return it.t, &histogram.FloatHistogram{Sum: it.sum}
}
it.atFloatHistogramCalled = true
return it.t, &histogram.FloatHistogram{
Count: float64(it.cnt),
ZeroCount: float64(it.zCnt),
Sum: it.sum,
ZeroThreshold: it.zThreshold,
Schema: it.schema,
PositiveSpans: it.pSpans,
NegativeSpans: it.nSpans,
PositiveBuckets: it.pFloatBuckets,
NegativeBuckets: it.nFloatBuckets,
}
}
func (it *histogramIterator) AtT() int64 {
return it.t
}
func (it *histogramIterator) Err() error { func (it *histogramIterator) Err() error {
return it.err return it.err
} }
@ -611,9 +659,24 @@ func (it *histogramIterator) Reset(b []byte) {
it.t, it.cnt, it.zCnt = 0, 0, 0 it.t, it.cnt, it.zCnt = 0, 0, 0
it.tDelta, it.cntDelta, it.zCntDelta = 0, 0, 0 it.tDelta, it.cntDelta, it.zCntDelta = 0, 0, 0
// Recycle slices that have not been returned yet. Otherwise, start from
// scratch.
if it.atHistogramCalled {
it.atHistogramCalled = false
it.pBuckets, it.nBuckets = nil, nil
} else {
it.pBuckets = it.pBuckets[:0] it.pBuckets = it.pBuckets[:0]
it.pBucketsDelta = it.pBucketsDelta[:0]
it.nBuckets = it.nBuckets[:0] it.nBuckets = it.nBuckets[:0]
}
if it.atFloatHistogramCalled {
it.atFloatHistogramCalled = false
it.pFloatBuckets, it.nFloatBuckets = nil, nil
} else {
it.pFloatBuckets = it.pFloatBuckets[:0]
it.nFloatBuckets = it.nFloatBuckets[:0]
}
it.pBucketsDelta = it.pBucketsDelta[:0]
it.pBucketsDelta = it.pBucketsDelta[:0] it.pBucketsDelta = it.pBucketsDelta[:0]
it.sum = 0 it.sum = 0
@ -622,9 +685,9 @@ func (it *histogramIterator) Reset(b []byte) {
it.err = nil it.err = nil
} }
func (it *histogramIterator) Next() bool { func (it *histogramIterator) Next() ValueType {
if it.err != nil || it.numRead == it.numTotal { if it.err != nil || it.numRead == it.numTotal {
return false return ValNone
} }
if it.numRead == 0 { if it.numRead == 0 {
@ -634,7 +697,7 @@ func (it *histogramIterator) Next() bool {
schema, zeroThreshold, posSpans, negSpans, err := readHistogramChunkLayout(&it.br) schema, zeroThreshold, posSpans, negSpans, err := readHistogramChunkLayout(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.schema = schema it.schema = schema
it.zThreshold = zeroThreshold it.zThreshold = zeroThreshold
@ -642,28 +705,32 @@ func (it *histogramIterator) Next() bool {
numPBuckets, numNBuckets := countSpans(posSpans), countSpans(negSpans) numPBuckets, numNBuckets := countSpans(posSpans), countSpans(negSpans)
// Allocate bucket slices as needed, recycling existing slices // Allocate bucket slices as needed, recycling existing slices
// in case this iterator was reset and already has slices of a // in case this iterator was reset and already has slices of a
// sufficient capacity.. // sufficient capacity.
if numPBuckets > 0 { if numPBuckets > 0 {
if cap(it.pBuckets) < numPBuckets { if cap(it.pBuckets) < numPBuckets {
it.pBuckets = make([]int64, numPBuckets) it.pBuckets = make([]int64, numPBuckets)
// If cap(it.pBuckets) isn't sufficient, neither is cap(it.pBucketsDelta). // If cap(it.pBuckets) isn't sufficient, neither is the cap of the others.
it.pBucketsDelta = make([]int64, numPBuckets) it.pBucketsDelta = make([]int64, numPBuckets)
it.pFloatBuckets = make([]float64, numPBuckets)
} else { } else {
for i := 0; i < numPBuckets; i++ { for i := 0; i < numPBuckets; i++ {
it.pBuckets = append(it.pBuckets, 0) it.pBuckets = append(it.pBuckets, 0)
it.pBucketsDelta = append(it.pBucketsDelta, 0) it.pBucketsDelta = append(it.pBucketsDelta, 0)
it.pFloatBuckets = append(it.pFloatBuckets, 0)
} }
} }
} }
if numNBuckets > 0 { if numNBuckets > 0 {
if cap(it.nBuckets) < numNBuckets { if cap(it.nBuckets) < numNBuckets {
it.nBuckets = make([]int64, numNBuckets) it.nBuckets = make([]int64, numNBuckets)
// If cap(it.nBuckets) isn't sufficient, neither is cap(it.nBucketsDelta). // If cap(it.nBuckets) isn't sufficient, neither is the cap of the others.
it.nBucketsDelta = make([]int64, numNBuckets) it.nBucketsDelta = make([]int64, numNBuckets)
it.nFloatBuckets = make([]float64, numNBuckets)
} else { } else {
for i := 0; i < numNBuckets; i++ { for i := 0; i < numNBuckets; i++ {
it.nBuckets = append(it.nBuckets, 0) it.nBuckets = append(it.nBuckets, 0)
it.nBucketsDelta = append(it.nBucketsDelta, 0) it.nBucketsDelta = append(it.nBucketsDelta, 0)
it.pFloatBuckets = append(it.pFloatBuckets, 0)
} }
} }
} }
@ -672,28 +739,28 @@ func (it *histogramIterator) Next() bool {
t, err := readVarbitInt(&it.br) t, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.t = t it.t = t
cnt, err := readVarbitUint(&it.br) cnt, err := readVarbitUint(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.cnt = cnt it.cnt = cnt
zcnt, err := readVarbitUint(&it.br) zcnt, err := readVarbitUint(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.zCnt = zcnt it.zCnt = zcnt
sum, err := it.br.readBits(64) sum, err := it.br.readBits(64)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.sum = math.Float64frombits(sum) it.sum = math.Float64frombits(sum)
@ -701,28 +768,64 @@ func (it *histogramIterator) Next() bool {
v, err := readVarbitInt(&it.br) v, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.pBuckets[i] = v it.pBuckets[i] = v
it.pFloatBuckets[i] = float64(v)
} }
for i := range it.nBuckets { for i := range it.nBuckets {
v, err := readVarbitInt(&it.br) v, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.nBuckets[i] = v it.nBuckets[i] = v
it.nFloatBuckets[i] = float64(v)
} }
it.numRead++ it.numRead++
return true return ValHistogram
}
// Recycle bucket slices that have not been returned yet. Otherwise,
// copy them.
if it.atHistogramCalled {
it.atHistogramCalled = false
if len(it.pBuckets) > 0 {
newBuckets := make([]int64, len(it.pBuckets))
copy(newBuckets, it.pBuckets)
it.pBuckets = newBuckets
} else {
it.pBuckets = nil
}
if len(it.nBuckets) > 0 {
newBuckets := make([]int64, len(it.nBuckets))
copy(newBuckets, it.nBuckets)
it.nBuckets = newBuckets
} else {
it.nBuckets = nil
}
}
// FloatBuckets are set from scratch, so simply create empty ones.
if it.atFloatHistogramCalled {
it.atFloatHistogramCalled = false
if len(it.pFloatBuckets) > 0 {
it.pFloatBuckets = make([]float64, len(it.pFloatBuckets))
} else {
it.pFloatBuckets = nil
}
if len(it.nFloatBuckets) > 0 {
it.nFloatBuckets = make([]float64, len(it.nFloatBuckets))
} else {
it.nFloatBuckets = nil
}
} }
if it.numRead == 1 { if it.numRead == 1 {
tDelta, err := readVarbitUint(&it.br) tDelta, err := readVarbitUint(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.tDelta = int64(tDelta) it.tDelta = int64(tDelta)
it.t += it.tDelta it.t += it.tDelta
@ -730,7 +833,7 @@ func (it *histogramIterator) Next() bool {
cntDelta, err := readVarbitInt(&it.br) cntDelta, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.cntDelta = cntDelta it.cntDelta = cntDelta
it.cnt = uint64(int64(it.cnt) + it.cntDelta) it.cnt = uint64(int64(it.cnt) + it.cntDelta)
@ -738,49 +841,55 @@ func (it *histogramIterator) Next() bool {
zcntDelta, err := readVarbitInt(&it.br) zcntDelta, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.zCntDelta = zcntDelta it.zCntDelta = zcntDelta
it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta) it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta)
ok := it.readSum() ok := it.readSum()
if !ok { if !ok {
return false return ValNone
} }
if value.IsStaleNaN(it.sum) { if value.IsStaleNaN(it.sum) {
it.numRead++ it.numRead++
return true return ValHistogram
} }
var current int64
for i := range it.pBuckets { for i := range it.pBuckets {
delta, err := readVarbitInt(&it.br) delta, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.pBucketsDelta[i] = delta it.pBucketsDelta[i] = delta
it.pBuckets[i] = it.pBuckets[i] + delta it.pBuckets[i] += delta
current += it.pBuckets[i]
it.pFloatBuckets[i] = float64(current)
} }
current = 0
for i := range it.nBuckets { for i := range it.nBuckets {
delta, err := readVarbitInt(&it.br) delta, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.nBucketsDelta[i] = delta it.nBucketsDelta[i] = delta
it.nBuckets[i] = it.nBuckets[i] + delta it.nBuckets[i] += delta
current += it.nBuckets[i]
it.nFloatBuckets[i] = float64(current)
} }
it.numRead++ it.numRead++
return true return ValHistogram
} }
tDod, err := readVarbitInt(&it.br) tDod, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.tDelta = it.tDelta + tDod it.tDelta = it.tDelta + tDod
it.t += it.tDelta it.t += it.tDelta
@ -788,7 +897,7 @@ func (it *histogramIterator) Next() bool {
cntDod, err := readVarbitInt(&it.br) cntDod, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.cntDelta = it.cntDelta + cntDod it.cntDelta = it.cntDelta + cntDod
it.cnt = uint64(int64(it.cnt) + it.cntDelta) it.cnt = uint64(int64(it.cnt) + it.cntDelta)
@ -796,43 +905,49 @@ func (it *histogramIterator) Next() bool {
zcntDod, err := readVarbitInt(&it.br) zcntDod, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.zCntDelta = it.zCntDelta + zcntDod it.zCntDelta = it.zCntDelta + zcntDod
it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta) it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta)
ok := it.readSum() ok := it.readSum()
if !ok { if !ok {
return false return ValNone
} }
if value.IsStaleNaN(it.sum) { if value.IsStaleNaN(it.sum) {
it.numRead++ it.numRead++
return true return ValHistogram
} }
var current int64
for i := range it.pBuckets { for i := range it.pBuckets {
dod, err := readVarbitInt(&it.br) dod, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.pBucketsDelta[i] = it.pBucketsDelta[i] + dod it.pBucketsDelta[i] += dod
it.pBuckets[i] = it.pBuckets[i] + it.pBucketsDelta[i] it.pBuckets[i] += it.pBucketsDelta[i]
current += it.pBuckets[i]
it.pFloatBuckets[i] = float64(current)
} }
current = 0
for i := range it.nBuckets { for i := range it.nBuckets {
dod, err := readVarbitInt(&it.br) dod, err := readVarbitInt(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.nBucketsDelta[i] = it.nBucketsDelta[i] + dod it.nBucketsDelta[i] += dod
it.nBuckets[i] = it.nBuckets[i] + it.nBucketsDelta[i] it.nBuckets[i] += it.nBucketsDelta[i]
current += it.nBuckets[i]
it.nFloatBuckets[i] = float64(current)
} }
it.numRead++ it.numRead++
return true return ValHistogram
} }
func (it *histogramIterator) readSum() bool { func (it *histogramIterator) readSum() bool {

View File

@ -76,9 +76,9 @@ func TestHistogramChunkSameBuckets(t *testing.T) {
it := c.iterator(nil) it := c.iterator(nil)
require.NoError(t, it.Err()) require.NoError(t, it.Err())
var act []res var act []res
for it.Next() { for it.Next() == ValHistogram {
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
act = append(act, res{t: ts, h: h.Copy()}) act = append(act, res{t: ts, h: h})
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())
require.Equal(t, exp, act) require.Equal(t, exp, act)
@ -188,9 +188,9 @@ func TestHistogramChunkBucketChanges(t *testing.T) {
} }
it := c.Iterator(nil) it := c.Iterator(nil)
var act []res var act []res
for it.Next() { for it.Next() == ValHistogram {
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
act = append(act, res{t: ts, h: h.Copy()}) act = append(act, res{t: ts, h: h})
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())
require.Equal(t, exp, act) require.Equal(t, exp, act)

View File

@ -97,7 +97,7 @@ func (c *XORChunk) Appender() (Appender, error) {
// To get an appender we must know the state it would have if we had // To get an appender we must know the state it would have if we had
// appended all existing data from scratch. // appended all existing data from scratch.
// We iterate through the end and populate via the iterator's state. // We iterate through the end and populate via the iterator's state.
for it.Next() { for it.Next() != ValNone {
} }
if err := it.Err(); err != nil { if err := it.Err(); err != nil {
return nil, err return nil, err
@ -238,17 +238,17 @@ type xorIterator struct {
err error err error
} }
func (it *xorIterator) Seek(t int64) bool { func (it *xorIterator) Seek(t int64) ValueType {
if it.err != nil { if it.err != nil {
return false return ValNone
} }
for t > it.t || it.numRead == 0 { for t > it.t || it.numRead == 0 {
if !it.Next() { if it.Next() == ValNone {
return false return ValNone
} }
} }
return true return ValFloat
} }
func (it *xorIterator) At() (int64, float64) { func (it *xorIterator) At() (int64, float64) {
@ -259,8 +259,12 @@ func (it *xorIterator) AtHistogram() (int64, *histogram.Histogram) {
panic("cannot call xorIterator.AtHistogram") panic("cannot call xorIterator.AtHistogram")
} }
func (it *xorIterator) ChunkEncoding() Encoding { func (it *xorIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return EncXOR panic("cannot call xorIterator.AtFloatHistogram")
}
func (it *xorIterator) AtT() int64 {
return it.t
} }
func (it *xorIterator) Err() error { func (it *xorIterator) Err() error {
@ -282,33 +286,33 @@ func (it *xorIterator) Reset(b []byte) {
it.err = nil it.err = nil
} }
func (it *xorIterator) Next() bool { func (it *xorIterator) Next() ValueType {
if it.err != nil || it.numRead == it.numTotal { if it.err != nil || it.numRead == it.numTotal {
return false return ValNone
} }
if it.numRead == 0 { if it.numRead == 0 {
t, err := binary.ReadVarint(&it.br) t, err := binary.ReadVarint(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
v, err := it.br.readBits(64) v, err := it.br.readBits(64)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.t = t it.t = t
it.val = math.Float64frombits(v) it.val = math.Float64frombits(v)
it.numRead++ it.numRead++
return true return ValFloat
} }
if it.numRead == 1 { if it.numRead == 1 {
tDelta, err := binary.ReadUvarint(&it.br) tDelta, err := binary.ReadUvarint(&it.br)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.tDelta = tDelta it.tDelta = tDelta
it.t = it.t + int64(it.tDelta) it.t = it.t + int64(it.tDelta)
@ -326,7 +330,7 @@ func (it *xorIterator) Next() bool {
} }
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
if bit == zero { if bit == zero {
break break
@ -349,7 +353,7 @@ func (it *xorIterator) Next() bool {
bits, err := it.br.readBits(64) bits, err := it.br.readBits(64)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
dod = int64(bits) dod = int64(bits)
@ -362,7 +366,7 @@ func (it *xorIterator) Next() bool {
} }
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
// Account for negative numbers, which come back as high unsigned numbers. // Account for negative numbers, which come back as high unsigned numbers.
@ -379,15 +383,15 @@ func (it *xorIterator) Next() bool {
return it.readValue() return it.readValue()
} }
func (it *xorIterator) readValue() bool { func (it *xorIterator) readValue() ValueType {
val, leading, trailing, err := xorRead(&it.br, it.val, it.leading, it.trailing) val, leading, trailing, err := xorRead(&it.br, it.val, it.leading, it.trailing)
if err != nil { if err != nil {
it.err = err it.err = err
return false return ValNone
} }
it.val, it.leading, it.trailing = val, leading, trailing it.val, it.leading, it.trailing = val, leading, trailing
it.numRead++ it.numRead++
return true return ValFloat
} }
func xorWrite( func xorWrite(

View File

@ -977,7 +977,7 @@ func TestCompaction_populateBlock(t *testing.T) {
firstTs int64 = math.MaxInt64 firstTs int64 = math.MaxInt64
s sample s sample
) )
for iter.Next() { for iter.Next() == chunkenc.ValFloat {
s.t, s.v = iter.At() s.t, s.v = iter.At()
if firstTs == math.MaxInt64 { if firstTs == math.MaxInt64 {
firstTs = s.t firstTs = s.t
@ -1373,9 +1373,10 @@ func TestHeadCompactionWithHistograms(t *testing.T) {
it := s.Iterator() it := s.Iterator()
actHists := make([]timedHistogram, 0, len(expHists)) actHists := make([]timedHistogram, 0, len(expHists))
for it.Next() { for it.Next() == chunkenc.ValHistogram {
// TODO(beorn7): Test mixed series?
t, h := it.AtHistogram() t, h := it.AtHistogram()
actHists = append(actHists, timedHistogram{t, h.Copy()}) actHists = append(actHists, timedHistogram{t, h})
} }
require.Equal(t, expHists, actHists) require.Equal(t, expHists, actHists)
@ -1744,7 +1745,7 @@ func TestSparseHistogramCompactionAndQuery(t *testing.T) {
for ss.Next() { for ss.Next() {
s := ss.At() s := ss.At()
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() == chunkenc.ValHistogram {
ts, h := it.AtHistogram() ts, h := it.AtHistogram()
actHists[s.Labels().String()] = append(actHists[s.Labels().String()], timedHistogram{ts, h.Copy()}) actHists[s.Labels().String()] = append(actHists[s.Labels().String()], timedHistogram{ts, h.Copy()})
} }

View File

@ -89,7 +89,8 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str
samples := []tsdbutil.Sample{} samples := []tsdbutil.Sample{}
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
// TODO(beorn7): Also handle histograms.
t, v := it.At() t, v := it.At()
samples = append(samples, sample{t: t, v: v}) samples = append(samples, sample{t: t, v: v})
} }
@ -420,7 +421,7 @@ Outer:
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil}) expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -536,7 +537,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]tsdbutil.Sample{ require.Equal(t, map[string][]tsdbutil.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}}, labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}},
}, ssMap) }, ssMap)
// Append Out of Order Value. // Append Out of Order Value.
@ -553,7 +554,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b"))
require.Equal(t, map[string][]tsdbutil.Sample{ require.Equal(t, map[string][]tsdbutil.Sample{
labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}, sample{10, 3, nil}}, labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}},
}, ssMap) }, ssMap)
} }
@ -594,7 +595,7 @@ func TestDB_Snapshot(t *testing.T) {
sum := 0.0 sum := 0.0
for seriesSet.Next() { for seriesSet.Next() {
series := seriesSet.At().Iterator() series := seriesSet.At().Iterator()
for series.Next() { for series.Next() == chunkenc.ValFloat {
_, v := series.At() _, v := series.At()
sum += v sum += v
} }
@ -646,7 +647,7 @@ func TestDB_Snapshot_ChunksOutsideOfCompactedRange(t *testing.T) {
sum := 0.0 sum := 0.0
for seriesSet.Next() { for seriesSet.Next() {
series := seriesSet.At().Iterator() series := seriesSet.At().Iterator()
for series.Next() { for series.Next() == chunkenc.ValFloat {
_, v := series.At() _, v := series.At()
sum += v sum += v
} }
@ -716,7 +717,7 @@ Outer:
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil}) expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -821,7 +822,7 @@ func TestDB_e2e(t *testing.T) {
for i := 0; i < numDatapoints; i++ { for i := 0; i < numDatapoints; i++ {
v := rand.Float64() v := rand.Float64()
series = append(series, sample{ts, v, nil}) series = append(series, sample{ts, v, nil, nil})
_, err := app.Append(0, lset, ts, v) _, err := app.Append(0, lset, ts, v)
require.NoError(t, err) require.NoError(t, err)
@ -1066,7 +1067,7 @@ func TestTombstoneClean(t *testing.T) {
expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) expSamples := make([]tsdbutil.Sample, 0, len(c.remaint))
for _, ts := range c.remaint { for _, ts := range c.remaint {
expSamples = append(expSamples, sample{ts, smpls[ts], nil}) expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil})
} }
expss := newMockSeriesSet([]storage.Series{ expss := newMockSeriesSet([]storage.Series{
@ -1363,7 +1364,7 @@ func TestSizeRetention(t *testing.T) {
series := genSeries(100, 10, m.MinTime, m.MaxTime+1) series := genSeries(100, 10, m.MinTime, m.MaxTime+1)
for _, s := range series { for _, s := range series {
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
tim, v := it.At() tim, v := it.At()
_, err := headApp.Append(0, s.Labels(), tim, v) _, err := headApp.Append(0, s.Labels(), tim, v)
require.NoError(t, err) require.NoError(t, err)
@ -1550,7 +1551,7 @@ func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample
series := ss.At() series := ss.At()
samples := []sample{} samples := []sample{}
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
samples = append(samples, sample{t: t, v: v}) samples = append(samples, sample{t: t, v: v})
} }
@ -2395,7 +2396,7 @@ func TestDBReadOnly_FlushWAL(t *testing.T) {
sum := 0.0 sum := 0.0
for seriesSet.Next() { for seriesSet.Next() {
series := seriesSet.At().Iterator() series := seriesSet.At().Iterator()
for series.Next() { for series.Next() == chunkenc.ValFloat {
_, v := series.At() _, v := series.At()
sum += v sum += v
} }
@ -2545,11 +2546,11 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
// TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
// that the resulted segments includes the expected chunks data. // that the resulted segments includes the expected chunks data.
func TestChunkWriter_ReadAfterWrite(t *testing.T) { func TestChunkWriter_ReadAfterWrite(t *testing.T) {
chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}}) chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}})
chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}}) chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}})
chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}}) chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}})
chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}}) chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}})
chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}}) chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}})
chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size
tests := []struct { tests := []struct {
@ -2749,11 +2750,11 @@ func TestRangeForTimestamp(t *testing.T) {
// Regression test for https://github.com/prometheus/prometheus/pull/6514. // Regression test for https://github.com/prometheus/prometheus/pull/6514.
func TestChunkReader_ConcurrentReads(t *testing.T) { func TestChunkReader_ConcurrentReads(t *testing.T) {
chks := []chunks.Meta{ chks := []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}}),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}}), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}}),
} }
tempDir, err := ioutil.TempDir("", "test_chunk_writer") tempDir, err := ioutil.TempDir("", "test_chunk_writer")
@ -2818,7 +2819,7 @@ func TestCompactHead(t *testing.T) {
val := rand.Float64() val := rand.Float64()
_, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val) _, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val)
require.NoError(t, err) require.NoError(t, err)
expSamples = append(expSamples, sample{int64(i), val, nil}) expSamples = append(expSamples, sample{int64(i), val, nil, nil})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -2843,9 +2844,9 @@ func TestCompactHead(t *testing.T) {
for seriesSet.Next() { for seriesSet.Next() {
series := seriesSet.At().Iterator() series := seriesSet.At().Iterator()
for series.Next() { for series.Next() == chunkenc.ValFloat {
time, val := series.At() time, val := series.At()
actSamples = append(actSamples, sample{int64(time), val, nil}) actSamples = append(actSamples, sample{int64(time), val, nil, nil})
} }
require.NoError(t, series.Err()) require.NoError(t, series.Err())
} }
@ -3259,7 +3260,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t
var sum float64 var sum float64
var firstErr error var firstErr error
for _, it := range iterators { for _, it := range iterators {
for it.Next() { for it.Next() == chunkenc.ValFloat {
_, v := it.At() _, v := it.At()
sum += v sum += v
} }

View File

@ -23,6 +23,7 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
func TestExample(t *testing.T) { func TestExample(t *testing.T) {
@ -44,7 +45,7 @@ func TestExample(t *testing.T) {
ts, v := time.Now().Unix(), 123.0 ts, v := time.Now().Unix(), 123.0
ref, err := app.Append(0, lbls, ts, v) ref, err := app.Append(0, lbls, ts, v)
require.NoError(t, err) require.NoError(t, err)
appendedSamples = append(appendedSamples, sample{ts, v, nil}) appendedSamples = append(appendedSamples, sample{ts, v, nil, nil})
// Another append for a second later. // Another append for a second later.
// Re-using the ref from above since it's the same series, makes append faster. // Re-using the ref from above since it's the same series, makes append faster.
@ -52,7 +53,7 @@ func TestExample(t *testing.T) {
ts, v = time.Now().Unix(), 124 ts, v = time.Now().Unix(), 124
_, err = app.Append(ref, lbls, ts, v) _, err = app.Append(ref, lbls, ts, v)
require.NoError(t, err) require.NoError(t, err)
appendedSamples = append(appendedSamples, sample{ts, v, nil}) appendedSamples = append(appendedSamples, sample{ts, v, nil, nil})
// Commit to storage. // Commit to storage.
err = app.Commit() err = app.Commit()
@ -79,10 +80,10 @@ func TestExample(t *testing.T) {
fmt.Println("series:", series.Labels().String()) fmt.Println("series:", series.Labels().String())
it := series.Iterator() it := series.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
ts, v := it.At() ts, v := it.At()
fmt.Println("sample", ts, v) fmt.Println("sample", ts, v)
queriedSamples = append(queriedSamples, sample{ts, v, nil}) queriedSamples = append(queriedSamples, sample{ts, v, nil, nil})
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())

View File

@ -274,7 +274,6 @@ type headMetrics struct {
// Sparse histogram metrics for experiments. // Sparse histogram metrics for experiments.
// TODO: remove these in the final version. // TODO: remove these in the final version.
histogramSamplesTotal prometheus.Counter histogramSamplesTotal prometheus.Counter
histogramSeries prometheus.Gauge
} }
func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics { func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics {
@ -377,10 +376,6 @@ func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics {
Name: "prometheus_tsdb_histogram_samples_total", Name: "prometheus_tsdb_histogram_samples_total",
Help: "Total number of histograms samples added.", Help: "Total number of histograms samples added.",
}), }),
histogramSeries: prometheus.NewGauge(prometheus.GaugeOpts{
Name: "prometheus_tsdb_histogram_series",
Help: "Number of histogram series currently present in the head block.",
}),
} }
if r != nil { if r != nil {
@ -409,7 +404,6 @@ func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics {
m.mmapChunkCorruptionTotal, m.mmapChunkCorruptionTotal,
m.snapshotReplayErrorTotal, m.snapshotReplayErrorTotal,
m.histogramSamplesTotal, m.histogramSamplesTotal,
m.histogramSeries,
// Metrics bound to functions and not needed in tests // Metrics bound to functions and not needed in tests
// can be created and registered on the spot. // can be created and registered on the spot.
prometheus.NewGaugeFunc(prometheus.GaugeOpts{ prometheus.NewGaugeFunc(prometheus.GaugeOpts{
@ -607,21 +601,6 @@ func (h *Head) Init(minValidTime int64) error {
h.updateWALReplayStatusRead(i) h.updateWALReplayStatusRead(i)
} }
{
// Set the sparseHistogramSeries metric once replay is done.
// This is a temporary hack.
// TODO: remove this hack and do it while replaying WAL if we keep this metric around.
sparseHistogramSeries := 0
for _, m := range h.series.series {
for _, ms := range m {
if ms.isHistogramSeries {
sparseHistogramSeries++
}
}
}
h.metrics.histogramSeries.Set(float64(sparseHistogramSeries))
}
walReplayDuration := time.Since(start) walReplayDuration := time.Since(start)
h.metrics.walTotalReplayDuration.Set(walReplayDuration.Seconds()) h.metrics.walTotalReplayDuration.Set(walReplayDuration.Seconds())
level.Info(h.logger).Log( level.Info(h.logger).Log(
@ -1142,13 +1121,12 @@ func (h *Head) gc() int64 {
// Drop old chunks and remember series IDs and hashes if they can be // Drop old chunks and remember series IDs and hashes if they can be
// deleted entirely. // deleted entirely.
deleted, chunksRemoved, actualMint, sparseHistogramSeriesDeleted := h.series.gc(mint) deleted, chunksRemoved, actualMint := h.series.gc(mint)
seriesRemoved := len(deleted) seriesRemoved := len(deleted)
h.metrics.seriesRemoved.Add(float64(seriesRemoved)) h.metrics.seriesRemoved.Add(float64(seriesRemoved))
h.metrics.chunksRemoved.Add(float64(chunksRemoved)) h.metrics.chunksRemoved.Add(float64(chunksRemoved))
h.metrics.chunks.Sub(float64(chunksRemoved)) h.metrics.chunks.Sub(float64(chunksRemoved))
h.metrics.histogramSeries.Sub(float64(sparseHistogramSeriesDeleted))
h.numSeries.Sub(uint64(seriesRemoved)) h.numSeries.Sub(uint64(seriesRemoved))
// Remove deleted series IDs from the postings lists. // Remove deleted series IDs from the postings lists.
@ -1366,13 +1344,12 @@ func newStripeSeries(stripeSize int, seriesCallback SeriesLifecycleCallback) *st
// note: returning map[chunks.HeadSeriesRef]struct{} would be more accurate, // note: returning map[chunks.HeadSeriesRef]struct{} would be more accurate,
// but the returned map goes into postings.Delete() which expects a map[storage.SeriesRef]struct // but the returned map goes into postings.Delete() which expects a map[storage.SeriesRef]struct
// and there's no easy way to cast maps. // and there's no easy way to cast maps.
func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int64, int) { func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int64) {
var ( var (
deleted = map[storage.SeriesRef]struct{}{} deleted = map[storage.SeriesRef]struct{}{}
deletedForCallback = []labels.Labels{} deletedForCallback = []labels.Labels{}
rmChunks = 0 rmChunks = 0
actualMint int64 = math.MaxInt64 actualMint int64 = math.MaxInt64
sparseHistogramSeriesDeleted = 0
) )
// Run through all series and truncate old chunks. Mark those with no // Run through all series and truncate old chunks. Mark those with no
// chunks left as deleted and store their ID. // chunks left as deleted and store their ID.
@ -1404,9 +1381,6 @@ func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int6
s.locks[j].Lock() s.locks[j].Lock()
} }
if series.isHistogramSeries {
sparseHistogramSeriesDeleted++
}
deleted[storage.SeriesRef(series.ref)] = struct{}{} deleted[storage.SeriesRef(series.ref)] = struct{}{}
s.hashes[i].del(hash, series.lset) s.hashes[i].del(hash, series.lset)
delete(s.series[j], series.ref) delete(s.series[j], series.ref)
@ -1430,7 +1404,7 @@ func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int6
actualMint = mint actualMint = mint
} }
return deleted, rmChunks, actualMint, sparseHistogramSeriesDeleted return deleted, rmChunks, actualMint
} }
func (s *stripeSeries) getByID(id chunks.HeadSeriesRef) *memSeries { func (s *stripeSeries) getByID(id chunks.HeadSeriesRef) *memSeries {
@ -1495,22 +1469,32 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu
return series, true, nil return series, true, nil
} }
type histogramSample struct {
t int64
h *histogram.Histogram
}
type sample struct { type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram
} }
func newSample(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} } func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample {
return sample{t, v, h, fh}
}
func (s sample) T() int64 { return s.t } func (s sample) T() int64 { return s.t }
func (s sample) V() float64 { return s.v } func (s sample) V() float64 { return s.v }
func (s sample) H() *histogram.Histogram { return s.h } func (s sample) H() *histogram.Histogram { return s.h }
func (s sample) FH() *histogram.FloatHistogram { return s.fh }
func (s sample) Type() chunkenc.ValueType {
switch {
case s.h != nil:
return chunkenc.ValHistogram
case s.fh != nil:
return chunkenc.ValFloatHistogram
default:
return chunkenc.ValFloat
}
}
// memSeries is the in-memory representation of a series. None of its methods // memSeries is the in-memory representation of a series. None of its methods
// are goroutine safe and it is the caller's responsibility to lock it. // are goroutine safe and it is the caller's responsibility to lock it.
@ -1541,7 +1525,6 @@ type memSeries struct {
// We keep the last 4 samples here (in addition to appending them to the chunk) so we don't need coordination between appender and querier. // We keep the last 4 samples here (in addition to appending them to the chunk) so we don't need coordination between appender and querier.
// Even the most compact encoding of a sample takes 2 bits, so the last byte is not contended. // Even the most compact encoding of a sample takes 2 bits, so the last byte is not contended.
sampleBuf [4]sample sampleBuf [4]sample
histogramBuf [4]histogramSample
pendingCommit bool // Whether there are samples waiting to be committed to this series. pendingCommit bool // Whether there are samples waiting to be committed to this series.
@ -1554,6 +1537,8 @@ type memSeries struct {
txs *txRing txs *txRing
// TODO(beorn7): The only reason we track this is to create a staleness
// marker as either histogram or float sample. Perhaps there is a better way.
isHistogramSeries bool isHistogramSeries bool
} }

View File

@ -416,7 +416,6 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels
} }
s.isHistogramSeries = true s.isHistogramSeries = true
if created { if created {
a.head.metrics.histogramSeries.Inc()
a.series = append(a.series, record.RefSeries{ a.series = append(a.series, record.RefSeries{
Ref: s.ref, Ref: s.ref,
Labels: lset, Labels: lset,
@ -607,7 +606,6 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, chunkDiskMapper
if !sampleInOrder { if !sampleInOrder {
return sampleInOrder, chunkCreated return sampleInOrder, chunkCreated
} }
s.app.Append(t, v) s.app.Append(t, v)
s.isHistogramSeries = false s.isHistogramSeries = false
@ -683,10 +681,10 @@ func (s *memSeries) appendHistogram(t int64, h *histogram.Histogram, appendID ui
c.maxTime = t c.maxTime = t
s.histogramBuf[0] = s.histogramBuf[1] s.sampleBuf[0] = s.sampleBuf[1]
s.histogramBuf[1] = s.histogramBuf[2] s.sampleBuf[1] = s.sampleBuf[2]
s.histogramBuf[2] = s.histogramBuf[3] s.sampleBuf[2] = s.sampleBuf[3]
s.histogramBuf[3] = histogramSample{t: t, h: h} s.sampleBuf[3] = sample{t: t, h: h}
if appendID > 0 { if appendID > 0 {
s.txs.add(appendID) s.txs.add(appendID)

View File

@ -428,8 +428,6 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch
msIter.total = numSamples msIter.total = numSamples
msIter.stopAfter = stopAfter msIter.stopAfter = stopAfter
msIter.buf = s.sampleBuf msIter.buf = s.sampleBuf
msIter.histogramBuf = s.histogramBuf
msIter.isHistogramSeries = s.isHistogramSeries
return msIter return msIter
} }
return &memSafeIterator{ return &memSafeIterator{
@ -440,8 +438,6 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch
}, },
total: numSamples, total: numSamples,
buf: s.sampleBuf, buf: s.sampleBuf,
histogramBuf: s.histogramBuf,
isHistogramSeries: s.isHistogramSeries,
} }
} }
@ -450,52 +446,50 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch
type memSafeIterator struct { type memSafeIterator struct {
stopIterator stopIterator
isHistogramSeries bool
total int total int
buf [4]sample buf [4]sample
histogramBuf [4]histogramSample
} }
func (it *memSafeIterator) Seek(t int64) bool { func (it *memSafeIterator) Seek(t int64) chunkenc.ValueType {
if it.Err() != nil { if it.Err() != nil {
return false return chunkenc.ValNone
} }
var ts int64 var valueType chunkenc.ValueType
if it.isHistogramSeries { var ts int64 = math.MinInt64
ts, _ = it.AtHistogram()
} else { if it.i > -1 {
ts, _ = it.At() ts = it.AtT()
}
if t <= ts {
// We are already at the right sample, but we have to find out
// its ValueType.
if it.total-it.i > 4 {
return it.Iterator.Seek(ts)
}
return it.buf[4-(it.total-it.i)].Type()
} }
if it.isHistogramSeries {
for t > ts || it.i == -1 { for t > ts || it.i == -1 {
if !it.Next() { if valueType = it.Next(); valueType == chunkenc.ValNone {
return false return chunkenc.ValNone
}
ts, _ = it.AtHistogram()
}
} else {
for t > ts || it.i == -1 {
if !it.Next() {
return false
}
ts, _ = it.At()
} }
ts = it.AtT()
} }
return true return valueType
} }
func (it *memSafeIterator) Next() bool { func (it *memSafeIterator) Next() chunkenc.ValueType {
if it.i+1 >= it.stopAfter { if it.i+1 >= it.stopAfter {
return false return chunkenc.ValNone
} }
it.i++ it.i++
if it.total-it.i > 4 { if it.total-it.i > 4 {
return it.Iterator.Next() return it.Iterator.Next()
} }
return true return it.buf[4-(it.total-it.i)].Type()
} }
func (it *memSafeIterator) At() (int64, float64) { func (it *memSafeIterator) At() (int64, float64) {
@ -510,10 +504,29 @@ func (it *memSafeIterator) AtHistogram() (int64, *histogram.Histogram) {
if it.total-it.i > 4 { if it.total-it.i > 4 {
return it.Iterator.AtHistogram() return it.Iterator.AtHistogram()
} }
s := it.histogramBuf[4-(it.total-it.i)] s := it.buf[4-(it.total-it.i)]
return s.t, s.h return s.t, s.h
} }
func (it *memSafeIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
if it.total-it.i > 4 {
return it.Iterator.AtFloatHistogram()
}
s := it.buf[4-(it.total-it.i)]
if s.fh != nil {
return s.t, s.fh
}
return s.t, s.h.ToFloat()
}
func (it *memSafeIterator) AtT() int64 {
if it.total-it.i > 4 {
return it.Iterator.AtT()
}
s := it.buf[4-(it.total-it.i)]
return s.t
}
// stopIterator wraps an Iterator, but only returns the first // stopIterator wraps an Iterator, but only returns the first
// stopAfter values, if initialized with i=-1. // stopAfter values, if initialized with i=-1.
type stopIterator struct { type stopIterator struct {
@ -522,9 +535,9 @@ type stopIterator struct {
i, stopAfter int i, stopAfter int
} }
func (it *stopIterator) Next() bool { func (it *stopIterator) Next() chunkenc.ValueType {
if it.i+1 >= it.stopAfter { if it.i+1 >= it.stopAfter {
return false return chunkenc.ValNone
} }
it.i++ it.i++
return it.Iterator.Next() return it.Iterator.Next()

View File

@ -324,18 +324,18 @@ func TestHead_ReadWAL(t *testing.T) {
require.Equal(t, labels.FromStrings("a", "3"), s100.lset) require.Equal(t, labels.FromStrings("a", "3"), s100.lset)
expandChunk := func(c chunkenc.Iterator) (x []sample) { expandChunk := func(c chunkenc.Iterator) (x []sample) {
for c.Next() { for c.Next() == chunkenc.ValFloat {
t, v := c.At() t, v := c.At()
x = append(x, sample{t: t, v: v}) x = append(x, sample{t: t, v: v})
} }
require.NoError(t, c.Err()) require.NoError(t, c.Err())
return x return x
} }
require.Equal(t, []sample{{100, 2, nil}, {101, 5, nil}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{100, 2, nil, nil}, {101, 5, nil, nil}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil)))
require.Equal(t, []sample{{101, 6, nil}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{101, 6, nil, nil}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil)))
// The samples before the new series record should be discarded since a duplicate record // The samples before the new series record should be discarded since a duplicate record
// is only possible when old samples were compacted. // is only possible when old samples were compacted.
require.Equal(t, []sample{{101, 7, nil}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil))) require.Equal(t, []sample{{101, 7, nil, nil}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil)))
q, err := head.ExemplarQuerier(context.Background()) q, err := head.ExemplarQuerier(context.Background())
require.NoError(t, err) require.NoError(t, err)
@ -401,8 +401,8 @@ func TestHead_WALMultiRef(t *testing.T) {
// The samples before the new ref should be discarded since Head truncation // The samples before the new ref should be discarded since Head truncation
// happens only after compacting the Head. // happens only after compacting the Head.
require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: { require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {
sample{1700, 3, nil}, sample{1700, 3, nil, nil},
sample{2000, 4, nil}, sample{2000, 4, nil, nil},
}}, series) }}, series)
} }
@ -779,7 +779,7 @@ func TestDeleteUntilCurMax(t *testing.T) {
require.True(t, res.Next(), "series is not present") require.True(t, res.Next(), "series is not present")
s := res.At() s := res.At()
it := s.Iterator() it := s.Iterator()
require.False(t, it.Next(), "expected no samples") require.Equal(t, chunkenc.ValNone, it.Next(), "expected no samples")
for res.Next() { for res.Next() {
} }
require.NoError(t, res.Err()) require.NoError(t, res.Err())
@ -798,7 +798,7 @@ func TestDeleteUntilCurMax(t *testing.T) {
it = exps.Iterator() it = exps.Iterator()
resSamples, err := storage.ExpandSamples(it, newSample) resSamples, err := storage.ExpandSamples(it, newSample)
require.NoError(t, err) require.NoError(t, err)
require.Equal(t, []tsdbutil.Sample{sample{11, 1, nil}}, resSamples) require.Equal(t, []tsdbutil.Sample{sample{11, 1, nil, nil}}, resSamples)
for res.Next() { for res.Next() {
} }
require.NoError(t, res.Err()) require.NoError(t, res.Err())
@ -912,7 +912,7 @@ func TestDelete_e2e(t *testing.T) {
v := rand.Float64() v := rand.Float64()
_, err := app.Append(0, ls, ts, v) _, err := app.Append(0, ls, ts, v)
require.NoError(t, err) require.NoError(t, err)
series = append(series, sample{ts, v, nil}) series = append(series, sample{ts, v, nil, nil})
ts += rand.Int63n(timeInterval) + 1 ts += rand.Int63n(timeInterval) + 1
} }
seriesMap[labels.New(l...).String()] = series seriesMap[labels.New(l...).String()] = series
@ -979,7 +979,7 @@ func TestDelete_e2e(t *testing.T) {
eok, rok := expSs.Next(), ss.Next() eok, rok := expSs.Next(), ss.Next()
// Skip a series if iterator is empty. // Skip a series if iterator is empty.
if rok { if rok {
for !ss.At().Iterator().Next() { for ss.At().Iterator().Next() == chunkenc.ValNone {
rok = ss.Next() rok = ss.Next()
if !rok { if !rok {
break break
@ -2269,47 +2269,40 @@ func TestMemSafeIteratorSeekIntoBuffer(t *testing.T) {
require.True(t, ok) require.True(t, ok)
// First point. // First point.
ok = it.Seek(0) require.Equal(t, chunkenc.ValFloat, it.Seek(0))
require.True(t, ok)
ts, val := it.At() ts, val := it.At()
require.Equal(t, int64(0), ts) require.Equal(t, int64(0), ts)
require.Equal(t, float64(0), val) require.Equal(t, float64(0), val)
// Advance one point. // Advance one point.
ok = it.Next() require.Equal(t, chunkenc.ValFloat, it.Next())
require.True(t, ok)
ts, val = it.At() ts, val = it.At()
require.Equal(t, int64(1), ts) require.Equal(t, int64(1), ts)
require.Equal(t, float64(1), val) require.Equal(t, float64(1), val)
// Seeking an older timestamp shouldn't cause the iterator to go backwards. // Seeking an older timestamp shouldn't cause the iterator to go backwards.
ok = it.Seek(0) require.Equal(t, chunkenc.ValFloat, it.Seek(0))
require.True(t, ok)
ts, val = it.At() ts, val = it.At()
require.Equal(t, int64(1), ts) require.Equal(t, int64(1), ts)
require.Equal(t, float64(1), val) require.Equal(t, float64(1), val)
// Seek into the buffer. // Seek into the buffer.
ok = it.Seek(3) require.Equal(t, chunkenc.ValFloat, it.Seek(3))
require.True(t, ok)
ts, val = it.At() ts, val = it.At()
require.Equal(t, int64(3), ts) require.Equal(t, int64(3), ts)
require.Equal(t, float64(3), val) require.Equal(t, float64(3), val)
// Iterate through the rest of the buffer. // Iterate through the rest of the buffer.
for i := 4; i < 7; i++ { for i := 4; i < 7; i++ {
ok = it.Next() require.Equal(t, chunkenc.ValFloat, it.Next())
require.True(t, ok)
ts, val = it.At() ts, val = it.At()
require.Equal(t, int64(i), ts) require.Equal(t, int64(i), ts)
require.Equal(t, float64(i), val) require.Equal(t, float64(i), val)
} }
// Run out of elements in the iterator. // Run out of elements in the iterator.
ok = it.Next() require.Equal(t, chunkenc.ValNone, it.Next())
require.False(t, ok) require.Equal(t, chunkenc.ValNone, it.Seek(7))
ok = it.Seek(7)
require.False(t, ok)
} }
// Tests https://github.com/prometheus/prometheus/issues/8221. // Tests https://github.com/prometheus/prometheus/issues/8221.
@ -2358,7 +2351,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) {
// Now consume after compaction when it's gone. // Now consume after compaction when it's gone.
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
_, _ = it.At() _, _ = it.At()
} }
// It should error here without any fix for the mentioned issue. // It should error here without any fix for the mentioned issue.
@ -2366,7 +2359,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) {
for ss.Next() { for ss.Next() {
s = ss.At() s = ss.At()
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() == chunkenc.ValFloat {
_, _ = it.At() _, _ = it.At()
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())
@ -2397,7 +2390,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) {
ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i)) ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i))
require.NoError(t, err) require.NoError(t, err)
maxt = ts maxt = ts
expSamples = append(expSamples, sample{ts, float64(i), nil}) expSamples = append(expSamples, sample{ts, float64(i), nil, nil})
} }
require.NoError(t, app.Commit()) require.NoError(t, app.Commit())
@ -2565,9 +2558,9 @@ func TestAppendHistogram(t *testing.T) {
it := s.Iterator() it := s.Iterator()
actHistograms := make([]timedHistogram, 0, len(expHistograms)) actHistograms := make([]timedHistogram, 0, len(expHistograms))
for it.Next() { for it.Next() == chunkenc.ValHistogram {
t, h := it.AtHistogram() t, h := it.AtHistogram()
actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) actHistograms = append(actHistograms, timedHistogram{t, h})
} }
require.Equal(t, expHistograms, actHistograms) require.Equal(t, expHistograms, actHistograms)
@ -2622,9 +2615,9 @@ func TestHistogramInWAL(t *testing.T) {
it := s.Iterator() it := s.Iterator()
actHistograms := make([]timedHistogram, 0, len(expHistograms)) actHistograms := make([]timedHistogram, 0, len(expHistograms))
for it.Next() { for it.Next() == chunkenc.ValHistogram {
t, h := it.AtHistogram() t, h := it.AtHistogram()
actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) actHistograms = append(actHistograms, timedHistogram{t, h})
} }
require.Equal(t, expHistograms, actHistograms) require.Equal(t, expHistograms, actHistograms)
@ -2728,7 +2721,7 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk. // 240 samples should m-map at least 1 chunk.
for ts := int64(1); ts <= 240; ts++ { for ts := int64(1); ts <= 240; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
@ -2788,7 +2781,7 @@ func TestChunkSnapshot(t *testing.T) {
// 240 samples should m-map at least 1 chunk. // 240 samples should m-map at least 1 chunk.
for ts := int64(241); ts <= 480; ts++ { for ts := int64(241); ts <= 480; ts++ {
val := rand.Float64() val := rand.Float64()
expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil}) expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil})
ref, err := app.Append(0, lbls, ts, val) ref, err := app.Append(0, lbls, ts, val)
require.NoError(t, err) require.NoError(t, err)
@ -2951,7 +2944,6 @@ func TestHistogramMetrics(t *testing.T) {
} }
} }
require.Equal(t, float64(expHSeries), prom_testutil.ToFloat64(head.metrics.histogramSeries))
require.Equal(t, float64(expHSamples), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal)) require.Equal(t, float64(expHSamples), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal))
require.NoError(t, head.Close()) require.NoError(t, head.Close())
@ -2961,7 +2953,6 @@ func TestHistogramMetrics(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, head.Init(0)) require.NoError(t, head.Init(0))
require.Equal(t, float64(expHSeries), prom_testutil.ToFloat64(head.metrics.histogramSeries))
require.Equal(t, float64(0), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal)) // Counter reset. require.Equal(t, float64(0), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal)) // Counter reset.
} }
@ -2995,9 +2986,9 @@ func TestHistogramStaleSample(t *testing.T) {
it := s.Iterator() it := s.Iterator()
actHistograms := make([]timedHistogram, 0, len(expHistograms)) actHistograms := make([]timedHistogram, 0, len(expHistograms))
for it.Next() { for it.Next() == chunkenc.ValHistogram {
t, h := it.AtHistogram() t, h := it.AtHistogram()
actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) actHistograms = append(actHistograms, timedHistogram{t, h})
} }
// We cannot compare StaleNAN with require.Equal, hence checking each histogram manually. // We cannot compare StaleNAN with require.Equal, hence checking each histogram manually.
@ -3176,7 +3167,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
t int64 t int64
v float64 v float64
h *histogram.Histogram h *histogram.Histogram
enc chunkenc.Encoding vt chunkenc.ValueType
} }
expResult := []result{} expResult := []result{}
ref := storage.SeriesRef(0) ref := storage.SeriesRef(0)
@ -3186,7 +3177,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expResult = append(expResult, result{ expResult = append(expResult, result{
t: ts, t: ts,
v: v, v: v,
enc: chunkenc.EncXOR, vt: chunkenc.ValFloat,
}) })
} }
addHistogramSample := func(app storage.Appender, ts int64, h *histogram.Histogram) { addHistogramSample := func(app storage.Appender, ts int64, h *histogram.Histogram) {
@ -3195,7 +3186,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expResult = append(expResult, result{ expResult = append(expResult, result{
t: ts, t: ts,
h: h, h: h,
enc: chunkenc.EncHistogram, vt: chunkenc.ValHistogram,
}) })
} }
checkExpChunks := func(count int) { checkExpChunks := func(count int) {
@ -3269,17 +3260,25 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
s := ss.At() s := ss.At()
it := s.Iterator() it := s.Iterator()
expIdx := 0 expIdx := 0
for it.Next() { loop:
require.Equal(t, expResult[expIdx].enc, it.ChunkEncoding()) for {
if it.ChunkEncoding() == chunkenc.EncHistogram { vt := it.Next()
ts, h := it.AtHistogram() switch vt {
require.Equal(t, expResult[expIdx].t, ts) case chunkenc.ValNone:
require.Equal(t, expResult[expIdx].h, h) require.Equal(t, len(expResult), expIdx)
} else { break loop
case chunkenc.ValFloat:
ts, v := it.At() ts, v := it.At()
require.Equal(t, expResult[expIdx].t, ts) require.Equal(t, expResult[expIdx].t, ts)
require.Equal(t, expResult[expIdx].v, v) require.Equal(t, expResult[expIdx].v, v)
case chunkenc.ValHistogram:
ts, h := it.AtHistogram()
require.Equal(t, expResult[expIdx].t, ts)
require.Equal(t, expResult[expIdx].h, h)
default:
require.Error(t, fmt.Errorf("unexpected ValueType %v", vt))
} }
require.Equal(t, expResult[expIdx].vt, vt)
expIdx++ expIdx++
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())

View File

@ -14,6 +14,7 @@
package tsdb package tsdb
import ( import (
"fmt"
"math" "math"
"sort" "sort"
"strings" "strings"
@ -627,9 +628,11 @@ type populateWithDelSeriesIterator struct {
curr chunkenc.Iterator curr chunkenc.Iterator
} }
func (p *populateWithDelSeriesIterator) Next() bool { func (p *populateWithDelSeriesIterator) Next() chunkenc.ValueType {
if p.curr != nil && p.curr.Next() { if p.curr != nil {
return true if valueType := p.curr.Next(); valueType != chunkenc.ValNone {
return valueType
}
} }
for p.next() { for p.next() {
@ -638,33 +641,41 @@ func (p *populateWithDelSeriesIterator) Next() bool {
} else { } else {
p.curr = p.currChkMeta.Chunk.Iterator(nil) p.curr = p.currChkMeta.Chunk.Iterator(nil)
} }
if p.curr.Next() { if valueType := p.curr.Next(); valueType != chunkenc.ValNone {
return true return valueType
} }
} }
return false return chunkenc.ValNone
} }
func (p *populateWithDelSeriesIterator) Seek(t int64) bool { func (p *populateWithDelSeriesIterator) Seek(t int64) chunkenc.ValueType {
if p.curr != nil && p.curr.Seek(t) { if p.curr != nil {
return true if valueType := p.curr.Seek(t); valueType != chunkenc.ValNone {
} return valueType
for p.Next() {
if p.curr.Seek(t) {
return true
} }
} }
return false for p.Next() != chunkenc.ValNone {
if valueType := p.curr.Seek(t); valueType != chunkenc.ValNone {
return valueType
}
}
return chunkenc.ValNone
} }
func (p *populateWithDelSeriesIterator) At() (int64, float64) { return p.curr.At() } func (p *populateWithDelSeriesIterator) At() (int64, float64) {
return p.curr.At()
}
func (p *populateWithDelSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (p *populateWithDelSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return p.curr.AtHistogram() return p.curr.AtHistogram()
} }
func (p *populateWithDelSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (p *populateWithDelSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return p.curr.ChunkEncoding() return p.curr.AtFloatHistogram()
}
func (p *populateWithDelSeriesIterator) AtT() int64 {
return p.curr.AtT()
} }
func (p *populateWithDelSeriesIterator) Err() error { func (p *populateWithDelSeriesIterator) Err() error {
@ -693,61 +704,67 @@ func (p *populateWithDelChunkSeriesIterator) Next() bool {
return true return true
} }
// Re-encode the chunk if iterator is provider. This means that it has some samples to be deleted or chunk is opened. valueType := p.currDelIter.Next()
var ( if valueType == chunkenc.ValNone {
newChunk chunkenc.Chunk
app chunkenc.Appender
err error
)
if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram {
newChunk = chunkenc.NewHistogramChunk()
app, err = newChunk.Appender()
} else {
newChunk = chunkenc.NewXORChunk()
app, err = newChunk.Appender()
}
if err != nil {
p.err = err
return false
}
if !p.currDelIter.Next() {
if err := p.currDelIter.Err(); err != nil { if err := p.currDelIter.Err(); err != nil {
p.err = errors.Wrap(err, "iterate chunk while re-encoding") p.err = errors.Wrap(err, "iterate chunk while re-encoding")
return false return false
} }
// Empty chunk, this should not happen, as we assume full deletions being filtered before this iterator. // Empty chunk, this should not happen, as we assume full deletions being filtered before this iterator.
p.err = errors.Wrap(err, "populateWithDelChunkSeriesIterator: unexpected empty chunk found while rewriting chunk") p.err = errors.New("populateWithDelChunkSeriesIterator: unexpected empty chunk found while rewriting chunk")
return false return false
} }
// Re-encode the chunk if iterator is provider. This means that it has some samples to be deleted or chunk is opened.
var ( var (
newChunk chunkenc.Chunk
app chunkenc.Appender
t int64 t int64
v float64 err error
h *histogram.Histogram
) )
if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram { switch valueType {
case chunkenc.ValHistogram:
newChunk = chunkenc.NewHistogramChunk()
if app, err = newChunk.Appender(); err != nil {
break
}
if hc, ok := p.currChkMeta.Chunk.(*chunkenc.HistogramChunk); ok { if hc, ok := p.currChkMeta.Chunk.(*chunkenc.HistogramChunk); ok {
newChunk.(*chunkenc.HistogramChunk).SetCounterResetHeader(hc.GetCounterResetHeader()) newChunk.(*chunkenc.HistogramChunk).SetCounterResetHeader(hc.GetCounterResetHeader())
} }
var h *histogram.Histogram
t, h = p.currDelIter.AtHistogram() t, h = p.currDelIter.AtHistogram()
p.curr.MinTime = t p.curr.MinTime = t
app.AppendHistogram(t, h.Copy()) app.AppendHistogram(t, h)
for p.currDelIter.Next() { for p.currDelIter.Next() == chunkenc.ValHistogram {
// TODO(beorn7): Is it possible that the value type changes during iteration?
t, h = p.currDelIter.AtHistogram() t, h = p.currDelIter.AtHistogram()
app.AppendHistogram(t, h.Copy()) app.AppendHistogram(t, h)
} }
} else { case chunkenc.ValFloat:
newChunk = chunkenc.NewXORChunk()
if app, err = newChunk.Appender(); err != nil {
break
}
var v float64
t, v = p.currDelIter.At() t, v = p.currDelIter.At()
p.curr.MinTime = t p.curr.MinTime = t
app.Append(t, v) app.Append(t, v)
for p.currDelIter.Next() { for p.currDelIter.Next() == chunkenc.ValFloat {
// TODO(beorn7): Is it possible that the value type changes during iteration?
t, v = p.currDelIter.At() t, v = p.currDelIter.At()
app.Append(t, v) app.Append(t, v)
} }
}
default:
// TODO(beorn7): Need FloatHistogram eventually.
err = fmt.Errorf("populateWithDelChunkSeriesIterator: value type %v unsupported", valueType)
}
if err != nil {
p.err = errors.Wrap(err, "iterate chunk while re-encoding")
return false
}
if err := p.currDelIter.Err(); err != nil { if err := p.currDelIter.Err(); err != nil {
p.err = errors.Wrap(err, "iterate chunk while re-encoding") p.err = errors.Wrap(err, "iterate chunk while re-encoding")
return false return false
@ -888,28 +905,29 @@ func (it *DeletedIterator) AtHistogram() (int64, *histogram.Histogram) {
return t, h return t, h
} }
func (it *DeletedIterator) ChunkEncoding() chunkenc.Encoding { func (it *DeletedIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return it.Iter.ChunkEncoding() t, h := it.Iter.AtFloatHistogram()
return t, h
} }
func (it *DeletedIterator) Seek(t int64) bool { func (it *DeletedIterator) AtT() int64 {
return it.Iter.AtT()
}
func (it *DeletedIterator) Seek(t int64) chunkenc.ValueType {
if it.Iter.Err() != nil { if it.Iter.Err() != nil {
return false return chunkenc.ValNone
} }
if ok := it.Iter.Seek(t); !ok { valueType := it.Iter.Seek(t)
return false if valueType == chunkenc.ValNone {
return chunkenc.ValNone
} }
// Now double check if the entry falls into a deleted interval. // Now double check if the entry falls into a deleted interval.
var ts int64 ts := it.AtT()
if it.ChunkEncoding() == chunkenc.EncHistogram {
ts, _ = it.AtHistogram()
} else {
ts, _ = it.At()
}
for _, itv := range it.Intervals { for _, itv := range it.Intervals {
if ts < itv.Mint { if ts < itv.Mint {
return true return valueType
} }
if ts > itv.Maxt { if ts > itv.Maxt {
@ -922,32 +940,26 @@ func (it *DeletedIterator) Seek(t int64) bool {
} }
// The timestamp is greater than all the deleted intervals. // The timestamp is greater than all the deleted intervals.
return true return valueType
} }
func (it *DeletedIterator) Next() bool { func (it *DeletedIterator) Next() chunkenc.ValueType {
Outer: Outer:
for it.Iter.Next() { for valueType := it.Iter.Next(); valueType != chunkenc.ValNone; valueType = it.Iter.Next() {
var ts int64 ts := it.AtT()
if it.ChunkEncoding() == chunkenc.EncHistogram {
ts, _ = it.AtHistogram()
} else {
ts, _ = it.At()
}
for _, tr := range it.Intervals { for _, tr := range it.Intervals {
if tr.InBounds(ts) { if tr.InBounds(ts) {
continue Outer continue Outer
} }
if ts <= tr.Maxt { if ts <= tr.Maxt {
return true return valueType
} }
it.Intervals = it.Intervals[1:] it.Intervals = it.Intervals[1:]
} }
return true return valueType
} }
return false return chunkenc.ValNone
} }
func (it *DeletedIterator) Err() error { return it.Iter.Err() } func (it *DeletedIterator) Err() error { return it.Iter.Err() }

View File

@ -278,24 +278,24 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []tsdbutil.Sample{sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}},
), ),
}), }),
}, },
@ -305,18 +305,18 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
}, },
@ -329,20 +329,20 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
}, },
@ -355,18 +355,18 @@ func TestBlockQuerier(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
}, },
@ -414,24 +414,24 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}},
), ),
}), }),
}, },
@ -441,18 +441,18 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}},
), ),
}), }),
}, },
@ -493,22 +493,22 @@ var testData = []seriesSamples{
{ {
lset: map[string]string{"a": "a"}, lset: map[string]string{"a": "a"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 2, nil}, {2, 3, nil}, {3, 4, nil}}, {{1, 2, nil, nil}, {2, 3, nil, nil}, {3, 4, nil, nil}},
{{5, 2, nil}, {6, 3, nil}, {7, 4, nil}}, {{5, 2, nil, nil}, {6, 3, nil, nil}, {7, 4, nil, nil}},
}, },
}, },
{ {
lset: map[string]string{"a": "a", "b": "b"}, lset: map[string]string{"a": "a", "b": "b"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 1, nil}, {2, 2, nil}, {3, 3, nil}}, {{1, 1, nil, nil}, {2, 2, nil, nil}, {3, 3, nil, nil}},
{{5, 3, nil}, {6, 6, nil}}, {{5, 3, nil, nil}, {6, 6, nil, nil}},
}, },
}, },
{ {
lset: map[string]string{"b": "b"}, lset: map[string]string{"b": "b"},
chunks: [][]sample{ chunks: [][]sample{
{{1, 3, nil}, {2, 2, nil}, {3, 6, nil}}, {{1, 3, nil, nil}, {2, 2, nil, nil}, {3, 6, nil, nil}},
{{5, 1, nil}, {6, 7, nil}, {7, 2, nil}}, {{5, 1, nil, nil}, {6, 7, nil, nil}, {7, 2, nil, nil}},
}, },
}, },
} }
@ -555,24 +555,24 @@ func TestBlockQuerierDelete(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}}, []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []tsdbutil.Sample{sample{5, 1, nil, nil}},
), ),
}), }),
}, },
@ -582,18 +582,18 @@ func TestBlockQuerierDelete(t *testing.T) {
ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")},
exp: newMockSeriesSet([]storage.Series{ exp: newMockSeriesSet([]storage.Series{
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}},
), ),
}), }),
expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ expChks: newMockChunkSeriesSet([]storage.ChunkSeries{
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}},
[]tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}},
), ),
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}},
[]tsdbutil.Sample{sample{5, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}},
), ),
}), }),
}, },
@ -676,57 +676,57 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "one chunk", name: "one chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
}), }),
}, },
}, },
{ {
name: "two full chunks", name: "two full chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89, nil}, sample{9, 8, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}), }),
}, },
}, },
{ {
name: "three full chunks", name: "three full chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
{sample{10, 22, nil}, sample{203, 3493, nil}}, {sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}},
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{10, 22, nil}, sample{203, 3493, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89, nil}, sample{9, 8, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{10, 22, nil}, sample{203, 3493, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil},
}), }),
}, },
}, },
@ -741,8 +741,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks and seek beyond chunks", name: "two chunks and seek beyond chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
seek: 10, seek: 10,
@ -751,27 +751,27 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks and seek on middle of first chunk", name: "two chunks and seek on middle of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
seek: 2, seek: 2,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}, },
}, },
{ {
name: "two chunks and seek before first chunk", name: "two chunks and seek before first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
seek: -32, seek: -32,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}, },
}, },
// Deletion / Trim cases. // Deletion / Trim cases.
@ -783,60 +783,60 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks with trimmed first and last samples from edge chunks", name: "two chunks with trimmed first and last samples from edge chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{3, 5, nil}, sample{6, 1, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89, nil}, sample{7, 89, nil, nil},
}), }),
}, },
}, },
{ {
name: "two chunks with trimmed middle sample of first chunk", name: "two chunks with trimmed middle sample of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}}, intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}},
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2, nil}, sample{6, 1, nil}, sample{1, 2, nil, nil}, sample{6, 1, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{7, 89, nil}, sample{9, 8, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
}), }),
}, },
}, },
{ {
name: "two chunks with deletion across two chunks", name: "two chunks with deletion across two chunks",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}}, intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}},
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{9, 8, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{9, 8, nil, nil},
}, },
expectedChks: []chunks.Meta{ expectedChks: []chunks.Meta{
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil},
}), }),
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
sample{9, 8, nil}, sample{9, 8, nil, nil},
}), }),
}, },
}, },
@ -844,15 +844,15 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
{ {
name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk", name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk",
chks: [][]tsdbutil.Sample{ chks: [][]tsdbutil.Sample{
{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}},
{sample{7, 89, nil}, sample{9, 8, nil}}, {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}},
}, },
intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}),
seek: 3, seek: 3,
seekSuccess: true, seekSuccess: true,
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil},
}, },
}, },
} }
@ -864,8 +864,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
var r []tsdbutil.Sample var r []tsdbutil.Sample
if tc.seek != 0 { if tc.seek != 0 {
require.Equal(t, tc.seekSuccess, it.Seek(tc.seek)) require.Equal(t, tc.seekSuccess, it.Seek(tc.seek) == chunkenc.ValFloat)
require.Equal(t, tc.seekSuccess, it.Seek(tc.seek)) // Next one should be noop. require.Equal(t, tc.seekSuccess, it.Seek(tc.seek) == chunkenc.ValFloat) // Next one should be noop.
if tc.seekSuccess { if tc.seekSuccess {
// After successful seek iterator is ready. Grab the value. // After successful seek iterator is ready. Grab the value.
@ -908,14 +908,14 @@ func rmChunkRefs(chks []chunks.Meta) {
func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
[]tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}},
[]tsdbutil.Sample{sample{4, 4, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{5, 5, nil, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
require.True(t, it.Seek(1)) require.Equal(t, chunkenc.ValFloat, it.Seek(1))
require.True(t, it.Seek(2)) require.Equal(t, chunkenc.ValFloat, it.Seek(2))
require.True(t, it.Seek(2)) require.Equal(t, chunkenc.ValFloat, it.Seek(2))
ts, v := it.At() ts, v := it.At()
require.Equal(t, int64(2), ts) require.Equal(t, int64(2), ts)
require.Equal(t, float64(2), v) require.Equal(t, float64(2), v)
@ -926,17 +926,17 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) {
func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
[]tsdbutil.Sample{sample{1, 2, nil}, sample{3, 4, nil}, sample{5, 6, nil}, sample{7, 8, nil}}, []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{3, 4, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}},
[]tsdbutil.Sample{}, []tsdbutil.Sample{},
) )
it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
require.True(t, it.Next()) require.Equal(t, chunkenc.ValFloat, it.Next())
ts, v := it.At() ts, v := it.At()
require.Equal(t, int64(1), ts) require.Equal(t, int64(1), ts)
require.Equal(t, float64(2), v) require.Equal(t, float64(2), v)
require.True(t, it.Seek(4)) require.Equal(t, chunkenc.ValFloat, it.Seek(4))
ts, v = it.At() ts, v = it.At()
require.Equal(t, int64(5), ts) require.Equal(t, int64(5), ts)
require.Equal(t, float64(6), v) require.Equal(t, float64(6), v)
@ -944,25 +944,25 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) {
func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{6, 8, nil}}, []tsdbutil.Sample{sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{6, 8, nil, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator()
require.Equal(t, false, it.Seek(7)) require.Equal(t, chunkenc.ValNone, it.Seek(7))
require.Equal(t, true, it.Seek(3)) require.Equal(t, chunkenc.ValFloat, it.Seek(3))
} }
// Regression when calling Next() with a time bounded to fit within two samples. // Regression when calling Next() with a time bounded to fit within two samples.
// Seek gets called and advances beyond the max time, which was just accepted as a valid sample. // Seek gets called and advances beyond the max time, which was just accepted as a valid sample.
func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) { func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) {
f, chkMetas := createFakeReaderAndNotPopulatedChunks( f, chkMetas := createFakeReaderAndNotPopulatedChunks(
[]tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{7, 8, nil}}, []tsdbutil.Sample{sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}},
) )
it := newPopulateWithDelGenericSeriesIterator( it := newPopulateWithDelGenericSeriesIterator(
f, chkMetas, tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}), f, chkMetas, tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}),
).toSeriesIterator() ).toSeriesIterator()
require.Equal(t, false, it.Next()) require.Equal(t, chunkenc.ValNone, it.Next())
} }
// Test the cost of merging series sets for different number of merged sets and their size. // Test the cost of merging series sets for different number of merged sets and their size.
@ -1062,7 +1062,7 @@ func TestDeletedIterator(t *testing.T) {
i := int64(-1) i := int64(-1)
it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]} it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]}
ranges := c.r[:] ranges := c.r[:]
for it.Next() { for it.Next() == chunkenc.ValFloat {
i++ i++
for _, tr := range ranges { for _, tr := range ranges {
if tr.InBounds(i) { if tr.InBounds(i) {
@ -1124,9 +1124,9 @@ func TestDeletedIterator_WithSeek(t *testing.T) {
for _, c := range cases { for _, c := range cases {
it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]} it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]}
require.Equal(t, c.ok, it.Seek(c.seek)) require.Equal(t, c.ok, it.Seek(c.seek) == chunkenc.ValFloat)
if c.ok { if c.ok {
ts, _ := it.At() ts := it.AtT()
require.Equal(t, c.seekedTs, ts) require.Equal(t, c.seekedTs, ts)
} }
} }
@ -2057,7 +2057,7 @@ func benchQuery(b *testing.B, expExpansions int, q storage.Querier, selectors la
s := ss.At() s := ss.At()
s.Labels() s.Labels()
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() != chunkenc.ValNone {
} }
actualExpansions++ actualExpansions++
} }

View File

@ -57,6 +57,7 @@ type RefSeries struct {
} }
// RefSample is a timestamp/value pair associated with a reference to a series. // RefSample is a timestamp/value pair associated with a reference to a series.
// TODO(beorn7): Perhaps make this "polymorphic", including histogram and float-histogram pointers? Then get rid of RefHistogram.
type RefSample struct { type RefSample struct {
Ref chunks.HeadSeriesRef Ref chunks.HeadSeriesRef
T int64 T int64

View File

@ -21,6 +21,7 @@ import (
"github.com/go-kit/log" "github.com/go-kit/log"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
var ErrInvalidTimes = fmt.Errorf("max time is lesser than min time") var ErrInvalidTimes = fmt.Errorf("max time is lesser than min time")
@ -51,7 +52,8 @@ func CreateBlock(series []storage.Series, dir string, chunkRange int64, logger l
ref := storage.SeriesRef(0) ref := storage.SeriesRef(0)
it := s.Iterator() it := s.Iterator()
lset := s.Labels() lset := s.Labels()
for it.Next() { for it.Next() == chunkenc.ValFloat {
// TODO(beorn7): Add histogram support.
t, v := it.At() t, v := it.At()
ref, err = app.Append(ref, lset, t, v) ref, err = app.Append(ref, lset, t, v)
if err != nil { if err != nil {

View File

@ -14,13 +14,20 @@
package tsdbutil package tsdbutil
import ( import (
"fmt"
"math" "math"
"github.com/pkg/errors"
"github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
) )
// BufferedSeriesIterator wraps an iterator with a look-back buffer. // BufferedSeriesIterator wraps an iterator with a look-back buffer.
//
// TODO(beorn7): BufferedSeriesIterator does not support Histograms or
// FloatHistograms. Either add support or remove BufferedSeriesIterator
// altogether (it seems unused).
type BufferedSeriesIterator struct { type BufferedSeriesIterator struct {
it chunkenc.Iterator it chunkenc.Iterator
buf *sampleRing buf *sampleRing
@ -50,7 +57,7 @@ func (b *BufferedSeriesIterator) Buffer() chunkenc.Iterator {
} }
// Seek advances the iterator to the element at time t or greater. // Seek advances the iterator to the element at time t or greater.
func (b *BufferedSeriesIterator) Seek(t int64) bool { func (b *BufferedSeriesIterator) Seek(t int64) chunkenc.ValueType {
t0 := t - b.buf.delta t0 := t - b.buf.delta
// If the delta would cause us to seek backwards, preserve the buffer // If the delta would cause us to seek backwards, preserve the buffer
@ -58,35 +65,43 @@ func (b *BufferedSeriesIterator) Seek(t int64) bool {
if t0 > b.lastTime { if t0 > b.lastTime {
b.buf.reset() b.buf.reset()
ok := b.it.Seek(t0) if b.it.Seek(t0) == chunkenc.ValNone {
if !ok { return chunkenc.ValNone
return false
} }
b.lastTime, _ = b.At() b.lastTime = b.AtT()
} }
if b.lastTime >= t { if b.lastTime >= t {
return true return chunkenc.ValFloat
} }
for b.Next() { for {
valueType := b.Next()
switch valueType {
case chunkenc.ValNone:
return chunkenc.ValNone
case chunkenc.ValFloat:
if b.lastTime >= t { if b.lastTime >= t {
return true return valueType
}
default:
panic(fmt.Errorf("BufferedSeriesIterator: unsupported value type %v", valueType))
}
if b.lastTime >= t {
return valueType
} }
} }
return false
} }
// Next advances the iterator to the next element. // Next advances the iterator to the next element.
func (b *BufferedSeriesIterator) Next() bool { func (b *BufferedSeriesIterator) Next() chunkenc.ValueType {
// Add current element to buffer before advancing. // Add current element to buffer before advancing.
b.buf.add(b.it.At()) b.buf.add(b.it.At())
ok := b.it.Next() valueType := b.it.Next()
if ok { if valueType != chunkenc.ValNone {
b.lastTime, _ = b.At() b.lastTime = b.AtT()
} }
return ok return valueType
} }
// At returns the current element of the iterator. // At returns the current element of the iterator.
@ -94,6 +109,21 @@ func (b *BufferedSeriesIterator) At() (int64, float64) {
return b.it.At() return b.it.At()
} }
// AtHistogram is unsupported.
func (b *BufferedSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
panic(errors.New("BufferedSeriesIterator: AtHistogram not implemented"))
}
// AtFloatHistogram is unsupported.
func (b *BufferedSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
panic(errors.New("BufferedSeriesIterator: AtFloatHistogram not implemented"))
}
// At returns the timestamp of the current element of the iterator.
func (b *BufferedSeriesIterator) AtT() int64 {
return b.it.AtT()
}
// Err returns the last encountered error. // Err returns the last encountered error.
func (b *BufferedSeriesIterator) Err() error { func (b *BufferedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()
@ -103,6 +133,7 @@ type sample struct {
t int64 t int64
v float64 v float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram
} }
func (s sample) T() int64 { func (s sample) T() int64 {
@ -117,6 +148,21 @@ func (s sample) H() *histogram.Histogram {
return s.h return s.h
} }
func (s sample) FH() *histogram.FloatHistogram {
return s.fh
}
func (s sample) Type() chunkenc.ValueType {
switch {
case s.h != nil:
return chunkenc.ValHistogram
case s.fh != nil:
return chunkenc.ValFloatHistogram
default:
return chunkenc.ValFloat
}
}
type sampleRing struct { type sampleRing struct {
delta int64 delta int64
@ -148,13 +194,16 @@ type sampleRingIterator struct {
i int i int
} }
func (it *sampleRingIterator) Next() bool { func (it *sampleRingIterator) Next() chunkenc.ValueType {
it.i++ it.i++
return it.i < it.r.l if it.i < it.r.l {
return chunkenc.ValFloat
}
return chunkenc.ValNone
} }
func (it *sampleRingIterator) Seek(int64) bool { func (it *sampleRingIterator) Seek(int64) chunkenc.ValueType {
return false return chunkenc.ValNone
} }
func (it *sampleRingIterator) Err() error { func (it *sampleRingIterator) Err() error {
@ -166,12 +215,16 @@ func (it *sampleRingIterator) At() (int64, float64) {
} }
func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) { func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
// TODO(beorn7): Add proper histogram support. panic(errors.New("sampleRingIterator: AtHistogram not implemented"))
return 0, nil
} }
func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding { func (it *sampleRingIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return chunkenc.EncXOR panic(errors.New("sampleRingIterator: AtFloatHistogram not implemented"))
}
func (it *sampleRingIterator) AtT() int64 {
t, _ := it.r.at(it.i)
return t
} }
func (r *sampleRing) at(i int) (int64, float64) { func (r *sampleRing) at(i int) (int64, float64) {

View File

@ -91,7 +91,7 @@ func TestBufferedSeriesIterator(t *testing.T) {
bufferEq := func(exp []sample) { bufferEq := func(exp []sample) {
var b []sample var b []sample
bit := it.Buffer() bit := it.Buffer()
for bit.Next() { for bit.Next() == chunkenc.ValFloat {
t, v := bit.At() t, v := bit.At()
b = append(b, sample{t: t, v: v}) b = append(b, sample{t: t, v: v})
} }
@ -114,29 +114,29 @@ func TestBufferedSeriesIterator(t *testing.T) {
{t: 101, v: 10}, {t: 101, v: 10},
}), 2) }), 2)
require.True(t, it.Seek(-123), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed")
sampleEq(1, 2) sampleEq(1, 2)
bufferEq(nil) bufferEq(nil)
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(2, 3) sampleEq(2, 3)
bufferEq([]sample{{t: 1, v: 2}}) bufferEq([]sample{{t: 1, v: 2}})
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.True(t, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(5, 6) sampleEq(5, 6)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}})
require.True(t, it.Seek(5), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed")
sampleEq(5, 6) sampleEq(5, 6)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}})
require.True(t, it.Seek(101), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed")
sampleEq(101, 10) sampleEq(101, 10)
bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}}) bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}})
require.False(t, it.Next(), "next succeeded unexpectedly") require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly")
} }
type listSeriesIterator struct { type listSeriesIterator struct {
@ -158,26 +158,42 @@ func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return s.t, s.h return s.t, s.h
} }
func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { func (it *listSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return chunkenc.EncXOR s := it.list[it.idx]
return s.t, s.fh
} }
func (it *listSeriesIterator) Next() bool { func (it *listSeriesIterator) AtT() int64 {
s := it.list[it.idx]
return s.t
}
func (it *listSeriesIterator) Next() chunkenc.ValueType {
it.idx++ it.idx++
return it.idx < len(it.list) if it.idx >= len(it.list) {
return chunkenc.ValNone
}
return it.list[it.idx].Type()
} }
func (it *listSeriesIterator) Seek(t int64) bool { func (it *listSeriesIterator) Seek(t int64) chunkenc.ValueType {
if it.idx == -1 { if it.idx == -1 {
it.idx = 0 it.idx = 0
} }
// No-op check.
if s := it.list[it.idx]; s.t >= t {
return s.Type()
}
// Do binary search between current position and end. // Do binary search between current position and end.
it.idx = sort.Search(len(it.list)-it.idx, func(i int) bool { it.idx += sort.Search(len(it.list)-it.idx, func(i int) bool {
s := it.list[i+it.idx] s := it.list[i+it.idx]
return s.t >= t return s.t >= t
}) })
return it.idx < len(it.list) if it.idx >= len(it.list) {
return chunkenc.ValNone
}
return it.list[it.idx].Type()
} }
func (it *listSeriesIterator) Err() error { func (it *listSeriesIterator) Err() error {

View File

@ -28,6 +28,8 @@ type Sample interface {
T() int64 T() int64
V() float64 V() float64
H() *histogram.Histogram H() *histogram.Histogram
FH() *histogram.FloatHistogram
Type() chunkenc.ValueType
} }
type SampleSlice []Sample type SampleSlice []Sample

View File

@ -33,6 +33,7 @@ import (
"github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/promql/parser"
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb" "github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/chunkenc"
) )
var ( var (
@ -110,9 +111,10 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) {
var t int64 var t int64
var v float64 var v float64
var ok bool
ok := it.Seek(maxt) valueType := it.Seek(maxt)
if ok { if valueType == chunkenc.ValFloat {
t, v = it.Values() t, v = it.Values()
} else { } else {
// TODO(beorn7): Handle histograms. // TODO(beorn7): Handle histograms.