
tsdbutil/ChunkFromSamplesGeneric should not panic

Add error handling instead.
Prepares for #12352

Signed-off-by: György Krajcsovits <gyorgy.krajcsovits@grafana.com>
György Krajcsovits, 1 year ago
Branch: pull/12582/head
Commit: d4e355243a
4 changed files:
  storage/series.go         18 lines changed
  tsdb/db_test.go           26 lines changed
  tsdb/querier_test.go      50 lines changed
  tsdb/tsdbutil/chunks.go   12 lines changed
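
At the call sites, the change amounts to this: tsdbutil.ChunkFromSamples and ChunkFromSamplesGeneric now return (chunks.Meta, error) instead of panicking when the chunk encoder cannot be created. A minimal caller-side sketch, assuming only the post-commit signatures shown in the diffs below; the main wrapper and the empty sample slice are illustrative only:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

func main() {
	// Illustrative input; any []tsdbutil.Sample works here.
	var samples []tsdbutil.Sample

	// Previously: chk := tsdbutil.ChunkFromSamples(samples)
	// (a failure inside ChunkFromSamplesGeneric would panic).
	// Now the failure comes back as an ordinary error:
	chk, err := tsdbutil.ChunkFromSamples(samples)
	if err != nil {
		fmt.Println("building chunk failed:", err)
		return
	}
	fmt.Println("chunk time range:", chk.MinTime, "-", chk.MaxTime)
}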

storage/series.go (18 lines changed)

@@ -58,7 +58,21 @@ func NewListSeries(lset labels.Labels, s []tsdbutil.Sample) *SeriesEntry {
 // NewListChunkSeriesFromSamples returns chunk series entry that allows to iterate over provided samples.
 // NOTE: It uses inefficient chunks encoding implementation, not caring about chunk size.
 // Use only for testing.
 func NewListChunkSeriesFromSamples(lset labels.Labels, samples ...[]tsdbutil.Sample) *ChunkSeriesEntry {
+	chksFromSamples := make([]chunks.Meta, 0, len(samples))
+	for _, s := range samples {
+		cfs, err := tsdbutil.ChunkFromSamples(s)
+		if err != nil {
+			return &ChunkSeriesEntry{
+				Lset: lset,
+				ChunkIteratorFn: func(it chunks.Iterator) chunks.Iterator {
+					return errChunksIterator{err: err}
+				},
+			}
+		}
+		chksFromSamples = append(chksFromSamples, cfs)
+	}
+
 	return &ChunkSeriesEntry{
 		Lset: lset,
 		ChunkIteratorFn: func(it chunks.Iterator) chunks.Iterator {
@@ -69,9 +83,7 @@ func NewListChunkSeriesFromSamples(lset labels.Labels, samples ...[]tsdbutil.Sam
 			} else {
 				chks = make([]chunks.Meta, 0, len(samples))
 			}
-			for _, s := range samples {
-				chks = append(chks, tsdbutil.ChunkFromSamples(s))
-			}
+			chks = append(chks, chksFromSamples...)
 			if existing {
 				lcsi.Reset(chks...)
 				return lcsi
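
Note that NewListChunkSeriesFromSamples keeps its signature; when chunk construction fails it returns an entry whose iterator reports the error (via errChunksIterator, defined elsewhere in the storage package). A rough consumer-side sketch, assuming the usual chunks.Iterator Next/Err contract; the label set and sample slice are placeholders:

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
	"github.com/prometheus/prometheus/tsdb/tsdbutil"
)

func main() {
	// Placeholder inputs; NewListChunkSeriesFromSamples is a test-only helper.
	lset := labels.FromStrings("foo", "bar")
	var samples []tsdbutil.Sample

	cs := storage.NewListChunkSeriesFromSamples(lset, samples)

	it := cs.Iterator(nil)
	for it.Next() {
		_ = it.At() // consume each chunks.Meta as before
	}
	// With this change, a failed ChunkFromSamples call surfaces here as an
	// iterator error rather than as a panic inside the constructor.
	if err := it.Err(); err != nil {
		fmt.Println("chunk iteration failed:", err)
	}
}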

tsdb/db_test.go (26 lines changed)

@@ -2713,14 +2713,20 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
 	require.Equal(t, map[string][]sample{`{foo="bar"}`: {{t: 0, f: 0}}}, seriesSet)
 }
 
+func assureChunkFromSamples(t *testing.T, samples []tsdbutil.Sample) chunks.Meta {
+	chks, err := tsdbutil.ChunkFromSamples(samples)
+	require.NoError(t, err)
+	return chks
+}
+
 // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
 // that the resulted segments includes the expected chunks data.
 func TestChunkWriter_ReadAfterWrite(t *testing.T) {
-	chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}})
-	chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}})
-	chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}})
-	chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}})
-	chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}})
+	chk1 := assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 1, nil, nil}})
+	chk2 := assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 2, nil, nil}})
+	chk3 := assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 3, nil, nil}})
+	chk4 := assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 4, nil, nil}})
+	chk5 := assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 5, nil, nil}})
 	chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size
 
 	tests := []struct {
@@ -2920,11 +2926,11 @@ func TestRangeForTimestamp(t *testing.T) {
 // Regression test for https://github.com/prometheus/prometheus/pull/6514.
 func TestChunkReader_ConcurrentReads(t *testing.T) {
 	chks := []chunks.Meta{
-		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}}),
-		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}}),
-		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}}),
-		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}}),
-		tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}}),
+		assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 1, nil, nil}}),
+		assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 2, nil, nil}}),
+		assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 3, nil, nil}}),
+		assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 4, nil, nil}}),
+		assureChunkFromSamples(t, []tsdbutil.Sample{sample{1, 5, nil, nil}}),
 	}
 
 	tempDir := t.TempDir()

tsdb/querier_test.go (50 lines changed)

@@ -670,7 +670,7 @@ func createFakeReaderAndNotPopulatedChunks(s ...[]tsdbutil.Sample) (*fakeChunksR
 	chks := make([]chunks.Meta, 0, len(s))
 
 	for ref, samples := range s {
-		chk := tsdbutil.ChunkFromSamples(samples)
+		chk, _ := tsdbutil.ChunkFromSamples(samples)
 		f.chks[chunks.ChunkRef(ref)] = chk.Chunk
 
 		chks = append(chks, chunks.Meta{
@@ -713,7 +713,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 			chks: [][]tsdbutil.Sample{{}},
 
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
+				assureChunkFromSamples(t, []tsdbutil.Sample{}),
 			},
 		},
 		{
@@ -721,9 +721,9 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 			chks: [][]tsdbutil.Sample{{}, {}, {}},
 
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{}),
+				assureChunkFromSamples(t, []tsdbutil.Sample{}),
+				assureChunkFromSamples(t, []tsdbutil.Sample{}),
+				assureChunkFromSamples(t, []tsdbutil.Sample{}),
 			},
 		},
 		{
@@ -736,7 +736,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
 				}),
 			},
@@ -752,10 +752,10 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
 				}),
 			},
@@ -772,13 +772,13 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{10, 22, nil, nil}, sample{203, 3493, nil, nil},
 				}),
 			},
@@ -845,10 +845,10 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{3, 5, nil, nil}, sample{6, 1, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{7, 89, nil, nil},
 				}),
 			},
@@ -865,10 +865,10 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 2, nil, nil}, sample{6, 1, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{7, 89, nil, nil}, sample{9, 8, nil, nil},
 				}),
 			},
@@ -885,10 +885,10 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{9, 8, nil, nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil},
 				}),
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{9, 8, nil, nil},
 				}),
 			},
@@ -925,7 +925,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{6, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(6)), nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil},
 					sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil},
 					sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil},
@@ -950,7 +950,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, tsdbutil.GenerateTestHistogram(1), nil},
 					sample{2, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(2)), nil},
 					sample{3, 0, tsdbutil.SetHistogramNotCounterReset(tsdbutil.GenerateTestHistogram(3)), nil},
@@ -974,7 +974,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{6, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(6))},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)},
 					sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))},
 					sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))},
@@ -999,7 +999,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, nil, tsdbutil.GenerateTestFloatHistogram(1)},
 					sample{2, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(2))},
 					sample{3, 0, nil, tsdbutil.SetFloatHistogramNotCounterReset(tsdbutil.GenerateTestFloatHistogram(3))},
@@ -1023,7 +1023,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{6, 0, tsdbutil.GenerateTestGaugeHistogram(6), nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil},
 					sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil},
 					sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil},
@@ -1048,7 +1048,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, tsdbutil.GenerateTestGaugeHistogram(1), nil},
 					sample{2, 0, tsdbutil.GenerateTestGaugeHistogram(2), nil},
 					sample{3, 0, tsdbutil.GenerateTestGaugeHistogram(3), nil},
@@ -1072,7 +1072,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{6, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(6)},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)},
 					sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)},
 					sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)},
@@ -1097,7 +1097,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
 				sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)},
 			},
 			expectedChks: []chunks.Meta{
-				tsdbutil.ChunkFromSamples([]tsdbutil.Sample{
+				assureChunkFromSamples(t, []tsdbutil.Sample{
 					sample{1, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(1)},
 					sample{2, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(2)},
 					sample{3, 0, nil, tsdbutil.GenerateTestGaugeFloatHistogram(3)},

tsdb/tsdbutil/chunks.go (12 lines changed)

@@ -40,12 +40,12 @@ func (s SampleSlice) Get(i int) Sample { return s[i] }
 func (s SampleSlice) Len() int { return len(s) }
 
 // ChunkFromSamples requires all samples to have the same type.
-func ChunkFromSamples(s []Sample) chunks.Meta {
+func ChunkFromSamples(s []Sample) (chunks.Meta, error) {
 	return ChunkFromSamplesGeneric(SampleSlice(s))
 }
 
 // ChunkFromSamplesGeneric requires all samples to have the same type.
-func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
+func ChunkFromSamplesGeneric(s Samples) (chunks.Meta, error) {
 	mint, maxt := int64(0), int64(0)
 
 	if s.Len() > 0 {
@@ -55,13 +55,13 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
 	if s.Len() == 0 {
 		return chunks.Meta{
 			Chunk: chunkenc.NewXORChunk(),
-		}
+		}, nil
 	}
 
 	sampleType := s.Get(0).Type()
 	c, err := chunkenc.NewEmptyChunk(sampleType.ChunkEncoding())
 	if err != nil {
-		panic(err) // TODO(codesome): dont panic.
+		return chunks.Meta{}, err
 	}
 
 	ca, _ := c.Appender()
@@ -92,7 +92,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
 		MinTime: mint,
 		MaxTime: maxt,
 		Chunk:   c,
-	}
+	}, nil
 }
 
 type sample struct {
@@ -130,7 +130,7 @@ func (s sample) Type() chunkenc.ValueType {
 }
 
 // PopulatedChunk creates a chunk populated with samples every second starting at minTime
-func PopulatedChunk(numSamples int, minTime int64) chunks.Meta {
+func PopulatedChunk(numSamples int, minTime int64) (chunks.Meta, error) {
 	samples := make([]Sample, numSamples)
 	for i := 0; i < numSamples; i++ {
 		samples[i] = sample{t: minTime + int64(i*1000), f: 1.0}
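
The last hunk is cut off by the page before PopulatedChunk's return statement. Given the new signature, the remainder presumably just propagates the ChunkFromSamples result; a sketch of how the whole function would read under that assumption (the final return line is inferred, not shown in the diff):

// PopulatedChunk creates a chunk populated with samples every second starting at minTime
func PopulatedChunk(numSamples int, minTime int64) (chunks.Meta, error) {
	samples := make([]Sample, numSamples)
	for i := 0; i < numSamples; i++ {
		samples[i] = sample{t: minTime + int64(i*1000), f: 1.0}
	}
	// Assumed final line: hand back the chunk and any encoding error to the caller.
	return ChunkFromSamples(samples)
}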
