@ -104,6 +104,36 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str
return result
return result
}
}
// queryChunks runs a matcher query against the chunk querier and fully
// expands the returned data, keyed by each series' label string. Series
// that yield no chunks are omitted. The querier is closed before returning.
func queryChunks(t testing.TB, q storage.ChunkQuerier, matchers ...*labels.Matcher) map[string][]chunks.Meta {
	defer func() {
		testutil.Ok(t, q.Close())
	}()

	ss := q.Select(false, nil, matchers...)
	result := make(map[string][]chunks.Meta)
	for ss.Next() {
		s := ss.At()

		var chks []chunks.Meta
		iter := s.Iterator()
		for iter.Next() {
			chks = append(chks, iter.At())
		}
		testutil.Ok(t, iter.Err())

		// Skip series without any expanded chunks.
		if len(chks) > 0 {
			result[s.Labels().String()] = chks
		}
	}
	testutil.Ok(t, ss.Err())
	testutil.Equals(t, 0, len(ss.Warnings()))
	return result
}
// Ensure that blocks are held in memory in their time order
// Ensure that blocks are held in memory in their time order
// and not in ULID order as they are read from the directory.
// and not in ULID order as they are read from the directory.
func TestDB_reloadOrder ( t * testing . T ) {
func TestDB_reloadOrder ( t * testing . T ) {
@ -387,7 +417,7 @@ Outer:
}
}
expss := newMockSeriesSet ( [ ] storage . Series {
expss := newMockSeriesSet ( [ ] storage . Series {
newSeries ( map [ string ] string { "a" : "b" } , expSamples ) ,
storage . NewListSeries ( labels . FromStrings ( "a" , "b" ) , expSamples ) ,
} )
} )
for {
for {
@ -403,8 +433,8 @@ Outer:
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplExp , errExp := storage . ExpandSamples ( sexp . Iterator ( ) , nil )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
smplRes , errRes := storage . ExpandSamples ( sres . Iterator ( ) , nil )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
testutil . Equals ( t , smplExp , smplRes )
@ -683,7 +713,7 @@ Outer:
}
}
expss := newMockSeriesSet ( [ ] storage . Series {
expss := newMockSeriesSet ( [ ] storage . Series {
newSeries ( map [ string ] string { "a" : "b" } , expSamples ) ,
storage . NewListSeries ( labels . FromStrings ( "a" , "b" ) , expSamples ) ,
} )
} )
if len ( expSamples ) == 0 {
if len ( expSamples ) == 0 {
@ -704,8 +734,8 @@ Outer:
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplExp , errExp := storage . ExpandSamples ( sexp . Iterator ( ) , nil )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
smplRes , errRes := storage . ExpandSamples ( sres . Iterator ( ) , nil )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
testutil . Equals ( t , smplExp , smplRes )
@ -854,7 +884,7 @@ func TestDB_e2e(t *testing.T) {
for ss . Next ( ) {
for ss . Next ( ) {
x := ss . At ( )
x := ss . At ( )
smpls , err := expandSeriesIterator ( x . Iterator ( ) )
smpls , err := storage . ExpandSamples ( x . Iterator ( ) , newSample )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
if len ( smpls ) > 0 {
if len ( smpls ) > 0 {
@ -1033,7 +1063,7 @@ func TestTombstoneClean(t *testing.T) {
}
}
expss := newMockSeriesSet ( [ ] storage . Series {
expss := newMockSeriesSet ( [ ] storage . Series {
newSeries ( map [ string ] string { "a" : "b" } , expSamples ) ,
storage . NewListSeries ( labels . FromStrings ( "a" , "b" ) , expSamples ) ,
} )
} )
if len ( expSamples ) == 0 {
if len ( expSamples ) == 0 {
@ -1053,8 +1083,8 @@ func TestTombstoneClean(t *testing.T) {
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
testutil . Equals ( t , sexp . Labels ( ) , sres . Labels ( ) )
smplExp , errExp := expandSeriesIterator ( sexp . Iterator ( ) )
smplExp , errExp := storage . ExpandSamples ( sexp . Iterator ( ) , nil )
smplRes , errRes := expandSeriesIterator ( sres . Iterator ( ) )
smplRes , errRes := storage . ExpandSamples ( sres . Iterator ( ) , nil )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , errExp , errRes )
testutil . Equals ( t , smplExp , smplRes )
testutil . Equals ( t , smplExp , smplRes )
@ -1550,6 +1580,8 @@ func TestQuerierWithBoundaryChunks(t *testing.T) {
for i := int64 ( 0 ) ; i < 5 ; i ++ {
for i := int64 ( 0 ) ; i < 5 ; i ++ {
_ , err := app . Add ( label , i * blockRange , 0 )
_ , err := app . Add ( label , i * blockRange , 0 )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
_ , err = app . Add ( labels . FromStrings ( "blockID" , strconv . FormatInt ( i , 10 ) ) , i * blockRange , 0 )
testutil . Ok ( t , err )
}
}
err := app . Commit ( )
err := app . Commit ( )
@ -1564,9 +1596,11 @@ func TestQuerierWithBoundaryChunks(t *testing.T) {
testutil . Ok ( t , err )
testutil . Ok ( t , err )
defer q . Close ( )
defer q . Close ( )
// The requested interval covers 2 blocks, so the querier should contain 2 blocks.
// The requested interval covers 2 blocks, so the querier's label values for blockID should give us 2 values, one from each block.
count := len ( q . ( * querier ) . blocks )
b , ws , err := q . LabelValues ( "blockID" )
testutil . Assert ( t , count == 2 , "expected 2 blocks in querier, got %d" , count )
testutil . Ok ( t , err )
testutil . Equals ( t , storage . Warnings ( nil ) , ws )
testutil . Equals ( t , [ ] string { "1" , "2" } , b )
}
}
// TestInitializeHeadTimestamp ensures that the h.minTime is set properly.
// TestInitializeHeadTimestamp ensures that the h.minTime is set properly.
@ -1930,371 +1964,6 @@ func TestCorrectNumTombstones(t *testing.T) {
testutil . Equals ( t , uint64 ( 3 ) , db . blocks [ 0 ] . meta . Stats . NumTombstones )
testutil . Equals ( t , uint64 ( 3 ) , db . blocks [ 0 ] . meta . Stats . NumTombstones )
}
}
// TestVerticalCompaction ensures that overlapping blocks are merged correctly:
// first at query time (vertical query merging) while the blocks still overlap,
// then again after running compaction (vertical compaction), checking both the
// resulting block count and the overlappingBlocks metric.
func TestVerticalCompaction(t *testing.T) {
	cases := []struct {
		blockSeries          [][]storage.Series           // series content of each overlapping input block
		expSeries            map[string][]tsdbutil.Sample // expected merged samples per series label string
		expBlockNum          int                          // expected number of blocks after compaction
		expOverlappingBlocks int                          // expected overlappingBlocks metric after compaction
	}{
		// Case 0
		// |--------------|
		//        |----------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{7, 0}, sample{8, 0}, sample{9, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{7, 99},
						sample{8, 99}, sample{9, 99}, sample{10, 99}, sample{11, 99},
						sample{12, 99}, sample{13, 99}, sample{14, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
				sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 99},
				sample{8, 99}, sample{9, 99}, sample{10, 99}, sample{11, 99},
				sample{12, 99}, sample{13, 99}, sample{14, 99},
			}},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 1
		// |-------------------------------|
		//        |----------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{7, 0}, sample{8, 0}, sample{9, 0},
						sample{11, 0}, sample{13, 0}, sample{17, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{7, 99},
						sample{8, 99}, sample{9, 99}, sample{10, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
				sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 99},
				sample{8, 99}, sample{9, 99}, sample{10, 99}, sample{11, 0},
				sample{13, 0}, sample{17, 0},
			}},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 2
		// |-------------------------------|
		//        |------------|
		//                           |--------------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{7, 0}, sample{8, 0}, sample{9, 0},
						sample{11, 0}, sample{13, 0}, sample{17, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{7, 99},
						sample{8, 99}, sample{9, 99},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{14, 59}, sample{15, 59}, sample{17, 59}, sample{20, 59},
						sample{21, 59}, sample{22, 59},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
				sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 99},
				sample{8, 99}, sample{9, 99}, sample{11, 0}, sample{13, 0},
				sample{14, 59}, sample{15, 59}, sample{17, 59}, sample{20, 59},
				sample{21, 59}, sample{22, 59},
			}},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 3
		// |-------------------|
		//                           |--------------------|
		//               |----------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{8, 0}, sample{9, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{14, 59}, sample{15, 59}, sample{17, 59}, sample{20, 59},
						sample{21, 59}, sample{22, 59},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{5, 99}, sample{6, 99}, sample{7, 99}, sample{8, 99},
						sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{15, 99},
						sample{16, 99}, sample{17, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
				sample{5, 99}, sample{6, 99}, sample{7, 99}, sample{8, 99},
				sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{14, 59},
				sample{15, 59}, sample{16, 99}, sample{17, 59}, sample{20, 59},
				sample{21, 59}, sample{22, 59},
			}},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 4
		// |-------------------------------------|
		//            |------------|
		//      |-------------------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{8, 0}, sample{9, 0}, sample{10, 0},
						sample{13, 0}, sample{15, 0}, sample{16, 0}, sample{17, 0},
						sample{20, 0}, sample{22, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
						sample{11, 59},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{8, 99},
						sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{15, 99},
						sample{16, 99}, sample{17, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
				sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 59},
				sample{8, 59}, sample{9, 59}, sample{10, 59}, sample{11, 59},
				sample{13, 99}, sample{15, 99}, sample{16, 99}, sample{17, 99},
				sample{20, 0}, sample{22, 0},
			}},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 5: series are merged properly when there are multiple series.
		// |-------------------------------------|
		//            |------------|
		//      |-------------------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{8, 0}, sample{9, 0}, sample{10, 0},
						sample{13, 0}, sample{15, 0}, sample{16, 0}, sample{17, 0},
						sample{20, 0}, sample{22, 0},
					}),
					newSeries(map[string]string{"b": "c"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{8, 0}, sample{9, 0}, sample{10, 0},
						sample{13, 0}, sample{15, 0}, sample{16, 0}, sample{17, 0},
						sample{20, 0}, sample{22, 0},
					}),
					newSeries(map[string]string{"c": "d"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{8, 0}, sample{9, 0}, sample{10, 0},
						sample{13, 0}, sample{15, 0}, sample{16, 0}, sample{17, 0},
						sample{20, 0}, sample{22, 0},
					}),
				},
				{
					newSeries(map[string]string{"__name__": "a"}, []tsdbutil.Sample{
						sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
						sample{11, 59},
					}),
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
						sample{11, 59},
					}),
					newSeries(map[string]string{"aa": "bb"}, []tsdbutil.Sample{
						sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
						sample{11, 59},
					}),
					newSeries(map[string]string{"c": "d"}, []tsdbutil.Sample{
						sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
						sample{11, 59},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{8, 99},
						sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{15, 99},
						sample{16, 99}, sample{17, 99},
					}),
					newSeries(map[string]string{"aa": "bb"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{8, 99},
						sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{15, 99},
						sample{16, 99}, sample{17, 99},
					}),
					newSeries(map[string]string{"c": "d"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{8, 99},
						sample{9, 99}, sample{10, 99}, sample{13, 99}, sample{15, 99},
						sample{16, 99}, sample{17, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{
				`{__name__="a"}`: {
					sample{7, 59}, sample{8, 59}, sample{9, 59}, sample{10, 59},
					sample{11, 59},
				},
				`{a="b"}`: {
					sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
					sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 59},
					sample{8, 59}, sample{9, 59}, sample{10, 59}, sample{11, 59},
					sample{13, 99}, sample{15, 99}, sample{16, 99}, sample{17, 99},
					sample{20, 0}, sample{22, 0},
				},
				`{aa="bb"}`: {
					sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{7, 59},
					sample{8, 59}, sample{9, 59}, sample{10, 59}, sample{11, 59},
					sample{13, 99}, sample{15, 99}, sample{16, 99}, sample{17, 99},
				},
				`{b="c"}`: {
					sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
					sample{5, 0}, sample{8, 0}, sample{9, 0}, sample{10, 0},
					sample{13, 0}, sample{15, 0}, sample{16, 0}, sample{17, 0},
					sample{20, 0}, sample{22, 0},
				},
				`{c="d"}`: {
					sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
					sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 59},
					sample{8, 59}, sample{9, 59}, sample{10, 59}, sample{11, 59},
					sample{13, 99}, sample{15, 99}, sample{16, 99}, sample{17, 99},
					sample{20, 0}, sample{22, 0},
				},
			},
			expBlockNum:          1,
			expOverlappingBlocks: 1,
		},
		// Case 6
		// |--------------|
		//        |----------------|
		//                                         |--------------|
		//                                                  |----------------|
		{
			blockSeries: [][]storage.Series{
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{4, 0},
						sample{5, 0}, sample{7, 0}, sample{8, 0}, sample{9, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{3, 99}, sample{5, 99}, sample{6, 99}, sample{7, 99},
						sample{8, 99}, sample{9, 99}, sample{10, 99}, sample{11, 99},
						sample{12, 99}, sample{13, 99}, sample{14, 99},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{20, 0}, sample{21, 0}, sample{22, 0}, sample{24, 0},
						sample{25, 0}, sample{27, 0}, sample{28, 0}, sample{29, 0},
					}),
				},
				{
					newSeries(map[string]string{"a": "b"}, []tsdbutil.Sample{
						sample{23, 99}, sample{25, 99}, sample{26, 99}, sample{27, 99},
						sample{28, 99}, sample{29, 99}, sample{30, 99}, sample{31, 99},
					}),
				},
			},
			expSeries: map[string][]tsdbutil.Sample{`{a="b"}`: {
				sample{0, 0}, sample{1, 0}, sample{2, 0}, sample{3, 99},
				sample{4, 0}, sample{5, 99}, sample{6, 99}, sample{7, 99},
				sample{8, 99}, sample{9, 99}, sample{10, 99}, sample{11, 99},
				sample{12, 99}, sample{13, 99}, sample{14, 99},
				sample{20, 0}, sample{21, 0}, sample{22, 0}, sample{23, 99},
				sample{24, 0}, sample{25, 99}, sample{26, 99}, sample{27, 99},
				sample{28, 99}, sample{29, 99}, sample{30, 99}, sample{31, 99},
			}},
			// Two disjoint overlapping groups compact into two blocks.
			expBlockNum:          2,
			expOverlappingBlocks: 2,
		},
	}

	defaultMatcher := labels.MustNewMatcher(labels.MatchRegexp, "__name__", ".*")
	for _, c := range cases {
		if ok := t.Run("", func(t *testing.T) {
			tmpdir, err := ioutil.TempDir("", "data")
			testutil.Ok(t, err)
			defer func() {
				testutil.Ok(t, os.RemoveAll(tmpdir))
			}()

			for _, series := range c.blockSeries {
				createBlock(t, tmpdir, series)
			}
			opts := DefaultOptions()
			// Overlapping blocks must be explicitly allowed for this test.
			opts.AllowOverlappingBlocks = true
			db, err := Open(tmpdir, nil, nil, opts)
			testutil.Ok(t, err)
			defer func() {
				testutil.Ok(t, db.Close())
			}()
			db.DisableCompactions()
			testutil.Assert(t, len(db.blocks) == len(c.blockSeries), "Wrong number of blocks [before compact].")

			// Vertical Query Merging test.
			querier, err := db.Querier(context.TODO(), 0, 100)
			testutil.Ok(t, err)
			actSeries := query(t, querier, defaultMatcher)
			testutil.Equals(t, c.expSeries, actSeries)

			// Vertical compaction.
			lc := db.compactor.(*LeveledCompactor)
			testutil.Equals(t, 0, int(prom_testutil.ToFloat64(lc.metrics.overlappingBlocks)), "overlapping blocks count should be still 0 here")
			err = db.Compact()
			testutil.Ok(t, err)
			testutil.Equals(t, c.expBlockNum, len(db.Blocks()), "Wrong number of blocks [after compact]")
			testutil.Equals(t, c.expOverlappingBlocks, int(prom_testutil.ToFloat64(lc.metrics.overlappingBlocks)), "overlapping blocks count mismatch")

			// Query test after merging the overlapping blocks.
			querier, err = db.Querier(context.TODO(), 0, 100)
			testutil.Ok(t, err)
			actSeries = query(t, querier, defaultMatcher)
			testutil.Equals(t, c.expSeries, actSeries)
		}); !ok {
			return
		}
	}
}
// TestBlockRanges checks the following use cases:
// TestBlockRanges checks the following use cases:
// - No samples can be added with timestamps lower than the last block maxt.
// - No samples can be added with timestamps lower than the last block maxt.
// - The compactor doesn't create overlapping blocks
// - The compactor doesn't create overlapping blocks
@ -2396,14 +2065,14 @@ func TestBlockRanges(t *testing.T) {
// It also checks that the API calls return equivalent results as a normal db.Open() mode.
// It also checks that the API calls return equivalent results as a normal db.Open() mode.
func TestDBReadOnly ( t * testing . T ) {
func TestDBReadOnly ( t * testing . T ) {
var (
var (
dbDir string
dbDir string
logger = log . NewLogfmtLogger ( log . NewSyncWriter ( os . Stderr ) )
logger = log . NewLogfmtLogger ( log . NewSyncWriter ( os . Stderr ) )
expBlocks [ ] * Block
expBlocks [ ] * Block
expSeries map [ string ] [ ] tsdbutil . Sample
expSeries map [ string ] [ ] tsdbutil . Sample
exp SeriesCount int
exp Chunks map [ string ] [ ] chunks . Meta
expDBHash [ ] byte
expDBHash [ ] byte
matchAll = labels . MustNewMatcher ( labels . MatchEqual , "" , "" )
matchAll = labels . MustNewMatcher ( labels . MatchEqual , "" , "" )
err error
err error
)
)
// Bootstrap the db.
// Bootstrap the db.
@ -2416,15 +2085,21 @@ func TestDBReadOnly(t *testing.T) {
} ( )
} ( )
dbBlocks := [ ] * BlockMeta {
dbBlocks := [ ] * BlockMeta {
{ MinTime : 10 , MaxTime : 11 } ,
// Create three 2-sample blocks.
{ MinTime : 11 , MaxTime : 12 } ,
{ MinTime : 10 , MaxTime : 12 } ,
{ MinTime : 12 , MaxTime : 13 } ,
{ MinTime : 12 , MaxTime : 14 } ,
{ MinTime : 14 , MaxTime : 16 } ,
}
}
for _ , m := range dbBlocks {
for _ , m := range dbBlocks {
createBlock ( t , dbDir , genSeries ( 1 , 1 , m . MinTime , m . MaxTime ) )
_ = createBlock ( t , dbDir , genSeries ( 1 , 1 , m . MinTime , m . MaxTime ) )
}
}
expSeriesCount ++
// Add head to test DBReadOnly WAL reading capabilities.
w , err := wal . New ( logger , nil , filepath . Join ( dbDir , "wal" ) , true )
testutil . Ok ( t , err )
h := createHead ( t , w , genSeries ( 1 , 1 , 16 , 18 ) , dbDir )
testutil . Ok ( t , h . Close ( ) )
}
}
// Open a normal db to use for a comparison.
// Open a normal db to use for a comparison.
@ -2439,7 +2114,6 @@ func TestDBReadOnly(t *testing.T) {
_ , err = app . Add ( labels . FromStrings ( "foo" , "bar" ) , dbWritable . Head ( ) . MaxTime ( ) + 1 , 0 )
_ , err = app . Add ( labels . FromStrings ( "foo" , "bar" ) , dbWritable . Head ( ) . MaxTime ( ) + 1 , 0 )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
testutil . Ok ( t , app . Commit ( ) )
testutil . Ok ( t , app . Commit ( ) )
expSeriesCount ++
expBlocks = dbWritable . Blocks ( )
expBlocks = dbWritable . Blocks ( )
expDbSize , err := fileutil . DirSize ( dbWritable . Dir ( ) )
expDbSize , err := fileutil . DirSize ( dbWritable . Dir ( ) )
@ -2449,35 +2123,49 @@ func TestDBReadOnly(t *testing.T) {
q , err := dbWritable . Querier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
q , err := dbWritable . Querier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
expSeries = query ( t , q , matchAll )
expSeries = query ( t , q , matchAll )
cq , err := dbWritable . ChunkQuerier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
testutil . Ok ( t , err )
expChunks = queryChunks ( t , cq , matchAll )
testutil . Ok ( t , dbWritable . Close ( ) ) // Close here to allow getting the dir hash for windows.
testutil . Ok ( t , dbWritable . Close ( ) ) // Close here to allow getting the dir hash for windows.
expDBHash = testutil . DirHash ( t , dbWritable . Dir ( ) )
expDBHash = testutil . DirHash ( t , dbWritable . Dir ( ) )
}
}
// Open a read only db and ensure that the API returns the same result as the normal DB.
// Open a read only db and ensure that the API returns the same result as the normal DB.
{
dbReadOnly , err := OpenDBReadOnly ( dbDir , logger )
dbReadOnly , err := OpenDBReadOnly ( dbDir , logger )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
defer func ( ) { testutil . Ok ( t , dbReadOnly . Close ( ) ) } ( )
defer func ( ) {
testutil . Ok ( t , dbReadOnly . Close ( ) )
t . Run ( "blocks" , func ( t * testing . T ) {
} ( )
blocks , err := dbReadOnly . Blocks ( )
blocks , err := dbReadOnly . Blocks ( )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
testutil . Equals ( t , len ( expBlocks ) , len ( blocks ) )
testutil . Equals ( t , len ( expBlocks ) , len ( blocks ) )
for i , expBlock := range expBlocks {
for i , expBlock := range expBlocks {
testutil . Equals ( t , expBlock . Meta ( ) , blocks [ i ] . Meta ( ) , "block meta mismatch" )
testutil . Equals ( t , expBlock . Meta ( ) , blocks [ i ] . Meta ( ) , "block meta mismatch" )
}
}
} )
t . Run ( "querier" , func ( t * testing . T ) {
// Open a read only db and ensure that the API returns the same result as the normal DB.
q , err := dbReadOnly . Querier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
q , err := dbReadOnly . Querier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
testutil . Ok ( t , err )
testutil . Ok ( t , err )
readOnlySeries := query ( t , q , matchAll )
readOnlySeries := query ( t , q , matchAll )
readOnlyDBHash := testutil . DirHash ( t , dbDir )
readOnlyDBHash := testutil . DirHash ( t , dbDir )
testutil . Equals ( t , expSeriesCount , len ( readOnlySeries ) , "total series mismatch" )
testutil . Equals ( t , len ( expSeries ) , len ( readOnlySeries ) , "total series mismatch" )
testutil . Equals ( t , expSeries , readOnlySeries , "series mismatch" )
testutil . Equals ( t , expSeries , readOnlySeries , "series mismatch" )
testutil . Equals ( t , expDBHash , readOnlyDBHash , "after all read operations the db hash should remain the same" )
testutil . Equals ( t , expDBHash , readOnlyDBHash , "after all read operations the db hash should remain the same" )
}
} )
t . Run ( "chunk querier" , func ( t * testing . T ) {
cq , err := dbReadOnly . ChunkQuerier ( context . TODO ( ) , math . MinInt64 , math . MaxInt64 )
testutil . Ok ( t , err )
readOnlySeries := queryChunks ( t , cq , matchAll )
readOnlyDBHash := testutil . DirHash ( t , dbDir )
testutil . Equals ( t , len ( expChunks ) , len ( readOnlySeries ) , "total series mismatch" )
testutil . Equals ( t , expChunks , readOnlySeries , "series chunks mismatch" )
testutil . Equals ( t , expDBHash , readOnlyDBHash , "after all read operations the db hash should remain the same" )
} )
}
}
// TestDBReadOnlyClosing ensures that after closing the db
// TestDBReadOnlyClosing ensures that after closing the db