@@ -862,322 +862,16 @@ func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f model.Fingerprint)
 	return
 }
-func (l *LevelDBMetricPersistence) GetBoundaryValues(fp model.Fingerprint, i model.Interval, s StalenessPolicy) (open *model.Sample, end *model.Sample, err error) {
-	begin := time.Now()
-	defer func() {
-		duration := time.Since(begin)
-		recordOutcome(duration, err, map[string]string{operation: getBoundaryValues, result: success}, map[string]string{operation: getBoundaryValues, result: failure})
-	}()
-	// XXX: Maybe we will want to emit incomplete sets?
-	open, err = l.GetValueAtTime(fp, i.OldestInclusive, s)
-	if err != nil {
-		return
-	} else if open == nil {
-		return
-	}
-	end, err = l.GetValueAtTime(fp, i.NewestInclusive, s)
-	if err != nil {
-		return
-	} else if end == nil {
-		open = nil
-	}
-	return
-}
-func interpolate(x1, x2 time.Time, y1, y2 float32, e time.Time) float32 {
-	yDelta := y2 - y1
-	xDelta := x2.Sub(x1)
-	dDt := yDelta / float32(xDelta)
-	offset := float32(e.Sub(x1))
-	return y1 + (offset * dDt)
-}
-func (l *LevelDBMetricPersistence) GetValueAtTime(fp model.Fingerprint, t time.Time, s StalenessPolicy) (sample *model.Sample, err error) {
-	begin := time.Now()
-	defer func() {
-		duration := time.Since(begin)
-		recordOutcome(duration, err, map[string]string{operation: getValueAtTime, result: success}, map[string]string{operation: getValueAtTime, result: failure})
-	}()
-	// TODO: memoize/cache this or change the return type to metric.SamplePair.
-	m, err := l.GetMetricForFingerprint(fp)
-	if err != nil {
-		return
-	}
-	// Candidate for Refactoring
-	k := &dto.SampleKey{
-		Fingerprint: fp.ToDTO(),
-		Timestamp:   indexable.EncodeTime(t),
-	}
-	e, err := coding.NewProtocolBuffer(k).Encode()
-	if err != nil {
-		return
-	}
-	iterator := l.metricSamples.NewIterator(true)
-	defer iterator.Close()
-	if !iterator.Seek(e) {
-		/*
-		 * Two cases for this:
-		 * 1.) Corruption in LevelDB.
-		 * 2.) Key seek after AND outside known range.
-		 *
-		 * Once a LevelDB iterator goes invalid, it cannot be recovered; thusly,
-		 * we need to create a new in order to check if the last value in the
-		 * database is sufficient for our purposes. This is, in all reality, a
-		 * corner case but one that could bring down the system.
-		 */
-		iterator = l.metricSamples.NewIterator(true)
-		defer iterator.Close()
-		if !iterator.SeekToLast() {
-			/*
-			 * For whatever reason, the LevelDB cannot be recovered.
-			 */
-			return
-		}
-	}
-	var (
-		firstKey   *dto.SampleKey
-		firstValue *dto.SampleValueSeries
-	)
-	firstKey, err = extractSampleKey(iterator)
-	if err != nil {
-		return
-	}
+func (l LevelDBMetricPersistence) GetValueAtTime(f model.Fingerprint, t time.Time) (samples []model.SamplePair) {
+	panic("Not implemented")
+}
-	peekAhead := false
-	if !fingerprintsEqual(firstKey.Fingerprint, k.Fingerprint) {
-		/*
-		 * This allows us to grab values for metrics if our request time is after
-		 * the last recorded time subject to the staleness policy due to the nuances
-		 * of LevelDB storage:
-		 *
-		 * # Assumptions:
-		 * - K0 < K1 in terms of sorting.
-		 * - T0 < T1 in terms of sorting.
-		 *
-		 * # Data
-		 *
-		 * K0-T0
-		 * K0-T1
-		 * K0-T2
-		 * K1-T0
-		 * K1-T1
-		 *
-		 * # Scenario
-		 * K0-T3, which does not exist, is requested. LevelDB will thusly seek to
-		 * K1-T1, when K0-T2 exists as a perfectly good candidate to check subject
-		 * to the provided staleness policy and such.
-		 */
-		peekAhead = true
-	}
-	firstTime := indexable.DecodeTime(firstKey.Timestamp)
-	if t.Before(firstTime) || peekAhead {
-		if !iterator.Previous() {
-			/*
-			 * Two cases for this:
-			 * 1.) Corruption in LevelDB.
-			 * 2.) Key seek before AND outside known range.
-			 *
-			 * This is an explicit validation to ensure that if no previous values for
-			 * the series are found, the query aborts.
-			 */
-			return
-		}
+func (l LevelDBMetricPersistence) GetBoundaryValues(f model.Fingerprint, i model.Interval) (first []model.SamplePair, second []model.SamplePair) {
+	panic("Not implemented")
+}
-		var (
-			alternativeKey   *dto.SampleKey
-			alternativeValue *dto.SampleValueSeries
-		)
-		alternativeKey, err = extractSampleKey(iterator)
-		if err != nil {
-			return
-		}
-		if !fingerprintsEqual(alternativeKey.Fingerprint, k.Fingerprint) {
-			return
-		}
-		/*
-		 * At this point, we found a previous value in the same series in the
-		 * database. LevelDB originally seeked to the subsequent element given
-		 * the key, but we need to consider this adjacency instead.
-		 */
-		alternativeTime := indexable.DecodeTime(alternativeKey.Timestamp)
-		firstKey = alternativeKey
-		firstValue = alternativeValue
-		firstTime = alternativeTime
-	}
-	firstDelta := firstTime.Sub(t)
-	if firstDelta < 0 {
-		firstDelta *= -1
-	}
-	if firstDelta > s.DeltaAllowance {
-		return
-	}
-	firstValue, err = extractSampleValues(iterator)
-	if err != nil {
-		return
-	}
-	sample = model.SampleFromDTO(m, &t, firstValue)
-	if firstDelta == time.Duration(0) {
-		return
-	}
-	if !iterator.Next() {
-		/*
-		 * Two cases for this:
-		 * 1.) Corruption in LevelDB.
-		 * 2.) Key seek after AND outside known range.
-		 *
-		 * This means that there are no more values left in the storage; and if this
-		 * point is reached, we know that the one that has been found is within the
-		 * allowed staleness limits.
-		 */
-		return
-	}
-	var secondKey *dto.SampleKey
-	secondKey, err = extractSampleKey(iterator)
-	if err != nil {
-		return
-	}
-	if !fingerprintsEqual(secondKey.Fingerprint, k.Fingerprint) {
-		return
-	} else {
-		/*
-		 * At this point, current entry in the database has the same key as the
-		 * previous. For this reason, the validation logic will expect that the
-		 * distance between the two points shall not exceed the staleness policy
-		 * allowed limit to reduce interpolation errors.
-		 *
-		 * For this reason, the sample is reset in case of other subsequent
-		 * validation behaviors.
-		 */
-		sample = nil
-	}
-	secondTime := indexable.DecodeTime(secondKey.Timestamp)
-	totalDelta := secondTime.Sub(firstTime)
-	if totalDelta > s.DeltaAllowance {
-		return
-	}
-	var secondValue *dto.SampleValueSeries
-	secondValue, err = extractSampleValues(iterator)
-	if err != nil {
-		return
-	}
-	fValue := *firstValue.Value[0].Value
-	sValue := *secondValue.Value[0].Value
-	interpolated := interpolate(firstTime, secondTime, fValue, sValue, t)
-	sampleValue := &dto.SampleValueSeries{}
-	sampleValue.Value = append(sampleValue.Value, &dto.SampleValueSeries_Value{Value: &interpolated})
-	sample = model.SampleFromDTO(m, &t, sampleValue)
-	return
-}
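// Illustrative summary of the staleness handling above: with
// s.DeltaAllowance = 5m, a query at time t falling between stored samples at
// t-1m (value 10) and t+1m (value 20) returns the interpolated value 15,
// while a query whose nearest stored sample is more than 5m away returns no
// sample at all.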
-func (l *LevelDBMetricPersistence) GetRangeValues(fp model.Fingerprint, i model.Interval) (v *model.SampleSet, err error) {
-	begin := time.Now()
-	defer func() {
-		duration := time.Since(begin)
-		recordOutcome(duration, err, map[string]string{operation: getRangeValues, result: success}, map[string]string{operation: getRangeValues, result: failure})
-	}()
-	k := &dto.SampleKey{
-		Fingerprint: fp.ToDTO(),
-		Timestamp:   indexable.EncodeTime(i.OldestInclusive),
-	}
-	e, err := coding.NewProtocolBuffer(k).Encode()
-	if err != nil {
-		return
-	}
-	iterator := l.metricSamples.NewIterator(true)
-	defer iterator.Close()
-	predicate := keyIsOlderThan(i.NewestInclusive)
-	for valid := iterator.Seek(e); valid; valid = iterator.Next() {
-		retrievedKey := &dto.SampleKey{}
-		retrievedKey, err = extractSampleKey(iterator)
-		if err != nil {
-			return
-		}
-		if predicate(retrievedKey) {
-			break
-		}
-		if !fingerprintsEqual(retrievedKey.Fingerprint, k.Fingerprint) {
-			break
-		}
-		retrievedValue, err := extractSampleValues(iterator)
-		if err != nil {
-			return nil, err
-		}
-		if v == nil {
-			// TODO: memoize/cache this or change the return type to metric.SamplePair.
-			m, err := l.GetMetricForFingerprint(fp)
-			if err != nil {
-				return v, err
-			}
-			v = &model.SampleSet{
-				Metric: *m,
-			}
-		}
-		v.Values = append(v.Values, model.SamplePair{
-			Value:     model.SampleValue(*retrievedValue.Value[0].Value),
-			Timestamp: indexable.DecodeTime(retrievedKey.Timestamp),
-		})
-	}
-	// XXX: We should not explicitly sort here but rather rely on the datastore.
-	// This adds appreciable overhead.
-	if v != nil {
-		sort.Sort(v.Values)
-	}
-	return
+func (l *LevelDBMetricPersistence) GetRangeValues(f model.Fingerprint, i model.Interval) (samples []model.SamplePair) {
+	panic("Not implemented")
 }
 type MetricKeyDecoder struct{}