mirror of https://github.com/prometheus/prometheus
commit a075900f9a: Merge branch 'beorn7/persistence' into beorn7/ingestion-tweaks
@@ -639,15 +639,12 @@ func evalScalarBinop(opType BinOpType,
 	case Mul:
 		return lhs * rhs
 	case Div:
-		if rhs != 0 {
-			return lhs / rhs
-		}
-		return clientmodel.SampleValue(math.Inf(int(rhs)))
+		return lhs / rhs
 	case Mod:
 		if rhs != 0 {
 			return clientmodel.SampleValue(int(lhs) % int(rhs))
 		}
-		return clientmodel.SampleValue(math.Inf(int(rhs)))
+		return clientmodel.SampleValue(math.NaN())
 	case EQ:
 		if lhs == rhs {
 			return 1
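Why the guard can go: Go's float64 arithmetic follows IEEE 754, so division by zero already yields the signed infinities and NaN that the old code tried to build by hand. The old fallback was also wrong: math.Inf(int(rhs)) with rhs == 0 always returns +Inf regardless of the sign of lhs, which is what the new -1 / 0 test below catches. Integer modulo, by contrast, panics on a zero divisor, so that guard stays, now falling back to NaN instead of Inf. A standalone sketch (not part of the commit) demonstrating both behaviors; the same simplification is applied to evalVectorBinop in the next hunk:

package main

import "fmt"

func main() {
	var zero float64

	// float64 division follows IEEE 754, so no guard is needed:
	fmt.Println(1.0 / zero)  // +Inf
	fmt.Println(-1.0 / zero) // -Inf
	fmt.Println(zero / zero) // NaN

	// Integer modulo by zero, however, panics at runtime, so the
	// rhs != 0 guard around int(lhs) % int(rhs) has to stay, with
	// NaN as the mathematically sensible fallback.
	defer func() { fmt.Println("recovered:", recover()) }()
	n := 0
	fmt.Println(1 % n) // panics: integer divide by zero
}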
@@ -693,15 +690,12 @@ func evalVectorBinop(opType BinOpType,
 	case Mul:
 		return lhs * rhs, true
 	case Div:
-		if rhs != 0 {
-			return lhs / rhs, true
-		}
-		return clientmodel.SampleValue(math.Inf(int(rhs))), true
+		return lhs / rhs, true
 	case Mod:
 		if rhs != 0 {
 			return clientmodel.SampleValue(int(lhs) % int(rhs)), true
 		}
-		return clientmodel.SampleValue(math.Inf(int(rhs))), true
+		return clientmodel.SampleValue(math.NaN()), true
 	case EQ:
 		if lhs == rhs {
 			return lhs, true
@@ -456,6 +456,50 @@ func ceilImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
 	return vector
 }
 
+// === exp(vector VectorNode) Vector ===
+func expImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
+	n := args[0].(VectorNode)
+	vector := n.Eval(timestamp)
+	for _, el := range vector {
+		el.Metric.Delete(clientmodel.MetricNameLabel)
+		el.Value = clientmodel.SampleValue(math.Exp(float64(el.Value)))
+	}
+	return vector
+}
+
+// === ln(vector VectorNode) Vector ===
+func lnImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
+	n := args[0].(VectorNode)
+	vector := n.Eval(timestamp)
+	for _, el := range vector {
+		el.Metric.Delete(clientmodel.MetricNameLabel)
+		el.Value = clientmodel.SampleValue(math.Log(float64(el.Value)))
+	}
+	return vector
+}
+
+// === log2(vector VectorNode) Vector ===
+func log2Impl(timestamp clientmodel.Timestamp, args []Node) interface{} {
+	n := args[0].(VectorNode)
+	vector := n.Eval(timestamp)
+	for _, el := range vector {
+		el.Metric.Delete(clientmodel.MetricNameLabel)
+		el.Value = clientmodel.SampleValue(math.Log2(float64(el.Value)))
+	}
+	return vector
+}
+
+// === log10(vector VectorNode) Vector ===
+func log10Impl(timestamp clientmodel.Timestamp, args []Node) interface{} {
+	n := args[0].(VectorNode)
+	vector := n.Eval(timestamp)
+	for _, el := range vector {
+		el.Metric.Delete(clientmodel.MetricNameLabel)
+		el.Value = clientmodel.SampleValue(math.Log10(float64(el.Value)))
+	}
+	return vector
+}
+
 // === deriv(node MatrixNode) Vector ===
 func derivImpl(timestamp clientmodel.Timestamp, args []Node) interface{} {
 	matrixNode := args[0].(MatrixNode)
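All four implementations follow the same drop-the-metric-name, map-the-value pattern, differing only in the math function applied. A hypothetical refactoring (not in this commit; the helper name is illustrative) that would factor out the shared loop:

// vectorMathFn builds a callFn that applies fn to every element of
// the evaluated vector, dropping the metric name first because the
// result no longer represents the original metric.
func vectorMathFn(fn func(float64) float64) func(clientmodel.Timestamp, []Node) interface{} {
	return func(timestamp clientmodel.Timestamp, args []Node) interface{} {
		vector := args[0].(VectorNode).Eval(timestamp)
		for _, el := range vector {
			el.Metric.Delete(clientmodel.MetricNameLabel)
			el.Value = clientmodel.SampleValue(fn(float64(el.Value)))
		}
		return vector
	}
}

// expImpl would then be vectorMathFn(math.Exp), lnImpl would be
// vectorMathFn(math.Log), and so on for math.Log2 and math.Log10.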
@@ -598,6 +642,12 @@ var functions = map[string]*Function{
 		returnType: VectorType,
 		callFn:     dropCommonLabelsImpl,
 	},
+	"exp": {
+		name:       "exp",
+		argTypes:   []ExprType{VectorType},
+		returnType: VectorType,
+		callFn:     expImpl,
+	},
 	"floor": {
 		name:       "floor",
 		argTypes:   []ExprType{VectorType},
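Registering the map entry is what makes exp callable from the expression language; the test cases added further down exercise it, for example:

	exp(vector_matching_a)
	exp(ln(vector_matching_a))   // identity, up to floating-point rounding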
@@ -610,6 +660,24 @@ var functions = map[string]*Function{
 		returnType: VectorType,
 		callFn:     histogramQuantileImpl,
 	},
+	"ln": {
+		name:       "ln",
+		argTypes:   []ExprType{VectorType},
+		returnType: VectorType,
+		callFn:     lnImpl,
+	},
+	"log10": {
+		name:       "log10",
+		argTypes:   []ExprType{VectorType},
+		returnType: VectorType,
+		callFn:     log10Impl,
+	},
+	"log2": {
+		name:       "log2",
+		argTypes:   []ExprType{VectorType},
+		returnType: VectorType,
+		callFn:     log2Impl,
+	},
 	"max_over_time": {
 		name:       "max_over_time",
 		argTypes:   []ExprType{MatrixType},
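For context, a sketch of how such a registration is presumably consulted at evaluation time (assumed shape, not shown in this diff; only the name, argTypes, and callFn fields used below appear in the patch):

func callFunction(name string, timestamp clientmodel.Timestamp, args []Node) (interface{}, error) {
	fn, ok := functions[name]
	if !ok {
		return nil, fmt.Errorf("unknown function %q", name)
	}
	if len(args) != len(fn.argTypes) {
		return nil, fmt.Errorf("%q expects %d argument(s), got %d", name, len(fn.argTypes), len(args))
	}
	// Argument types would be checked against fn.argTypes during
	// parsing or here; then the registered implementation runs.
	return fn.callFn(timestamp, args), nil
}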
@@ -36,7 +36,7 @@ var (
 	testEvalTime = testStartTime.Add(testSampleInterval * 10)
 	fixturesPath = "fixtures"
 
-	reSample  = regexp.MustCompile(`^(.*)(?: \=\>|:) (\-?\d+\.?\d*e?\d*|[+-]Inf|NaN) \@\[(\d+)\]$`)
+	reSample  = regexp.MustCompile(`^(.*)(?: \=\>|:) (\-?\d+\.?\d*(?:e-?\d*)?|[+-]Inf|NaN) \@\[(\d+)\]$`)
 	minNormal = math.Float64frombits(0x0010000000000000) // The smallest positive normal value of type float64.
 )
 
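The regexp change is forced by the new tests: exp(vector_matching_a - 20) prints 4.5399929762484854e-05, and the old value pattern cannot match the minus sign in a negative exponent. A standalone check, reduced to the value-matching alternation (the surrounding capture groups and timestamp part are elided here):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldRe := regexp.MustCompile(`^(\-?\d+\.?\d*e?\d*|[+-]Inf|NaN)$`)
	newRe := regexp.MustCompile(`^(\-?\d+\.?\d*(?:e-?\d*)?|[+-]Inf|NaN)$`)

	v := "4.5399929762484854e-05"
	fmt.Println(oldRe.MatchString(v)) // false: after "e", \d* cannot consume "-"
	fmt.Println(newRe.MatchString(v)) // true: (?:e-?\d*)? permits a negative exponent
}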
@@ -1157,6 +1157,137 @@ func TestExpressions(t *testing.T) {
 		expr:   `999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999`,
 		output: []string{`scalar: +Inf @[%v]`},
 	},
+	{
+		expr:   `1 / 0`,
+		output: []string{`scalar: +Inf @[%v]`},
+	},
+	{
+		expr:   `-1 / 0`,
+		output: []string{`scalar: -Inf @[%v]`},
+	},
+	{
+		expr:   `0 / 0`,
+		output: []string{`scalar: NaN @[%v]`},
+	},
+	{
+		expr:   `1 % 0`,
+		output: []string{`scalar: NaN @[%v]`},
+	},
+	{
+		expr: `http_requests{group="canary", instance="0", job="api-server"} / 0`,
+		output: []string{
+			`{group="canary", instance="0", job="api-server"} => +Inf @[%v]`,
+		},
+	},
+	{
+		expr: `-1 * http_requests{group="canary", instance="0", job="api-server"} / 0`,
+		output: []string{
+			`{group="canary", instance="0", job="api-server"} => -Inf @[%v]`,
+		},
+	},
+	{
+		expr: `0 * http_requests{group="canary", instance="0", job="api-server"} / 0`,
+		output: []string{
+			`{group="canary", instance="0", job="api-server"} => NaN @[%v]`,
+		},
+	},
+	{
+		expr: `0 * http_requests{group="canary", instance="0", job="api-server"} % 0`,
+		output: []string{
+			`{group="canary", instance="0", job="api-server"} => NaN @[%v]`,
+		},
+	},
+	{
+		expr: `exp(vector_matching_a)`,
+		output: []string{
+			`{l="x"} => 22026.465794806718 @[%v]`,
+			`{l="y"} => 485165195.4097903 @[%v]`,
+		},
+	},
+	{
+		expr: `exp(vector_matching_a - 10)`,
+		output: []string{
+			`{l="y"} => 22026.465794806718 @[%v]`,
+			`{l="x"} => 1 @[%v]`,
+		},
+	},
+	{
+		expr: `exp(vector_matching_a - 20)`,
+		output: []string{
+			`{l="x"} => 4.5399929762484854e-05 @[%v]`,
+			`{l="y"} => 1 @[%v]`,
+		},
+	},
+	{
+		expr: `ln(vector_matching_a)`,
+		output: []string{
+			`{l="x"} => 2.302585092994046 @[%v]`,
+			`{l="y"} => 2.995732273553991 @[%v]`,
+		},
+	},
+	{
+		expr: `ln(vector_matching_a - 10)`,
+		output: []string{
+			`{l="y"} => 2.302585092994046 @[%v]`,
+			`{l="x"} => -Inf @[%v]`,
+		},
+	},
+	{
+		expr: `ln(vector_matching_a - 20)`,
+		output: []string{
+			`{l="y"} => -Inf @[%v]`,
+			`{l="x"} => NaN @[%v]`,
+		},
+	},
+	{
+		expr: `exp(ln(vector_matching_a))`,
+		output: []string{
+			`{l="y"} => 20 @[%v]`,
+			`{l="x"} => 10 @[%v]`,
+		},
+	},
+	{
+		expr: `log2(vector_matching_a)`,
+		output: []string{
+			`{l="x"} => 3.3219280948873626 @[%v]`,
+			`{l="y"} => 4.321928094887363 @[%v]`,
+		},
+	},
+	{
+		expr: `log2(vector_matching_a - 10)`,
+		output: []string{
+			`{l="y"} => 3.3219280948873626 @[%v]`,
+			`{l="x"} => -Inf @[%v]`,
+		},
+	},
+	{
+		expr: `log2(vector_matching_a - 20)`,
+		output: []string{
+			`{l="x"} => NaN @[%v]`,
+			`{l="y"} => -Inf @[%v]`,
+		},
+	},
+	{
+		expr: `log10(vector_matching_a)`,
+		output: []string{
+			`{l="x"} => 1 @[%v]`,
+			`{l="y"} => 1.301029995663981 @[%v]`,
+		},
+	},
+	{
+		expr: `log10(vector_matching_a - 10)`,
+		output: []string{
+			`{l="y"} => 1 @[%v]`,
+			`{l="x"} => -Inf @[%v]`,
+		},
+	},
+	{
+		expr: `log10(vector_matching_a - 20)`,
+		output: []string{
+			`{l="x"} => NaN @[%v]`,
+			`{l="y"} => -Inf @[%v]`,
+		},
+	},
 	}
 
 	storage, closer := newTestStorage(t)
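Several of the expected outputs above are irrational values printed to full float64 precision, and others are NaN, which is never equal to itself; minNormal (declared in the var block earlier in this diff) is the usual building block for the approximate comparison such tests need. A sketch of such a helper (assumed here, not part of the diff; epsilon is illustrative):

const epsilon = 1e-6 // relative tolerance, illustrative

// almostEqual reports whether a and b agree within epsilon relative
// error, using an absolute check near zero where relative error is
// meaningless. NaN is handled explicitly since NaN != NaN, and exact
// equality covers matching infinities.
func almostEqual(a, b float64) bool {
	if math.IsNaN(a) || math.IsNaN(b) {
		return math.IsNaN(a) && math.IsNaN(b)
	}
	if a == b {
		return true
	}
	diff := math.Abs(a - b)
	if a == 0 || b == 0 || diff < minNormal {
		return diff < epsilon*minNormal
	}
	return diff/(math.Abs(a)+math.Abs(b)) < epsilon
}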
@@ -225,7 +225,9 @@ func (p *persistence) sanitizeSeries(
 			// Everything is consistent. We are good.
 			return fp, true
 		}
-		// If we are here, something's fishy.
+		// If we are here, we cannot be sure the series file is
+		// consistent with the checkpoint, so we have to take a closer
+		// look.
 		if s.headChunkClosed {
 			// This is the easy case as we don't have any chunks in
 			// heads.db. Treat this series as a freshly unarchived
@@ -911,7 +911,7 @@ func (p *persistence) dropAndPersistChunks(
 	// Otherwise, seek backwards to the beginning of its header and start
 	// copying everything from there into a new file. Then append the chunks
 	// to the new file.
-	_, err = f.Seek(-(chunkHeaderLen), os.SEEK_CUR)
+	_, err = f.Seek(-chunkHeaderLen, os.SEEK_CUR)
 	if err != nil {
 		return
 	}
@@ -32,7 +32,6 @@ const (
 	chunkDescEvictionFactor = 10
 
 	headChunkTimeout = time.Hour // Close head chunk if not touched for that long.
-
 )
 
 // fingerprintSeriesPair pairs a fingerprint with a memorySeries pointer.
@@ -168,7 +167,7 @@ type memorySeries struct {
 	// appended.
 	headChunkUsedByIterator bool
 	// Whether the series is inconsistent with the last checkpoint in a way
-	// that would require a desk seek during crash recovery.
+	// that would require a disk seek during crash recovery.
 	dirty bool
 }
 
@@ -233,7 +232,7 @@ func (s *memorySeries) add(v *metric.SamplePair) int {
 }
 
 // maybeCloseHeadChunk closes the head chunk if it has not been touched for the
-// duration of headChunkTimeout. It returns wether the head chunk was closed.
+// duration of headChunkTimeout. It returns whether the head chunk was closed.
 // If the head chunk is already closed, the method is a no-op and returns false.
 //
 // The caller must have locked the fingerprint of the series.
@@ -761,8 +761,8 @@ func (s *memorySeriesStorage) maintainMemorySeries(
 }
 
 // writeMemorySeries (re-)writes a memory series file. While doing so, it drops
-// chunks older than beforeTime from both, the series file (if it exists) as
-// well as from memory. The provided chunksToPersist are appended to the newly
+// chunks older than beforeTime from both the series file (if it exists) as well
+// as from memory. The provided chunksToPersist are appended to the newly
 // written series file. If no chunks need to be purged, but chunksToPersist is
 // not empty, those chunks are simply appended to the series file. If the series
 // contains no chunks after dropping old chunks, it is purged entirely. In that