mirror of https://github.com/prometheus/prometheus
commit
73e7ff1edd
|
@ -72,6 +72,7 @@ func Main() int {
|
|||
|
||||
log.Infoln("Starting prometheus", version.Info())
|
||||
log.Infoln("Build context", version.BuildContext())
|
||||
log.Infoln("Host details", Uname())
|
||||
|
||||
var (
|
||||
// sampleAppender = storage.Fanout{}
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
// Copyright 2017 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// +build !linux
|
||||
|
||||
package main
|
||||
|
||||
import "runtime"
|
||||
|
||||
// Uname for any platform other than linux.
func Uname() string {
	// uname(2) is unavailable here, so report just the OS name.
	platform := runtime.GOOS
	return "(" + platform + ")"
}
|
|
@ -0,0 +1,35 @@
|
|||
// Copyright 2017 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// Uname returns the uname of the host machine.
|
||||
func Uname() string {
|
||||
buf := syscall.Utsname{}
|
||||
err := syscall.Uname(&buf)
|
||||
if err != nil {
|
||||
log.Fatal("Error!")
|
||||
}
|
||||
str := "(" + charsToString(buf.Sysname[:])
|
||||
str += " " + charsToString(buf.Release[:])
|
||||
str += " " + charsToString(buf.Version[:])
|
||||
str += " " + charsToString(buf.Machine[:])
|
||||
str += " " + charsToString(buf.Nodename[:])
|
||||
str += " " + charsToString(buf.Domainname[:]) + ")"
|
||||
return str
|
||||
}
|
|
@ -0,0 +1,25 @@
|
|||
// Copyright 2017 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// +build 386 amd64 arm64 mips64 mips64le mips mipsle
|
||||
// +build linux
|
||||
|
||||
package main
|
||||
|
||||
// charsToString converts a NUL-terminated fixed-size C character array
// (as found in syscall.Utsname fields on most architectures) to a Go
// string, stopping at the first NUL so trailing zero bytes from the
// fixed-size buffer are not embedded in the result.
func charsToString(ca []int8) string {
	s := make([]byte, 0, len(ca))
	for _, c := range ca {
		if c == 0 {
			// C strings are NUL-terminated; everything after is padding.
			break
		}
		s = append(s, byte(c))
	}
	return string(s)
}
|
|
@ -0,0 +1,25 @@
|
|||
// Copyright 2017 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// +build arm ppc64 ppc64le s390x
|
||||
// +build linux
|
||||
|
||||
package main
|
||||
|
||||
// charsToString converts a NUL-terminated fixed-size C character array
// (as found in syscall.Utsname fields on arm/ppc64/ppc64le/s390x) to a
// Go string, stopping at the first NUL so trailing zero bytes from the
// fixed-size buffer are not embedded in the result.
func charsToString(ca []uint8) string {
	s := make([]byte, 0, len(ca))
	for _, c := range ca {
		if c == 0 {
			// C strings are NUL-terminated; everything after is padding.
			break
		}
		s = append(s, c)
	}
	return string(s)
}
|
|
@ -15,6 +15,7 @@ package rules
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
|
@ -151,7 +152,7 @@ const resolvedRetention = 15 * time.Minute
|
|||
|
||||
// Eval evaluates the rule expression and then creates pending alerts and fires
|
||||
// or removes previously pending alerts accordingly.
|
||||
func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) {
|
||||
func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURL *url.URL) (promql.Vector, error) {
|
||||
query, err := engine.NewInstantQuery(r.vector.String(), ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -194,7 +195,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.En
|
|||
tmplData,
|
||||
model.Time(timestamp.FromTime(ts)),
|
||||
engine,
|
||||
externalURLPath,
|
||||
externalURL,
|
||||
)
|
||||
result, err := tmpl.Expand()
|
||||
if err != nil {
|
||||
|
|
|
@ -112,7 +112,7 @@ const (
|
|||
type Rule interface {
|
||||
Name() string
|
||||
// eval evaluates the rule, including any associated recording or alerting actions.
|
||||
Eval(context.Context, time.Time, *promql.Engine, string) (promql.Vector, error)
|
||||
Eval(context.Context, time.Time, *promql.Engine, *url.URL) (promql.Vector, error)
|
||||
// String returns a human-readable string representation of the rule.
|
||||
String() string
|
||||
// HTMLSnippet returns a human-readable string representation of the rule,
|
||||
|
@ -270,7 +270,7 @@ func (g *Group) Eval() {
|
|||
|
||||
evalTotal.WithLabelValues(rtyp).Inc()
|
||||
|
||||
vector, err := rule.Eval(g.opts.Context, now, g.opts.QueryEngine, g.opts.ExternalURL.Path)
|
||||
vector, err := rule.Eval(g.opts.Context, now, g.opts.QueryEngine, g.opts.ExternalURL)
|
||||
if err != nil {
|
||||
// Canceled queries are intentional termination of queries. This normally
|
||||
// happens on shutdown and thus we skip logging of any errors here.
|
||||
|
|
|
@ -109,7 +109,7 @@ func TestAlertingRule(t *testing.T) {
|
|||
for i, test := range tests {
|
||||
evalTime := baseTime.Add(test.time)
|
||||
|
||||
res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), "")
|
||||
res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("Error during alerting rule evaluation: %s", err)
|
||||
}
|
||||
|
|
|
@ -16,6 +16,7 @@ package rules
|
|||
import (
|
||||
"fmt"
|
||||
"html/template"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
"golang.org/x/net/context"
|
||||
|
@ -47,7 +48,7 @@ func (rule RecordingRule) Name() string {
|
|||
}
|
||||
|
||||
// Eval evaluates the rule and then overrides the metric names and labels accordingly.
|
||||
func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ string) (promql.Vector, error) {
|
||||
func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ *url.URL) (promql.Vector, error) {
|
||||
query, err := engine.NewInstantQuery(rule.vector.String(), ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
@ -64,7 +64,7 @@ func TestRuleEval(t *testing.T) {
|
|||
|
||||
for _, test := range suite {
|
||||
rule := NewRecordingRule(test.name, test.expr, test.labels)
|
||||
result, err := rule.Eval(ctx, now, engine, "")
|
||||
result, err := rule.Eval(ctx, now, engine, nil)
|
||||
if err != nil {
|
||||
t.Fatalf("Error evaluating %s", test.name)
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"net/url"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
|
@ -110,7 +111,7 @@ type Expander struct {
|
|||
}
|
||||
|
||||
// NewTemplateExpander returns a template expander ready to use.
|
||||
func NewTemplateExpander(ctx context.Context, text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, pathPrefix string) *Expander {
|
||||
func NewTemplateExpander(ctx context.Context, text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, externalURL *url.URL) *Expander {
|
||||
return &Expander{
|
||||
text: text,
|
||||
name: name,
|
||||
|
@ -246,7 +247,10 @@ func NewTemplateExpander(ctx context.Context, text string, name string, data int
|
|||
return fmt.Sprint(t)
|
||||
},
|
||||
"pathPrefix": func() string {
|
||||
return pathPrefix
|
||||
return externalURL.Path
|
||||
},
|
||||
"externalURL": func() string {
|
||||
return externalURL.String()
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -15,6 +15,7 @@ package template
|
|||
|
||||
import (
|
||||
"math"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/prometheus/common/model"
|
||||
|
@ -198,6 +199,16 @@ func TestTemplateExpansion(t *testing.T) {
|
|||
output: "x",
|
||||
html: true,
|
||||
},
|
||||
{
|
||||
// pathPrefix.
|
||||
text: "{{ pathPrefix }}",
|
||||
output: "/path/prefix",
|
||||
},
|
||||
{
|
||||
// externalURL.
|
||||
text: "{{ externalURL }}",
|
||||
output: "http://testhost:9090/path/prefix",
|
||||
},
|
||||
}
|
||||
|
||||
time := model.Time(0)
|
||||
|
@ -221,10 +232,15 @@ func TestTemplateExpansion(t *testing.T) {
|
|||
|
||||
engine := promql.NewEngine(storage, nil)
|
||||
|
||||
extURL, err := url.Parse("http://testhost:9090/path/prefix")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for i, s := range scenarios {
|
||||
var result string
|
||||
var err error
|
||||
expander := NewTemplateExpander(context.Background(), s.text, "test", s.input, time, engine, "")
|
||||
expander := NewTemplateExpander(context.Background(), s.text, "test", s.input, time, engine, extURL)
|
||||
if s.html {
|
||||
result, err = expander.ExpandHTML(nil)
|
||||
} else {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
govalidator
|
||||
===========
|
||||
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator) [![Coverage Status](https://img.shields.io/coveralls/asaskevich/govalidator.svg)](https://coveralls.io/r/asaskevich/govalidator?branch=master) [![wercker status](https://app.wercker.com/status/1ec990b09ea86c910d5f08b0e02c6043/s "wercker status")](https://app.wercker.com/project/bykey/1ec990b09ea86c910d5f08b0e02c6043)
|
||||
[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator)
|
||||
[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator)
|
||||
|
||||
A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).
|
||||
|
||||
|
@ -96,28 +96,27 @@ govalidator.CustomTypeTagMap.Set("customByteArrayValidator", CustomTypeValidator
|
|||
func Abs(value float64) float64
|
||||
func BlackList(str, chars string) string
|
||||
func ByteLength(str string, params ...string) bool
|
||||
func StringLength(str string, params ...string) bool
|
||||
func StringMatches(s string, params ...string) bool
|
||||
func CamelCaseToUnderscore(str string) string
|
||||
func Contains(str, substring string) bool
|
||||
func Count(array []interface{}, iterator ConditionIterator) int
|
||||
func Each(array []interface{}, iterator Iterator)
|
||||
func ErrorByField(e error, field string) string
|
||||
func ErrorsByField(e error) map[string]string
|
||||
func Filter(array []interface{}, iterator ConditionIterator) []interface{}
|
||||
func Find(array []interface{}, iterator ConditionIterator) interface{}
|
||||
func GetLine(s string, index int) (string, error)
|
||||
func GetLines(s string) []string
|
||||
func IsHost(s string) bool
|
||||
func InRange(value, left, right float64) bool
|
||||
func IsASCII(str string) bool
|
||||
func IsAlpha(str string) bool
|
||||
func IsAlphanumeric(str string) bool
|
||||
func IsBase64(str string) bool
|
||||
func IsByteLength(str string, min, max int) bool
|
||||
func IsCIDR(str string) bool
|
||||
func IsCreditCard(str string) bool
|
||||
func IsDNSName(str string) bool
|
||||
func IsDataURI(str string) bool
|
||||
func IsDialString(str string) bool
|
||||
func IsDNSName(str string) bool
|
||||
func IsDivisibleBy(str, num string) bool
|
||||
func IsEmail(str string) bool
|
||||
func IsFilePath(str string) (bool, int)
|
||||
|
@ -126,6 +125,7 @@ func IsFullWidth(str string) bool
|
|||
func IsHalfWidth(str string) bool
|
||||
func IsHexadecimal(str string) bool
|
||||
func IsHexcolor(str string) bool
|
||||
func IsHost(str string) bool
|
||||
func IsIP(str string) bool
|
||||
func IsIPv4(str string) bool
|
||||
func IsIPv6(str string) bool
|
||||
|
@ -134,6 +134,8 @@ func IsISBN10(str string) bool
|
|||
func IsISBN13(str string) bool
|
||||
func IsISO3166Alpha2(str string) bool
|
||||
func IsISO3166Alpha3(str string) bool
|
||||
func IsISO4217(str string) bool
|
||||
func IsIn(str string, params ...string) bool
|
||||
func IsInt(str string) bool
|
||||
func IsJSON(str string) bool
|
||||
func IsLatitude(str string) bool
|
||||
|
@ -151,11 +153,13 @@ func IsNumeric(str string) bool
|
|||
func IsPort(str string) bool
|
||||
func IsPositive(value float64) bool
|
||||
func IsPrintableASCII(str string) bool
|
||||
func IsRFC3339(str string) bool
|
||||
func IsRGBcolor(str string) bool
|
||||
func IsRequestURI(rawurl string) bool
|
||||
func IsRequestURL(rawurl string) bool
|
||||
func IsSSN(str string) bool
|
||||
func IsSemver(str string) bool
|
||||
func IsTime(str string, format string) bool
|
||||
func IsURL(str string) bool
|
||||
func IsUTFDigit(str string) bool
|
||||
func IsUTFLetter(str string) bool
|
||||
|
@ -172,12 +176,20 @@ func LeftTrim(str, chars string) string
|
|||
func Map(array []interface{}, iterator ResultIterator) []interface{}
|
||||
func Matches(str, pattern string) bool
|
||||
func NormalizeEmail(str string) (string, error)
|
||||
func PadBoth(str string, padStr string, padLen int) string
|
||||
func PadLeft(str string, padStr string, padLen int) string
|
||||
func PadRight(str string, padStr string, padLen int) string
|
||||
func Range(str string, params ...string) bool
|
||||
func RemoveTags(s string) string
|
||||
func ReplacePattern(str, pattern, replace string) string
|
||||
func Reverse(s string) string
|
||||
func RightTrim(str, chars string) string
|
||||
func RuneLength(str string, params ...string) bool
|
||||
func SafeFileName(str string) string
|
||||
func SetFieldsRequiredByDefault(value bool)
|
||||
func Sign(value float64) float64
|
||||
func StringLength(str string, params ...string) bool
|
||||
func StringMatches(s string, params ...string) bool
|
||||
func StripLow(str string, keepNewLines bool) string
|
||||
func ToBoolean(str string) (bool, error)
|
||||
func ToFloat(str string) (float64, error)
|
||||
|
@ -190,10 +202,12 @@ func UnderscoreToCamelCase(s string) string
|
|||
func ValidateStruct(s interface{}) (bool, error)
|
||||
func WhiteList(str, chars string) string
|
||||
type ConditionIterator
|
||||
type CustomTypeValidator
|
||||
type Error
|
||||
func (e Error) Error() string
|
||||
type Errors
|
||||
func (es Errors) Error() string
|
||||
func (es Errors) Errors() []error
|
||||
type ISO3166Entry
|
||||
type Iterator
|
||||
type ParamValidator
|
||||
|
@ -253,59 +267,65 @@ For completely custom validators (interface-based), see below.
|
|||
|
||||
Here is a list of available validators for struct fields (validator - used function):
|
||||
```go
|
||||
"alpha": IsAlpha,
|
||||
"alphanum": IsAlphanumeric,
|
||||
"ascii": IsASCII,
|
||||
"base64": IsBase64,
|
||||
"creditcard": IsCreditCard,
|
||||
"datauri": IsDataURI,
|
||||
"dialstring": IsDialString,
|
||||
"dns": IsDNSName,
|
||||
"email": IsEmail,
|
||||
"float": IsFloat,
|
||||
"fullwidth": IsFullWidth,
|
||||
"halfwidth": IsHalfWidth,
|
||||
"url": IsURL,
|
||||
"dialstring": IsDialString,
|
||||
"requrl": IsRequestURL,
|
||||
"requri": IsRequestURI,
|
||||
"alpha": IsAlpha,
|
||||
"utfletter": IsUTFLetter,
|
||||
"alphanum": IsAlphanumeric,
|
||||
"utfletternum": IsUTFLetterNumeric,
|
||||
"numeric": IsNumeric,
|
||||
"utfnumeric": IsUTFNumeric,
|
||||
"utfdigit": IsUTFDigit,
|
||||
"hexadecimal": IsHexadecimal,
|
||||
"hexcolor": IsHexcolor,
|
||||
"host": IsHost,
|
||||
"int": IsInt,
|
||||
"ip": IsIP,
|
||||
"ipv4": IsIPv4,
|
||||
"ipv6": IsIPv6,
|
||||
"isbn10": IsISBN10,
|
||||
"isbn13": IsISBN13,
|
||||
"json": IsJSON,
|
||||
"latitude": IsLatitude,
|
||||
"longitude": IsLongitude,
|
||||
"lowercase": IsLowerCase,
|
||||
"mac": IsMAC,
|
||||
"multibyte": IsMultibyte,
|
||||
"null": IsNull,
|
||||
"numeric": IsNumeric,
|
||||
"port": IsPort,
|
||||
"printableascii": IsPrintableASCII,
|
||||
"requri": IsRequestURI,
|
||||
"requrl": IsRequestURL,
|
||||
"rgbcolor": IsRGBcolor,
|
||||
"ssn": IsSSN,
|
||||
"semver": IsSemver,
|
||||
"lowercase": IsLowerCase,
|
||||
"uppercase": IsUpperCase,
|
||||
"url": IsURL,
|
||||
"utfdigit": IsUTFDigit,
|
||||
"utfletter": IsUTFLetter,
|
||||
"utfletternum": IsUTFLetterNumeric,
|
||||
"utfnumeric": IsUTFNumeric,
|
||||
"int": IsInt,
|
||||
"float": IsFloat,
|
||||
"null": IsNull,
|
||||
"uuid": IsUUID,
|
||||
"uuidv3": IsUUIDv3,
|
||||
"uuidv4": IsUUIDv4,
|
||||
"uuidv5": IsUUIDv5,
|
||||
"creditcard": IsCreditCard,
|
||||
"isbn10": IsISBN10,
|
||||
"isbn13": IsISBN13,
|
||||
"json": IsJSON,
|
||||
"multibyte": IsMultibyte,
|
||||
"ascii": IsASCII,
|
||||
"printableascii": IsPrintableASCII,
|
||||
"fullwidth": IsFullWidth,
|
||||
"halfwidth": IsHalfWidth,
|
||||
"variablewidth": IsVariableWidth,
|
||||
"base64": IsBase64,
|
||||
"datauri": IsDataURI,
|
||||
"ip": IsIP,
|
||||
"port": IsPort,
|
||||
"ipv4": IsIPv4,
|
||||
"ipv6": IsIPv6,
|
||||
"dns": IsDNSName,
|
||||
"host": IsHost,
|
||||
"mac": IsMAC,
|
||||
"latitude": IsLatitude,
|
||||
"longitude": IsLongitude,
|
||||
"ssn": IsSSN,
|
||||
"semver": IsSemver,
|
||||
"rfc3339": IsRFC3339,
|
||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
||||
```
|
||||
Validators with parameters
|
||||
|
||||
```go
|
||||
"range(min|max)": Range,
|
||||
"length(min|max)": ByteLength,
|
||||
"runelength(min|max)": RuneLength,
|
||||
"matches(pattern)": StringMatches,
|
||||
"in(string1|string2|...|stringN)": IsIn,
|
||||
```
|
||||
|
||||
And here is small example of usage:
|
||||
|
|
|
@ -4,7 +4,7 @@ import "regexp"
|
|||
|
||||
// Basic regular expressions for validating strings
|
||||
const (
|
||||
Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
|
||||
Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
|
||||
CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$"
|
||||
ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
|
||||
ISBN13 string = "^(?:[0-9]{13})$"
|
||||
|
@ -14,7 +14,7 @@ const (
|
|||
UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
|
||||
Alpha string = "^[a-zA-Z]+$"
|
||||
Alphanumeric string = "^[a-zA-Z0-9]+$"
|
||||
Numeric string = "^[-+]?[0-9]+$"
|
||||
Numeric string = "^[0-9]+$"
|
||||
Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
|
||||
Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
|
||||
Hexadecimal string = "^[0-9a-fA-F]+$"
|
||||
|
@ -29,7 +29,7 @@ const (
|
|||
DataURI string = "^data:.+\\/(.+);base64$"
|
||||
Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
|
||||
Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
|
||||
DNSName string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62})*$`
|
||||
DNSName string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62})*$`
|
||||
IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
|
||||
URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
|
||||
URLUsername string = `(\S+(:\S*)?@)`
|
||||
|
@ -37,11 +37,11 @@ const (
|
|||
URLPath string = `((\/|\?|#)[^\s]*)`
|
||||
URLPort string = `(:(\d{1,5}))`
|
||||
URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))`
|
||||
URLSubdomain string = `((www\.)|([a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*))`
|
||||
URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))` + URLPort + `?` + URLPath + `?$`
|
||||
URLSubdomain string = `((www\.)|([a-zA-Z0-9]([-\.][-\._a-zA-Z0-9]+)*))`
|
||||
URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
|
||||
SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
|
||||
WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
|
||||
UnixPath string = `^((?:\/[a-zA-Z0-9\.\:]+(?:_[a-zA-Z0-9\:\.]+)*(?:\-[\:a-zA-Z0-9\.]+)*)+\/?)$`
|
||||
UnixPath string = `^(/[^/\x00]*)+/?$`
|
||||
Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
|
||||
tagName string = "valid"
|
||||
)
|
||||
|
|
|
@ -29,15 +29,21 @@ type stringValues []reflect.Value
|
|||
// ParamTagMap is a map of functions accept variants parameters
|
||||
var ParamTagMap = map[string]ParamValidator{
|
||||
"length": ByteLength,
|
||||
"range": Range,
|
||||
"runelength": RuneLength,
|
||||
"stringlength": StringLength,
|
||||
"matches": StringMatches,
|
||||
"in": isInRaw,
|
||||
}
|
||||
|
||||
// ParamTagRegexMap maps param tags to their respective regexes.
|
||||
var ParamTagRegexMap = map[string]*regexp.Regexp{
|
||||
"range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
|
||||
"matches": regexp.MustCompile(`matches\(([^)]+)\)`),
|
||||
"in": regexp.MustCompile(`^in\((.*)\)`),
|
||||
"matches": regexp.MustCompile(`^matches\((.+)\)$`),
|
||||
}
|
||||
|
||||
type customTypeTagMap struct {
|
||||
|
@ -113,6 +119,10 @@ var TagMap = map[string]Validator{
|
|||
"longitude": IsLongitude,
|
||||
"ssn": IsSSN,
|
||||
"semver": IsSemver,
|
||||
"rfc3339": IsRFC3339,
|
||||
"ISO3166Alpha2": IsISO3166Alpha2,
|
||||
"ISO3166Alpha3": IsISO3166Alpha3,
|
||||
"ISO4217": IsISO4217,
|
||||
}
|
||||
|
||||
// ISO3166Entry stores country codes
|
||||
|
@ -376,3 +386,33 @@ var ISO3166List = []ISO3166Entry{
|
|||
{"Yemen", "Yémen (le)", "YE", "YEM", "887"},
|
||||
{"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
|
||||
}
|
||||
|
||||
// ISO4217List is the list of ISO currency codes
|
||||
var ISO4217List = []string{
|
||||
"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
|
||||
"BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
|
||||
"CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
|
||||
"DJF", "DKK", "DOP", "DZD",
|
||||
"EGP", "ERN", "ETB", "EUR",
|
||||
"FJD", "FKP",
|
||||
"GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
|
||||
"HKD", "HNL", "HRK", "HTG", "HUF",
|
||||
"IDR", "ILS", "INR", "IQD", "IRR", "ISK",
|
||||
"JMD", "JOD", "JPY",
|
||||
"KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
|
||||
"LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
|
||||
"MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
|
||||
"NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
|
||||
"OMR",
|
||||
"PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
|
||||
"QAR",
|
||||
"RON", "RSD", "RUB", "RWF",
|
||||
"SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "SVC", "SYP", "SZL",
|
||||
"THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
|
||||
"UAH", "UGX", "USD", "USN", "UYI", "UYU", "UZS",
|
||||
"VEF", "VND", "VUV",
|
||||
"WST",
|
||||
"XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
|
||||
"YER",
|
||||
"ZAR", "ZMW", "ZWL",
|
||||
}
|
||||
|
|
|
@ -4,10 +4,12 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"html"
|
||||
"math"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Contains check if the string contains the substring.
|
||||
|
@ -211,3 +213,56 @@ func Truncate(str string, length int, ending string) string {
|
|||
|
||||
return str
|
||||
}
|
||||
|
||||
// PadLeft pads the left side of str with repetitions of padStr until the
// result is padLen runes long. str is returned unchanged if it is already
// at least padLen runes.
func PadLeft(str string, padStr string, padLen int) string {
	return buildPadStr(str, padStr, padLen, true, false)
}

// PadRight pads the right side of str with repetitions of padStr until the
// result is padLen runes long. str is returned unchanged if it is already
// at least padLen runes.
func PadRight(str string, padStr string, padLen int) string {
	return buildPadStr(str, padStr, padLen, false, true)
}

// PadBoth pads both sides of str with repetitions of padStr until the
// result is padLen runes long; when the padding cannot be split evenly the
// right side receives the extra rune.
func PadBoth(str string, padStr string, padLen int) string {
	return buildPadStr(str, padStr, padLen, true, true)
}

// buildPadStr pads str on the left, right or both sides with repetitions
// of padStr so the result is padLen runes long. Note the padding string
// may be unicode and more than one character; it is truncated on rune
// boundaries so multi-byte characters are never cut in half.
func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {

	// Nothing to do when str already reaches the target length. An empty
	// padStr is also rejected here: it would otherwise divide by zero in
	// the repeat-count computation below.
	if padLen <= utf8.RuneCountInString(str) || padStr == "" {
		return str
	}

	padLen -= utf8.RuneCountInString(str)

	targetLenLeft := padLen
	targetLenRight := padLen
	if padLeft && padRight {
		targetLenLeft = padLen / 2
		targetLenRight = padLen - targetLenLeft
	}

	// Repeat padStr often enough to cover the longest side, then convert
	// to runes so the truncation below happens on character boundaries
	// rather than raw byte offsets.
	strToRepeatLen := utf8.RuneCountInString(padStr)
	repeatTimes := int(math.Ceil(float64(padLen) / float64(strToRepeatLen)))
	repeated := []rune(strings.Repeat(padStr, repeatTimes))

	leftSide := ""
	if padLeft {
		leftSide = string(repeated[:targetLenLeft])
	}

	rightSide := ""
	if padRight {
		rightSide = string(repeated[:targetLenRight])
	}

	return leftSide + str + rightSide
}
|
||||
|
|
|
@ -11,13 +11,16 @@ import (
|
|||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var fieldsRequiredByDefault bool
|
||||
|
||||
const maxURLRuneCount = 2083
|
||||
const minURLRuneCount = 3
|
||||
|
||||
// SetFieldsRequiredByDefault causes validation to fail when struct fields
|
||||
// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`).
|
||||
// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
|
||||
|
@ -44,7 +47,7 @@ func IsEmail(str string) bool {
|
|||
|
||||
// IsURL check if the string is an URL.
|
||||
func IsURL(str string) bool {
|
||||
if str == "" || len(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") {
|
||||
if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") {
|
||||
return false
|
||||
}
|
||||
u, err := url.Parse(str)
|
||||
|
@ -62,7 +65,7 @@ func IsURL(str string) bool {
|
|||
}
|
||||
|
||||
// IsRequestURL check if the string rawurl, assuming
|
||||
// it was recieved in an HTTP request, is a valid
|
||||
// it was received in an HTTP request, is a valid
|
||||
// URL confirm to RFC 3986
|
||||
func IsRequestURL(rawurl string) bool {
|
||||
url, err := url.ParseRequestURI(rawurl)
|
||||
|
@ -76,7 +79,7 @@ func IsRequestURL(rawurl string) bool {
|
|||
}
|
||||
|
||||
// IsRequestURI check if the string rawurl, assuming
|
||||
// it was recieved in an HTTP request, is an
|
||||
// it was received in an HTTP request, is an
|
||||
// absolute URI or an absolute path.
|
||||
func IsRequestURI(rawurl string) bool {
|
||||
_, err := url.ParseRequestURI(rawurl)
|
||||
|
@ -458,7 +461,7 @@ func IsDNSName(str string) bool {
|
|||
// constraints already violated
|
||||
return false
|
||||
}
|
||||
return rxDNSName.MatchString(str)
|
||||
return !IsIP(str) && rxDNSName.MatchString(str)
|
||||
}
|
||||
|
||||
// IsDialString validates the given string for usage with the various Dial() functions
|
||||
|
@ -535,6 +538,17 @@ func IsLongitude(str string) bool {
|
|||
return rxLongitude.MatchString(str)
|
||||
}
|
||||
|
||||
func toJSONName(tag string) string {
|
||||
if tag == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
// JSON name always comes first. If there's no options then split[0] is
|
||||
// JSON name, if JSON name is not set, then split[0] is an empty string.
|
||||
split := strings.SplitN(tag, ",", 2)
|
||||
return split[0]
|
||||
}
|
||||
|
||||
// ValidateStruct use tags for fields.
|
||||
// result will be equal to `false` if there are any errors.
|
||||
func ValidateStruct(s interface{}) (bool, error) {
|
||||
|
@ -558,11 +572,39 @@ func ValidateStruct(s interface{}) (bool, error) {
|
|||
if typeField.PkgPath != "" {
|
||||
continue // Private field
|
||||
}
|
||||
resultField, err2 := typeCheck(valueField, typeField, val)
|
||||
structResult := true
|
||||
if valueField.Kind() == reflect.Struct {
|
||||
var err error
|
||||
structResult, err = ValidateStruct(valueField.Interface())
|
||||
if err != nil {
|
||||
errs = append(errs, err)
|
||||
}
|
||||
}
|
||||
resultField, err2 := typeCheck(valueField, typeField, val, nil)
|
||||
if err2 != nil {
|
||||
|
||||
// Replace structure name with JSON name if there is a tag on the variable
|
||||
jsonTag := toJSONName(typeField.Tag.Get("json"))
|
||||
if jsonTag != "" {
|
||||
switch jsonError := err2.(type) {
|
||||
case Error:
|
||||
jsonError.Name = jsonTag
|
||||
err2 = jsonError
|
||||
case Errors:
|
||||
for _, e := range jsonError.Errors() {
|
||||
switch tempErr := e.(type) {
|
||||
case Error:
|
||||
tempErr.Name = jsonTag
|
||||
_ = tempErr
|
||||
}
|
||||
}
|
||||
err2 = jsonError
|
||||
}
|
||||
}
|
||||
|
||||
errs = append(errs, err2)
|
||||
}
|
||||
result = result && resultField
|
||||
result = result && resultField && structResult
|
||||
}
|
||||
if len(errs) > 0 {
|
||||
err = errs
|
||||
|
@ -594,7 +636,7 @@ func isValidTag(s string) bool {
|
|||
}
|
||||
for _, c := range s {
|
||||
switch {
|
||||
case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
|
||||
case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
|
||||
// Backslash and quote chars are reserved, but
|
||||
// otherwise any punctuation chars are allowed
|
||||
// in a tag name.
|
||||
|
@ -620,6 +662,28 @@ func IsSemver(str string) bool {
|
|||
return rxSemver.MatchString(str)
|
||||
}
|
||||
|
||||
// IsTime check if string is valid according to given format
|
||||
func IsTime(str string, format string) bool {
|
||||
_, err := time.Parse(format, str)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// IsRFC3339 check if string is valid timestamp value according to RFC3339
|
||||
func IsRFC3339(str string) bool {
|
||||
return IsTime(str, time.RFC3339)
|
||||
}
|
||||
|
||||
// IsISO4217 check if string is valid ISO currency code
|
||||
func IsISO4217(str string) bool {
|
||||
for _, currency := range ISO4217List {
|
||||
if str == currency {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// ByteLength check string's length
|
||||
func ByteLength(str string, params ...string) bool {
|
||||
if len(params) == 2 {
|
||||
|
@ -631,6 +695,12 @@ func ByteLength(str string, params ...string) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
// RuneLength check string's length
|
||||
// Alias for StringLength
|
||||
func RuneLength(str string, params ...string) bool {
|
||||
return StringLength(str, params...)
|
||||
}
|
||||
|
||||
// StringMatches checks if a string matches a given pattern.
|
||||
func StringMatches(s string, params ...string) bool {
|
||||
if len(params) == 1 {
|
||||
|
@ -653,6 +723,41 @@ func StringLength(str string, params ...string) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
// Range check string's length
|
||||
func Range(str string, params ...string) bool {
|
||||
if len(params) == 2 {
|
||||
value, _ := ToFloat(str)
|
||||
min, _ := ToFloat(params[0])
|
||||
max, _ := ToFloat(params[1])
|
||||
return InRange(value, min, max)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func isInRaw(str string, params ...string) bool {
|
||||
if len(params) == 1 {
|
||||
rawParams := params[0]
|
||||
|
||||
parsedParams := strings.Split(rawParams, "|")
|
||||
|
||||
return IsIn(str, parsedParams...)
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// IsIn check if string str is a member of the set of strings params
|
||||
func IsIn(str string, params ...string) bool {
|
||||
for _, param := range params {
|
||||
if str == param {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) {
|
||||
if requiredOption, isRequired := options["required"]; isRequired {
|
||||
if len(requiredOption) > 0 {
|
||||
|
@ -666,7 +771,7 @@ func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap
|
|||
return true, nil
|
||||
}
|
||||
|
||||
func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, error) {
|
||||
func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) {
|
||||
if !v.IsValid() {
|
||||
return false, nil
|
||||
}
|
||||
|
@ -684,12 +789,22 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
return true, nil
|
||||
}
|
||||
|
||||
options := parseTagIntoMap(tag)
|
||||
isRootType := false
|
||||
if options == nil {
|
||||
isRootType = true
|
||||
options = parseTagIntoMap(tag)
|
||||
}
|
||||
|
||||
if isEmptyValue(v) {
|
||||
// an empty value is not validated, check only required
|
||||
return checkRequired(v, t, options)
|
||||
}
|
||||
|
||||
var customTypeErrors Errors
|
||||
var customTypeValidatorsExist bool
|
||||
for validatorName, customErrorMessage := range options {
|
||||
if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok {
|
||||
customTypeValidatorsExist = true
|
||||
delete(options, validatorName)
|
||||
|
||||
if result := validatefunc(v.Interface(), o.Interface()); !result {
|
||||
if len(customErrorMessage) > 0 {
|
||||
customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf(customErrorMessage), CustomErrorMessageExists: true})
|
||||
|
@ -699,16 +814,26 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
}
|
||||
}
|
||||
}
|
||||
if customTypeValidatorsExist {
|
||||
if len(customTypeErrors.Errors()) > 0 {
|
||||
return false, customTypeErrors
|
||||
}
|
||||
return true, nil
|
||||
|
||||
if len(customTypeErrors.Errors()) > 0 {
|
||||
return false, customTypeErrors
|
||||
}
|
||||
|
||||
if isEmptyValue(v) {
|
||||
// an empty value is not validated, check only required
|
||||
return checkRequired(v, t, options)
|
||||
if isRootType {
|
||||
// Ensure that we've checked the value by all specified validators before report that the value is valid
|
||||
defer func() {
|
||||
delete(options, "optional")
|
||||
delete(options, "required")
|
||||
|
||||
if isValid && resultErr == nil && len(options) != 0 {
|
||||
for validator := range options {
|
||||
isValid = false
|
||||
resultErr = Error{t.Name, fmt.Errorf(
|
||||
"The following validator is invalid or can't be applied to the field: %q", validator), false}
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
switch v.Kind() {
|
||||
|
@ -718,10 +843,12 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
reflect.Float32, reflect.Float64,
|
||||
reflect.String:
|
||||
// for each tag option check the map of validator functions
|
||||
for validator, customErrorMessage := range options {
|
||||
for validatorSpec, customErrorMessage := range options {
|
||||
var negate bool
|
||||
validator := validatorSpec
|
||||
customMsgExists := (len(customErrorMessage) > 0)
|
||||
// Check wether the tag looks like '!something' or 'something'
|
||||
|
||||
// Check whether the tag looks like '!something' or 'something'
|
||||
if validator[0] == '!' {
|
||||
validator = string(validator[1:])
|
||||
negate = true
|
||||
|
@ -730,38 +857,47 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
// Check for param validators
|
||||
for key, value := range ParamTagRegexMap {
|
||||
ps := value.FindStringSubmatch(validator)
|
||||
if len(ps) > 0 {
|
||||
if validatefunc, ok := ParamTagMap[key]; ok {
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
field := fmt.Sprint(v) // make value into string, then validate with regex
|
||||
if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
|
||||
var err error
|
||||
if !negate {
|
||||
if customMsgExists {
|
||||
err = fmt.Errorf(customErrorMessage)
|
||||
} else {
|
||||
err = fmt.Errorf("%s does not validate as %s", field, validator)
|
||||
}
|
||||
if len(ps) == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
} else {
|
||||
if customMsgExists {
|
||||
err = fmt.Errorf(customErrorMessage)
|
||||
} else {
|
||||
err = fmt.Errorf("%s does validate as %s", field, validator)
|
||||
}
|
||||
}
|
||||
return false, Error{t.Name, err, customMsgExists}
|
||||
validatefunc, ok := ParamTagMap[key]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
delete(options, validatorSpec)
|
||||
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
field := fmt.Sprint(v) // make value into string, then validate with regex
|
||||
if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
|
||||
var err error
|
||||
if !negate {
|
||||
if customMsgExists {
|
||||
err = fmt.Errorf(customErrorMessage)
|
||||
} else {
|
||||
err = fmt.Errorf("%s does not validate as %s", field, validator)
|
||||
}
|
||||
|
||||
} else {
|
||||
if customMsgExists {
|
||||
err = fmt.Errorf(customErrorMessage)
|
||||
} else {
|
||||
err = fmt.Errorf("%s does validate as %s", field, validator)
|
||||
}
|
||||
default:
|
||||
// type not yet supported, fail
|
||||
return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false}
|
||||
}
|
||||
return false, Error{t.Name, err, customMsgExists}
|
||||
}
|
||||
default:
|
||||
// type not yet supported, fail
|
||||
return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false}
|
||||
}
|
||||
}
|
||||
|
||||
if validatefunc, ok := TagMap[validator]; ok {
|
||||
delete(options, validatorSpec)
|
||||
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
field := fmt.Sprint(v) // make value into string, then validate with regex
|
||||
|
@ -813,7 +949,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
var resultItem bool
|
||||
var err error
|
||||
if v.Index(i).Kind() != reflect.Struct {
|
||||
resultItem, err = typeCheck(v.Index(i), t, o)
|
||||
resultItem, err = typeCheck(v.Index(i), t, o, options)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
@ -832,7 +968,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
var resultItem bool
|
||||
var err error
|
||||
if v.Index(i).Kind() != reflect.Struct {
|
||||
resultItem, err = typeCheck(v.Index(i), t, o)
|
||||
resultItem, err = typeCheck(v.Index(i), t, o, options)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
@ -856,7 +992,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
|
|||
if v.IsNil() {
|
||||
return true, nil
|
||||
}
|
||||
return typeCheck(v.Elem(), t, o)
|
||||
return typeCheck(v.Elem(), t, o, options)
|
||||
case reflect.Struct:
|
||||
return ValidateStruct(v.Interface())
|
||||
default:
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
box: wercker/golang
|
||||
box: golang
|
||||
build:
|
||||
steps:
|
||||
- setup-go-workspace
|
||||
|
|
|
@ -48,8 +48,8 @@ type Block interface {
|
|||
Queryable
|
||||
}
|
||||
|
||||
// HeadBlock is a regular block that can still be appended to.
|
||||
type HeadBlock interface {
|
||||
// headBlock is a regular block that can still be appended to.
|
||||
type headBlock interface {
|
||||
Block
|
||||
Appendable
|
||||
}
|
||||
|
|
|
@ -118,7 +118,7 @@ type DB struct {
|
|||
// or the general layout.
|
||||
// Must never be held when acquiring a blocks's mutex!
|
||||
headmtx sync.RWMutex
|
||||
heads []HeadBlock
|
||||
heads []headBlock
|
||||
|
||||
compactor Compactor
|
||||
|
||||
|
@ -401,7 +401,7 @@ func (db *DB) reloadBlocks() error {
|
|||
var (
|
||||
metas []*BlockMeta
|
||||
blocks []Block
|
||||
heads []HeadBlock
|
||||
heads []headBlock
|
||||
seqBlocks = make(map[int]Block, len(dirs))
|
||||
)
|
||||
|
||||
|
@ -418,7 +418,7 @@ func (db *DB) reloadBlocks() error {
|
|||
|
||||
if meta.Compaction.Generation == 0 {
|
||||
if !ok {
|
||||
b, err = openHeadBlock(dirs[i], db.logger)
|
||||
b, err = db.openHeadBlock(dirs[i])
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "load head at %s", dirs[i])
|
||||
}
|
||||
|
@ -426,7 +426,7 @@ func (db *DB) reloadBlocks() error {
|
|||
if meta.ULID != b.Meta().ULID {
|
||||
return errors.Errorf("head block ULID changed unexpectedly")
|
||||
}
|
||||
heads = append(heads, b.(HeadBlock))
|
||||
heads = append(heads, b.(headBlock))
|
||||
} else {
|
||||
if !ok || meta.ULID != b.Meta().ULID {
|
||||
b, err = newPersistedBlock(dirs[i])
|
||||
|
@ -559,7 +559,7 @@ func (a *dbAppender) appenderFor(t int64) (*metaAppender, error) {
|
|||
if len(a.heads) == 0 || t >= a.heads[len(a.heads)-1].meta.MaxTime {
|
||||
a.db.headmtx.Lock()
|
||||
|
||||
var newHeads []HeadBlock
|
||||
var newHeads []headBlock
|
||||
|
||||
if err := a.db.ensureHead(t); err != nil {
|
||||
a.db.headmtx.Unlock()
|
||||
|
@ -670,9 +670,9 @@ func (a *dbAppender) Rollback() error {
|
|||
}
|
||||
|
||||
// appendable returns a copy of a slice of HeadBlocks that can still be appended to.
|
||||
func (db *DB) appendable() []HeadBlock {
|
||||
func (db *DB) appendable() []headBlock {
|
||||
var i int
|
||||
app := make([]HeadBlock, 0, db.opts.AppendableBlocks)
|
||||
app := make([]headBlock, 0, db.opts.AppendableBlocks)
|
||||
|
||||
if len(db.heads) > db.opts.AppendableBlocks {
|
||||
i = len(db.heads) - db.opts.AppendableBlocks
|
||||
|
@ -709,16 +709,37 @@ func (db *DB) blocksForInterval(mint, maxt int64) []Block {
|
|||
return bs
|
||||
}
|
||||
|
||||
// openHeadBlock opens the head block at dir.
|
||||
func (db *DB) openHeadBlock(dir string) (*HeadBlock, error) {
|
||||
var (
|
||||
wdir = filepath.Join(dir, "wal")
|
||||
l = log.With(db.logger, "wal", wdir)
|
||||
)
|
||||
wal, err := OpenSegmentWAL(wdir, l, 5*time.Second)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "open WAL %s")
|
||||
}
|
||||
|
||||
h, err := OpenHeadBlock(dir, log.With(db.logger, "block", dir), wal)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "open head block %s", dir)
|
||||
}
|
||||
return h, nil
|
||||
}
|
||||
|
||||
// cut starts a new head block to append to. The completed head block
|
||||
// will still be appendable for the configured grace period.
|
||||
func (db *DB) cut(mint int64) (HeadBlock, error) {
|
||||
func (db *DB) cut(mint int64) (headBlock, error) {
|
||||
maxt := mint + int64(db.opts.MinBlockDuration)
|
||||
|
||||
dir, seq, err := nextSequenceFile(db.dir, "b-")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
newHead, err := createHeadBlock(dir, seq, db.logger, mint, maxt)
|
||||
if err := TouchHeadBlock(dir, seq, mint, maxt); err != nil {
|
||||
return nil, errors.Wrapf(err, "touch head block %s", dir)
|
||||
}
|
||||
newHead, err := db.openHeadBlock(dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
@ -47,11 +47,11 @@ var (
|
|||
ErrOutOfBounds = errors.New("out of bounds")
|
||||
)
|
||||
|
||||
// headBlock handles reads and writes of time series data within a time window.
|
||||
type headBlock struct {
|
||||
// HeadBlock handles reads and writes of time series data within a time window.
|
||||
type HeadBlock struct {
|
||||
mtx sync.RWMutex
|
||||
dir string
|
||||
wal *WAL
|
||||
wal WAL
|
||||
|
||||
activeWriters uint64
|
||||
closed bool
|
||||
|
@ -69,19 +69,21 @@ type headBlock struct {
|
|||
meta BlockMeta
|
||||
}
|
||||
|
||||
func createHeadBlock(dir string, seq int, l log.Logger, mint, maxt int64) (*headBlock, error) {
|
||||
// TouchHeadBlock atomically touches a new head block in dir for
|
||||
// samples in the range [mint,maxt).
|
||||
func TouchHeadBlock(dir string, seq int, mint, maxt int64) error {
|
||||
// Make head block creation appear atomic.
|
||||
tmp := dir + ".tmp"
|
||||
|
||||
if err := os.MkdirAll(tmp, 0777); err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
entropy := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
|
||||
ulid, err := ulid.New(ulid.Now(), entropy)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeMetaFile(tmp, &BlockMeta{
|
||||
|
@ -90,38 +92,33 @@ func createHeadBlock(dir string, seq int, l log.Logger, mint, maxt int64) (*head
|
|||
MinTime: mint,
|
||||
MaxTime: maxt,
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
if err := renameFile(tmp, dir); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return openHeadBlock(dir, l)
|
||||
return renameFile(tmp, dir)
|
||||
}
|
||||
|
||||
// openHeadBlock creates a new empty head block.
|
||||
func openHeadBlock(dir string, l log.Logger) (*headBlock, error) {
|
||||
wal, err := OpenWAL(dir, log.With(l, "component", "wal"), 5*time.Second)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// OpenHeadBlock opens the head block in dir.
|
||||
func OpenHeadBlock(dir string, l log.Logger, wal WAL) (*HeadBlock, error) {
|
||||
meta, err := readMetaFile(dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
h := &headBlock{
|
||||
h := &HeadBlock{
|
||||
dir: dir,
|
||||
wal: wal,
|
||||
series: []*memSeries{},
|
||||
series: []*memSeries{nil}, // 0 is not a valid posting, filled with nil.
|
||||
hashes: map[uint64][]*memSeries{},
|
||||
values: map[string]stringset{},
|
||||
postings: &memPostings{m: make(map[term][]uint32)},
|
||||
meta: *meta,
|
||||
}
|
||||
return h, h.init()
|
||||
}
|
||||
|
||||
r := wal.Reader()
|
||||
func (h *HeadBlock) init() error {
|
||||
r := h.wal.Reader()
|
||||
|
||||
Outer:
|
||||
for r.Next() {
|
||||
series, samples := r.At()
|
||||
|
||||
|
@ -130,37 +127,32 @@ Outer:
|
|||
h.meta.Stats.NumSeries++
|
||||
}
|
||||
for _, s := range samples {
|
||||
if int(s.ref) >= len(h.series) {
|
||||
l.Log("msg", "unknown series reference, abort WAL restore", "got", s.ref, "max", len(h.series)-1)
|
||||
break Outer
|
||||
if int(s.Ref) >= len(h.series) {
|
||||
return errors.Errorf("unknown series reference %d (max %d); abort WAL restore", s.Ref, len(h.series))
|
||||
}
|
||||
h.series[s.ref].append(s.t, s.v)
|
||||
h.series[s.Ref].append(s.T, s.V)
|
||||
|
||||
if !h.inBounds(s.t) {
|
||||
return nil, errors.Wrap(ErrOutOfBounds, "consume WAL")
|
||||
if !h.inBounds(s.T) {
|
||||
return errors.Wrap(ErrOutOfBounds, "consume WAL")
|
||||
}
|
||||
h.meta.Stats.NumSamples++
|
||||
}
|
||||
}
|
||||
if err := r.Err(); err != nil {
|
||||
return nil, errors.Wrap(err, "consume WAL")
|
||||
}
|
||||
|
||||
return h, nil
|
||||
return errors.Wrap(r.Err(), "consume WAL")
|
||||
}
|
||||
|
||||
// inBounds returns true if the given timestamp is within the valid
|
||||
// time bounds of the block.
|
||||
func (h *headBlock) inBounds(t int64) bool {
|
||||
func (h *HeadBlock) inBounds(t int64) bool {
|
||||
return t >= h.meta.MinTime && t <= h.meta.MaxTime
|
||||
}
|
||||
|
||||
func (h *headBlock) String() string {
|
||||
func (h *HeadBlock) String() string {
|
||||
return fmt.Sprintf("(%d, %s)", h.meta.Sequence, h.meta.ULID)
|
||||
}
|
||||
|
||||
// Close syncs all data and closes underlying resources of the head block.
|
||||
func (h *headBlock) Close() error {
|
||||
func (h *HeadBlock) Close() error {
|
||||
h.mtx.Lock()
|
||||
defer h.mtx.Unlock()
|
||||
|
||||
|
@ -184,7 +176,7 @@ func (h *headBlock) Close() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (h *headBlock) Meta() BlockMeta {
|
||||
func (h *HeadBlock) Meta() BlockMeta {
|
||||
m := BlockMeta{
|
||||
ULID: h.meta.ULID,
|
||||
Sequence: h.meta.Sequence,
|
||||
|
@ -200,12 +192,12 @@ func (h *headBlock) Meta() BlockMeta {
|
|||
return m
|
||||
}
|
||||
|
||||
func (h *headBlock) Dir() string { return h.dir }
|
||||
func (h *headBlock) Persisted() bool { return false }
|
||||
func (h *headBlock) Index() IndexReader { return &headIndexReader{h} }
|
||||
func (h *headBlock) Chunks() ChunkReader { return &headChunkReader{h} }
|
||||
func (h *HeadBlock) Dir() string { return h.dir }
|
||||
func (h *HeadBlock) Persisted() bool { return false }
|
||||
func (h *HeadBlock) Index() IndexReader { return &headIndexReader{h} }
|
||||
func (h *HeadBlock) Chunks() ChunkReader { return &headChunkReader{h} }
|
||||
|
||||
func (h *headBlock) Querier(mint, maxt int64) Querier {
|
||||
func (h *HeadBlock) Querier(mint, maxt int64) Querier {
|
||||
h.mtx.RLock()
|
||||
defer h.mtx.RUnlock()
|
||||
|
||||
|
@ -244,7 +236,7 @@ func (h *headBlock) Querier(mint, maxt int64) Querier {
|
|||
}
|
||||
}
|
||||
|
||||
func (h *headBlock) Appender() Appender {
|
||||
func (h *HeadBlock) Appender() Appender {
|
||||
atomic.AddUint64(&h.activeWriters, 1)
|
||||
|
||||
h.mtx.RLock()
|
||||
|
@ -252,36 +244,36 @@ func (h *headBlock) Appender() Appender {
|
|||
if h.closed {
|
||||
panic(fmt.Sprintf("block %s already closed", h.dir))
|
||||
}
|
||||
return &headAppender{headBlock: h, samples: getHeadAppendBuffer()}
|
||||
return &headAppender{HeadBlock: h, samples: getHeadAppendBuffer()}
|
||||
}
|
||||
|
||||
func (h *headBlock) Busy() bool {
|
||||
func (h *HeadBlock) Busy() bool {
|
||||
return atomic.LoadUint64(&h.activeWriters) > 0
|
||||
}
|
||||
|
||||
var headPool = sync.Pool{}
|
||||
|
||||
func getHeadAppendBuffer() []refdSample {
|
||||
func getHeadAppendBuffer() []RefSample {
|
||||
b := headPool.Get()
|
||||
if b == nil {
|
||||
return make([]refdSample, 0, 512)
|
||||
return make([]RefSample, 0, 512)
|
||||
}
|
||||
return b.([]refdSample)
|
||||
return b.([]RefSample)
|
||||
}
|
||||
|
||||
func putHeadAppendBuffer(b []refdSample) {
|
||||
func putHeadAppendBuffer(b []RefSample) {
|
||||
headPool.Put(b[:0])
|
||||
}
|
||||
|
||||
type headAppender struct {
|
||||
*headBlock
|
||||
*HeadBlock
|
||||
|
||||
newSeries map[uint64]hashedLabels
|
||||
newHashes map[uint64]uint64
|
||||
refmap map[uint64]uint64
|
||||
newLabels []labels.Labels
|
||||
|
||||
samples []refdSample
|
||||
samples []RefSample
|
||||
}
|
||||
|
||||
type hashedLabels struct {
|
||||
|
@ -289,12 +281,6 @@ type hashedLabels struct {
|
|||
labels labels.Labels
|
||||
}
|
||||
|
||||
type refdSample struct {
|
||||
ref uint64
|
||||
t int64
|
||||
v float64
|
||||
}
|
||||
|
||||
func (a *headAppender) Add(lset labels.Labels, t int64, v float64) (uint64, error) {
|
||||
if !a.inBounds(t) {
|
||||
return 0, ErrOutOfBounds
|
||||
|
@ -369,10 +355,10 @@ func (a *headAppender) AddFast(ref uint64, t int64, v float64) error {
|
|||
}
|
||||
}
|
||||
|
||||
a.samples = append(a.samples, refdSample{
|
||||
ref: ref,
|
||||
t: t,
|
||||
v: v,
|
||||
a.samples = append(a.samples, RefSample{
|
||||
Ref: ref,
|
||||
T: t,
|
||||
V: v,
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
@ -418,8 +404,8 @@ func (a *headAppender) Commit() error {
|
|||
for i := range a.samples {
|
||||
s := &a.samples[i]
|
||||
|
||||
if s.ref&(1<<32) > 0 {
|
||||
s.ref = a.refmap[s.ref]
|
||||
if s.Ref&(1<<32) > 0 {
|
||||
s.Ref = a.refmap[s.Ref]
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -433,7 +419,7 @@ func (a *headAppender) Commit() error {
|
|||
total := uint64(len(a.samples))
|
||||
|
||||
for _, s := range a.samples {
|
||||
if !a.series[s.ref].append(s.t, s.v) {
|
||||
if !a.series[s.Ref].append(s.T, s.V) {
|
||||
total--
|
||||
}
|
||||
}
|
||||
|
@ -454,7 +440,7 @@ func (a *headAppender) Rollback() error {
|
|||
}
|
||||
|
||||
type headChunkReader struct {
|
||||
*headBlock
|
||||
*HeadBlock
|
||||
}
|
||||
|
||||
// Chunk returns the chunk for the reference number.
|
||||
|
@ -490,7 +476,7 @@ func (c *safeChunk) Iterator() chunks.Iterator {
|
|||
// func (c *safeChunk) Encoding() chunks.Encoding { panic("illegal") }
|
||||
|
||||
type headIndexReader struct {
|
||||
*headBlock
|
||||
*HeadBlock
|
||||
}
|
||||
|
||||
// LabelValues returns the possible label values
|
||||
|
@ -558,7 +544,7 @@ func (h *headIndexReader) LabelIndices() ([][]string, error) {
|
|||
|
||||
// get retrieves the chunk with the hash and label set and creates
|
||||
// a new one if it doesn't exist yet.
|
||||
func (h *headBlock) get(hash uint64, lset labels.Labels) *memSeries {
|
||||
func (h *HeadBlock) get(hash uint64, lset labels.Labels) *memSeries {
|
||||
series := h.hashes[hash]
|
||||
|
||||
for _, s := range series {
|
||||
|
@ -569,11 +555,13 @@ func (h *headBlock) get(hash uint64, lset labels.Labels) *memSeries {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (h *headBlock) create(hash uint64, lset labels.Labels) *memSeries {
|
||||
func (h *HeadBlock) create(hash uint64, lset labels.Labels) *memSeries {
|
||||
s := &memSeries{
|
||||
lset: lset,
|
||||
ref: uint32(len(h.series)),
|
||||
}
|
||||
// create the initial chunk and appender
|
||||
s.cut()
|
||||
|
||||
// Allocate empty space until we can insert at the given index.
|
||||
h.series = append(h.series, s)
|
||||
|
@ -636,7 +624,7 @@ func (s *memSeries) append(t int64, v float64) bool {
|
|||
|
||||
var c *memChunk
|
||||
|
||||
if s.app == nil || s.head().samples > 130 {
|
||||
if s.head().samples > 130 {
|
||||
c = s.cut()
|
||||
c.minTime = t
|
||||
} else {
|
||||
|
|
|
@ -33,7 +33,7 @@ func (p *memPostings) get(t term) Postings {
|
|||
if l == nil {
|
||||
return emptyPostings
|
||||
}
|
||||
return &listPostings{list: l, idx: -1}
|
||||
return newListPostings(l)
|
||||
}
|
||||
|
||||
// add adds a document to the index. The caller has to ensure that no
|
||||
|
@ -70,18 +70,13 @@ func (e errPostings) Seek(uint32) bool { return false }
|
|||
func (e errPostings) At() uint32 { return 0 }
|
||||
func (e errPostings) Err() error { return e.err }
|
||||
|
||||
func expandPostings(p Postings) (res []uint32, err error) {
|
||||
for p.Next() {
|
||||
res = append(res, p.At())
|
||||
}
|
||||
return res, p.Err()
|
||||
}
|
||||
var emptyPostings = errPostings{}
|
||||
|
||||
// Intersect returns a new postings list over the intersection of the
|
||||
// input postings.
|
||||
func Intersect(its ...Postings) Postings {
|
||||
if len(its) == 0 {
|
||||
return errPostings{err: nil}
|
||||
return emptyPostings
|
||||
}
|
||||
a := its[0]
|
||||
|
||||
|
@ -91,8 +86,6 @@ func Intersect(its ...Postings) Postings {
|
|||
return a
|
||||
}
|
||||
|
||||
var emptyPostings = errPostings{}
|
||||
|
||||
type intersectPostings struct {
|
||||
a, b Postings
|
||||
aok, bok bool
|
||||
|
@ -100,41 +93,44 @@ type intersectPostings struct {
|
|||
}
|
||||
|
||||
func newIntersectPostings(a, b Postings) *intersectPostings {
|
||||
it := &intersectPostings{a: a, b: b}
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
|
||||
return it
|
||||
return &intersectPostings{a: a, b: b}
|
||||
}
|
||||
|
||||
func (it *intersectPostings) At() uint32 {
|
||||
return it.cur
|
||||
}
|
||||
|
||||
func (it *intersectPostings) Next() bool {
|
||||
func (it *intersectPostings) doNext(id uint32) bool {
|
||||
for {
|
||||
if !it.aok || !it.bok {
|
||||
if !it.b.Seek(id) {
|
||||
return false
|
||||
}
|
||||
av, bv := it.a.At(), it.b.At()
|
||||
|
||||
if av < bv {
|
||||
it.aok = it.a.Seek(bv)
|
||||
} else if bv < av {
|
||||
it.bok = it.b.Seek(av)
|
||||
} else {
|
||||
it.cur = av
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
return true
|
||||
if vb := it.b.At(); vb != id {
|
||||
if !it.a.Seek(vb) {
|
||||
return false
|
||||
}
|
||||
id = it.a.At()
|
||||
if vb != id {
|
||||
continue
|
||||
}
|
||||
}
|
||||
it.cur = id
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
func (it *intersectPostings) Next() bool {
|
||||
if !it.a.Next() {
|
||||
return false
|
||||
}
|
||||
return it.doNext(it.a.At())
|
||||
}
|
||||
|
||||
func (it *intersectPostings) Seek(id uint32) bool {
|
||||
it.aok = it.a.Seek(id)
|
||||
it.bok = it.b.Seek(id)
|
||||
return it.Next()
|
||||
if !it.a.Seek(id) {
|
||||
return false
|
||||
}
|
||||
return it.doNext(it.a.At())
|
||||
}
|
||||
|
||||
func (it *intersectPostings) Err() error {
|
||||
|
@ -158,17 +154,14 @@ func Merge(its ...Postings) Postings {
|
|||
}
|
||||
|
||||
type mergedPostings struct {
|
||||
a, b Postings
|
||||
aok, bok bool
|
||||
cur uint32
|
||||
a, b Postings
|
||||
initialized bool
|
||||
aok, bok bool
|
||||
cur uint32
|
||||
}
|
||||
|
||||
func newMergedPostings(a, b Postings) *mergedPostings {
|
||||
it := &mergedPostings{a: a, b: b}
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
|
||||
return it
|
||||
return &mergedPostings{a: a, b: b}
|
||||
}
|
||||
|
||||
func (it *mergedPostings) At() uint32 {
|
||||
|
@ -176,6 +169,12 @@ func (it *mergedPostings) At() uint32 {
|
|||
}
|
||||
|
||||
func (it *mergedPostings) Next() bool {
|
||||
if !it.initialized {
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
it.initialized = true
|
||||
}
|
||||
|
||||
if !it.aok && !it.bok {
|
||||
return false
|
||||
}
|
||||
|
@ -196,23 +195,25 @@ func (it *mergedPostings) Next() bool {
|
|||
if acur < bcur {
|
||||
it.cur = acur
|
||||
it.aok = it.a.Next()
|
||||
return true
|
||||
}
|
||||
if bcur < acur {
|
||||
} else if acur > bcur {
|
||||
it.cur = bcur
|
||||
it.bok = it.b.Next()
|
||||
return true
|
||||
} else {
|
||||
it.cur = acur
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
}
|
||||
it.cur = acur
|
||||
it.aok = it.a.Next()
|
||||
it.bok = it.b.Next()
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (it *mergedPostings) Seek(id uint32) bool {
|
||||
if it.cur >= id {
|
||||
return true
|
||||
}
|
||||
|
||||
it.aok = it.a.Seek(id)
|
||||
it.bok = it.b.Seek(id)
|
||||
it.initialized = true
|
||||
|
||||
return it.Next()
|
||||
}
|
||||
|
@ -227,28 +228,44 @@ func (it *mergedPostings) Err() error {
|
|||
// listPostings implements the Postings interface over a plain list.
|
||||
type listPostings struct {
|
||||
list []uint32
|
||||
idx int
|
||||
cur uint32
|
||||
}
|
||||
|
||||
func newListPostings(list []uint32) *listPostings {
|
||||
return &listPostings{list: list, idx: -1}
|
||||
return &listPostings{list: list}
|
||||
}
|
||||
|
||||
func (it *listPostings) At() uint32 {
|
||||
return it.list[it.idx]
|
||||
return it.cur
|
||||
}
|
||||
|
||||
func (it *listPostings) Next() bool {
|
||||
it.idx++
|
||||
return it.idx < len(it.list)
|
||||
if len(it.list) > 0 {
|
||||
it.cur = it.list[0]
|
||||
it.list = it.list[1:]
|
||||
return true
|
||||
}
|
||||
it.cur = 0
|
||||
return false
|
||||
}
|
||||
|
||||
func (it *listPostings) Seek(x uint32) bool {
|
||||
// If the current value satisfies, then return.
|
||||
if it.cur >= x {
|
||||
return true
|
||||
}
|
||||
|
||||
// Do binary search between current position and end.
|
||||
it.idx += sort.Search(len(it.list)-it.idx, func(i int) bool {
|
||||
return it.list[i+it.idx] >= x
|
||||
i := sort.Search(len(it.list), func(i int) bool {
|
||||
return it.list[i] >= x
|
||||
})
|
||||
return it.idx < len(it.list)
|
||||
if i < len(it.list) {
|
||||
it.cur = it.list[i]
|
||||
it.list = it.list[i+1:]
|
||||
return true
|
||||
}
|
||||
it.list = nil
|
||||
return false
|
||||
}
|
||||
|
||||
func (it *listPostings) Err() error {
|
||||
|
@ -259,32 +276,44 @@ func (it *listPostings) Err() error {
|
|||
// big endian numbers.
|
||||
type bigEndianPostings struct {
|
||||
list []byte
|
||||
idx int
|
||||
cur uint32
|
||||
}
|
||||
|
||||
func newBigEndianPostings(list []byte) *bigEndianPostings {
|
||||
return &bigEndianPostings{list: list, idx: -1}
|
||||
return &bigEndianPostings{list: list}
|
||||
}
|
||||
|
||||
func (it *bigEndianPostings) At() uint32 {
|
||||
idx := 4 * it.idx
|
||||
return binary.BigEndian.Uint32(it.list[idx : idx+4])
|
||||
return it.cur
|
||||
}
|
||||
|
||||
func (it *bigEndianPostings) Next() bool {
|
||||
it.idx++
|
||||
return it.idx*4 < len(it.list)
|
||||
if len(it.list) >= 4 {
|
||||
it.cur = binary.BigEndian.Uint32(it.list)
|
||||
it.list = it.list[4:]
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (it *bigEndianPostings) Seek(x uint32) bool {
|
||||
if it.cur >= x {
|
||||
return true
|
||||
}
|
||||
|
||||
num := len(it.list) / 4
|
||||
// Do binary search between current position and end.
|
||||
it.idx += sort.Search(num-it.idx, func(i int) bool {
|
||||
idx := 4 * (it.idx + i)
|
||||
val := binary.BigEndian.Uint32(it.list[idx : idx+4])
|
||||
return val >= x
|
||||
i := sort.Search(num, func(i int) bool {
|
||||
return binary.BigEndian.Uint32(it.list[i*4:]) >= x
|
||||
})
|
||||
return it.idx*4 < len(it.list)
|
||||
if i < num {
|
||||
j := i * 4
|
||||
it.cur = binary.BigEndian.Uint32(it.list[j:])
|
||||
it.list = it.list[j+4:]
|
||||
return true
|
||||
}
|
||||
it.list = nil
|
||||
return false
|
||||
}
|
||||
|
||||
func (it *bigEndianPostings) Err() error {
|
||||
|
|
|
@ -135,21 +135,9 @@ type blockQuerier struct {
|
|||
}
|
||||
|
||||
func (q *blockQuerier) Select(ms ...labels.Matcher) SeriesSet {
|
||||
var (
|
||||
its []Postings
|
||||
absent []string
|
||||
)
|
||||
for _, m := range ms {
|
||||
// If the matcher checks absence of a label, don't select them
|
||||
// but propagate the check into the series set.
|
||||
if _, ok := m.(*labels.EqualMatcher); ok && m.Matches("") {
|
||||
absent = append(absent, m.Name())
|
||||
continue
|
||||
}
|
||||
its = append(its, q.selectSingle(m))
|
||||
}
|
||||
pr := newPostingsReader(q.index)
|
||||
|
||||
p := Intersect(its...)
|
||||
p, absent := pr.Select(ms...)
|
||||
|
||||
if q.postingsMapper != nil {
|
||||
p = q.postingsMapper(p)
|
||||
|
@ -172,50 +160,6 @@ func (q *blockQuerier) Select(ms ...labels.Matcher) SeriesSet {
|
|||
}
|
||||
}
|
||||
|
||||
func (q *blockQuerier) selectSingle(m labels.Matcher) Postings {
|
||||
// Fast-path for equal matching.
|
||||
if em, ok := m.(*labels.EqualMatcher); ok {
|
||||
it, err := q.index.Postings(em.Name(), em.Value())
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
tpls, err := q.index.LabelValues(m.Name())
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
// TODO(fabxc): use interface upgrading to provide fast solution
|
||||
// for equality and prefix matches. Tuples are lexicographically sorted.
|
||||
var res []string
|
||||
|
||||
for i := 0; i < tpls.Len(); i++ {
|
||||
vals, err := tpls.At(i)
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
if m.Matches(vals[0]) {
|
||||
res = append(res, vals[0])
|
||||
}
|
||||
}
|
||||
if len(res) == 0 {
|
||||
return emptyPostings
|
||||
}
|
||||
|
||||
var rit []Postings
|
||||
|
||||
for _, v := range res {
|
||||
it, err := q.index.Postings(m.Name(), v)
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
rit = append(rit, it)
|
||||
}
|
||||
|
||||
return Merge(rit...)
|
||||
}
|
||||
|
||||
func (q *blockQuerier) LabelValues(name string) ([]string, error) {
|
||||
tpls, err := q.index.LabelValues(name)
|
||||
if err != nil {
|
||||
|
@ -241,6 +185,81 @@ func (q *blockQuerier) Close() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// postingsReader is used to select matching postings from an IndexReader.
|
||||
type postingsReader struct {
|
||||
index IndexReader
|
||||
}
|
||||
|
||||
func newPostingsReader(i IndexReader) *postingsReader {
|
||||
return &postingsReader{index: i}
|
||||
}
|
||||
|
||||
func (r *postingsReader) Select(ms ...labels.Matcher) (Postings, []string) {
|
||||
var (
|
||||
its []Postings
|
||||
absent []string
|
||||
)
|
||||
for _, m := range ms {
|
||||
// If the matcher checks absence of a label, don't select them
|
||||
// but propagate the check into the series set.
|
||||
if _, ok := m.(*labels.EqualMatcher); ok && m.Matches("") {
|
||||
absent = append(absent, m.Name())
|
||||
continue
|
||||
}
|
||||
its = append(its, r.selectSingle(m))
|
||||
}
|
||||
|
||||
p := Intersect(its...)
|
||||
|
||||
return p, absent
|
||||
}
|
||||
|
||||
func (r *postingsReader) selectSingle(m labels.Matcher) Postings {
|
||||
// Fast-path for equal matching.
|
||||
if em, ok := m.(*labels.EqualMatcher); ok {
|
||||
it, err := r.index.Postings(em.Name(), em.Value())
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
// TODO(fabxc): use interface upgrading to provide fast solution
|
||||
// for prefix matches. Tuples are lexicographically sorted.
|
||||
tpls, err := r.index.LabelValues(m.Name())
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
|
||||
var res []string
|
||||
|
||||
for i := 0; i < tpls.Len(); i++ {
|
||||
vals, err := tpls.At(i)
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
if m.Matches(vals[0]) {
|
||||
res = append(res, vals[0])
|
||||
}
|
||||
}
|
||||
|
||||
if len(res) == 0 {
|
||||
return emptyPostings
|
||||
}
|
||||
|
||||
var rit []Postings
|
||||
|
||||
for _, v := range res {
|
||||
it, err := r.index.Postings(m.Name(), v)
|
||||
if err != nil {
|
||||
return errPostings{err: err}
|
||||
}
|
||||
rit = append(rit, it)
|
||||
}
|
||||
|
||||
return Merge(rit...)
|
||||
}
|
||||
|
||||
func mergeStrings(a, b []string) []string {
|
||||
maxl := len(a)
|
||||
if len(b) > len(a) {
|
||||
|
|
|
@ -49,9 +49,8 @@ const (
|
|||
WALEntrySamples WALEntryType = 3
|
||||
)
|
||||
|
||||
// WAL is a write ahead log for series data. It can only be written to.
|
||||
// Use WALReader to read back from a write ahead log.
|
||||
type WAL struct {
|
||||
// SegmentWAL is a write ahead log for series data.
|
||||
type SegmentWAL struct {
|
||||
mtx sync.Mutex
|
||||
|
||||
dirFile *os.File
|
||||
|
@ -69,6 +68,28 @@ type WAL struct {
|
|||
donec chan struct{}
|
||||
}
|
||||
|
||||
// WAL is a write ahead log that can log new series labels and samples.
|
||||
// It must be completely read before new entries are logged.
|
||||
type WAL interface {
|
||||
Reader() WALReader
|
||||
Log([]labels.Labels, []RefSample) error
|
||||
Close() error
|
||||
}
|
||||
|
||||
// WALReader reads entries from a WAL.
|
||||
type WALReader interface {
|
||||
At() ([]labels.Labels, []RefSample)
|
||||
Next() bool
|
||||
Err() error
|
||||
}
|
||||
|
||||
// RefSample is a timestamp/value pair associated with a reference to a series.
|
||||
type RefSample struct {
|
||||
Ref uint64
|
||||
T int64
|
||||
V float64
|
||||
}
|
||||
|
||||
const (
|
||||
walDirName = "wal"
|
||||
walSegmentSizeBytes = 256 * 1024 * 1024 // 256 MB
|
||||
|
@ -83,9 +104,9 @@ func init() {
|
|||
castagnoliTable = crc32.MakeTable(crc32.Castagnoli)
|
||||
}
|
||||
|
||||
// OpenWAL opens or creates a write ahead log in the given directory.
|
||||
// OpenSegmentWAL opens or creates a write ahead log in the given directory.
|
||||
// The WAL must be read completely before new data is written.
|
||||
func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL, error) {
|
||||
func OpenSegmentWAL(dir string, logger log.Logger, flushInterval time.Duration) (*SegmentWAL, error) {
|
||||
dir = filepath.Join(dir, walDirName)
|
||||
|
||||
if err := os.MkdirAll(dir, 0777); err != nil {
|
||||
|
@ -99,7 +120,7 @@ func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL,
|
|||
logger = log.NewNopLogger()
|
||||
}
|
||||
|
||||
w := &WAL{
|
||||
w := &SegmentWAL{
|
||||
dirFile: df,
|
||||
logger: logger,
|
||||
flushInterval: flushInterval,
|
||||
|
@ -119,12 +140,12 @@ func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL,
|
|||
|
||||
// Reader returns a new reader over the the write ahead log data.
|
||||
// It must be completely consumed before writing to the WAL.
|
||||
func (w *WAL) Reader() *WALReader {
|
||||
return NewWALReader(w.logger, w)
|
||||
func (w *SegmentWAL) Reader() WALReader {
|
||||
return newWALReader(w, w.logger)
|
||||
}
|
||||
|
||||
// Log writes a batch of new series labels and samples to the log.
|
||||
func (w *WAL) Log(series []labels.Labels, samples []refdSample) error {
|
||||
func (w *SegmentWAL) Log(series []labels.Labels, samples []RefSample) error {
|
||||
if err := w.encodeSeries(series); err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -139,7 +160,7 @@ func (w *WAL) Log(series []labels.Labels, samples []refdSample) error {
|
|||
|
||||
// initSegments finds all existing segment files and opens them in the
|
||||
// appropriate file modes.
|
||||
func (w *WAL) initSegments() error {
|
||||
func (w *SegmentWAL) initSegments() error {
|
||||
fns, err := sequenceFiles(w.dirFile.Name(), "")
|
||||
if err != nil {
|
||||
return err
|
||||
|
@ -180,7 +201,7 @@ func (w *WAL) initSegments() error {
|
|||
|
||||
// cut finishes the currently active segments and opens the next one.
|
||||
// The encoder is reset to point to the new segment.
|
||||
func (w *WAL) cut() error {
|
||||
func (w *SegmentWAL) cut() error {
|
||||
// Sync current tail to disk and close.
|
||||
if tf := w.tail(); tf != nil {
|
||||
if err := w.sync(); err != nil {
|
||||
|
@ -229,7 +250,7 @@ func (w *WAL) cut() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (w *WAL) tail() *os.File {
|
||||
func (w *SegmentWAL) tail() *os.File {
|
||||
if len(w.files) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
@ -237,14 +258,14 @@ func (w *WAL) tail() *os.File {
|
|||
}
|
||||
|
||||
// Sync flushes the changes to disk.
|
||||
func (w *WAL) Sync() error {
|
||||
func (w *SegmentWAL) Sync() error {
|
||||
w.mtx.Lock()
|
||||
defer w.mtx.Unlock()
|
||||
|
||||
return w.sync()
|
||||
}
|
||||
|
||||
func (w *WAL) sync() error {
|
||||
func (w *SegmentWAL) sync() error {
|
||||
if w.cur == nil {
|
||||
return nil
|
||||
}
|
||||
|
@ -254,7 +275,7 @@ func (w *WAL) sync() error {
|
|||
return fileutil.Fdatasync(w.tail())
|
||||
}
|
||||
|
||||
func (w *WAL) run(interval time.Duration) {
|
||||
func (w *SegmentWAL) run(interval time.Duration) {
|
||||
var tick <-chan time.Time
|
||||
|
||||
if interval > 0 {
|
||||
|
@ -277,7 +298,7 @@ func (w *WAL) run(interval time.Duration) {
|
|||
}
|
||||
|
||||
// Close syncs all data and closes the underlying resources.
|
||||
func (w *WAL) Close() error {
|
||||
func (w *SegmentWAL) Close() error {
|
||||
close(w.stopc)
|
||||
<-w.donec
|
||||
|
||||
|
@ -305,7 +326,7 @@ const (
|
|||
walPageBytes = 16 * minSectorSize
|
||||
)
|
||||
|
||||
func (w *WAL) entry(et WALEntryType, flag byte, buf []byte) error {
|
||||
func (w *SegmentWAL) entry(et WALEntryType, flag byte, buf []byte) error {
|
||||
w.mtx.Lock()
|
||||
defer w.mtx.Unlock()
|
||||
|
||||
|
@ -369,7 +390,7 @@ func putWALBuffer(b []byte) {
|
|||
walBuffers.Put(b)
|
||||
}
|
||||
|
||||
func (w *WAL) encodeSeries(series []labels.Labels) error {
|
||||
func (w *SegmentWAL) encodeSeries(series []labels.Labels) error {
|
||||
if len(series) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
@ -395,7 +416,7 @@ func (w *WAL) encodeSeries(series []labels.Labels) error {
|
|||
return w.entry(WALEntrySeries, walSeriesSimple, buf)
|
||||
}
|
||||
|
||||
func (w *WAL) encodeSamples(samples []refdSample) error {
|
||||
func (w *SegmentWAL) encodeSamples(samples []RefSample) error {
|
||||
if len(samples) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
@ -409,67 +430,65 @@ func (w *WAL) encodeSamples(samples []refdSample) error {
|
|||
// TODO(fabxc): optimize for all samples having the same timestamp.
|
||||
first := samples[0]
|
||||
|
||||
binary.BigEndian.PutUint64(b, first.ref)
|
||||
binary.BigEndian.PutUint64(b, first.Ref)
|
||||
buf = append(buf, b[:8]...)
|
||||
binary.BigEndian.PutUint64(b, uint64(first.t))
|
||||
binary.BigEndian.PutUint64(b, uint64(first.T))
|
||||
buf = append(buf, b[:8]...)
|
||||
|
||||
for _, s := range samples {
|
||||
n := binary.PutVarint(b, int64(s.ref)-int64(first.ref))
|
||||
n := binary.PutVarint(b, int64(s.Ref)-int64(first.Ref))
|
||||
buf = append(buf, b[:n]...)
|
||||
|
||||
n = binary.PutVarint(b, s.t-first.t)
|
||||
n = binary.PutVarint(b, s.T-first.T)
|
||||
buf = append(buf, b[:n]...)
|
||||
|
||||
binary.BigEndian.PutUint64(b, math.Float64bits(s.v))
|
||||
binary.BigEndian.PutUint64(b, math.Float64bits(s.V))
|
||||
buf = append(buf, b[:8]...)
|
||||
}
|
||||
|
||||
return w.entry(WALEntrySamples, walSamplesSimple, buf)
|
||||
}
|
||||
|
||||
// WALReader decodes and emits write ahead log entries.
|
||||
type WALReader struct {
|
||||
// walReader decodes and emits write ahead log entries.
|
||||
type walReader struct {
|
||||
logger log.Logger
|
||||
|
||||
wal *WAL
|
||||
wal *SegmentWAL
|
||||
cur int
|
||||
buf []byte
|
||||
crc32 hash.Hash32
|
||||
|
||||
err error
|
||||
labels []labels.Labels
|
||||
samples []refdSample
|
||||
samples []RefSample
|
||||
}
|
||||
|
||||
// NewWALReader returns a new WALReader over the sequence of the given ReadClosers.
|
||||
func NewWALReader(logger log.Logger, w *WAL) *WALReader {
|
||||
if logger == nil {
|
||||
logger = log.NewNopLogger()
|
||||
func newWALReader(w *SegmentWAL, l log.Logger) *walReader {
|
||||
if l == nil {
|
||||
l = log.NewNopLogger()
|
||||
}
|
||||
r := &WALReader{
|
||||
logger: logger,
|
||||
return &walReader{
|
||||
logger: l,
|
||||
wal: w,
|
||||
buf: make([]byte, 0, 128*4096),
|
||||
crc32: crc32.New(crc32.MakeTable(crc32.Castagnoli)),
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// At returns the last decoded entry of labels or samples.
|
||||
// The returned slices are only valid until the next call to Next(). Their elements
|
||||
// have to be copied to preserve them.
|
||||
func (r *WALReader) At() ([]labels.Labels, []refdSample) {
|
||||
func (r *walReader) At() ([]labels.Labels, []RefSample) {
|
||||
return r.labels, r.samples
|
||||
}
|
||||
|
||||
// Err returns the last error the reader encountered.
|
||||
func (r *WALReader) Err() error {
|
||||
func (r *walReader) Err() error {
|
||||
return r.err
|
||||
}
|
||||
|
||||
// nextEntry retrieves the next entry. It is also used as a testing hook.
|
||||
func (r *WALReader) nextEntry() (WALEntryType, byte, []byte, error) {
|
||||
func (r *walReader) nextEntry() (WALEntryType, byte, []byte, error) {
|
||||
if r.cur >= len(r.wal.files) {
|
||||
return 0, 0, nil, io.EOF
|
||||
}
|
||||
|
@ -492,7 +511,7 @@ func (r *WALReader) nextEntry() (WALEntryType, byte, []byte, error) {
|
|||
|
||||
// Next returns decodes the next entry pair and returns true
|
||||
// if it was succesful.
|
||||
func (r *WALReader) Next() bool {
|
||||
func (r *walReader) Next() bool {
|
||||
r.labels = r.labels[:0]
|
||||
r.samples = r.samples[:0]
|
||||
|
||||
|
@ -549,12 +568,12 @@ func (r *WALReader) Next() bool {
|
|||
return r.err == nil
|
||||
}
|
||||
|
||||
func (r *WALReader) current() *os.File {
|
||||
func (r *walReader) current() *os.File {
|
||||
return r.wal.files[r.cur]
|
||||
}
|
||||
|
||||
// truncate the WAL after the last valid entry.
|
||||
func (r *WALReader) truncate(lastOffset int64) error {
|
||||
func (r *walReader) truncate(lastOffset int64) error {
|
||||
r.logger.Log("msg", "WAL corruption detected; truncating",
|
||||
"err", r.err, "file", r.current().Name(), "pos", lastOffset)
|
||||
|
||||
|
@ -582,7 +601,7 @@ func walCorruptionErrf(s string, args ...interface{}) error {
|
|||
return walCorruptionErr(errors.Errorf(s, args...))
|
||||
}
|
||||
|
||||
func (r *WALReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) {
|
||||
func (r *walReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) {
|
||||
r.crc32.Reset()
|
||||
tr := io.TeeReader(cr, r.crc32)
|
||||
|
||||
|
@ -629,7 +648,7 @@ func (r *WALReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) {
|
|||
return etype, flag, buf, nil
|
||||
}
|
||||
|
||||
func (r *WALReader) decodeSeries(flag byte, b []byte) error {
|
||||
func (r *walReader) decodeSeries(flag byte, b []byte) error {
|
||||
for len(b) > 0 {
|
||||
l, n := binary.Uvarint(b)
|
||||
if n < 1 {
|
||||
|
@ -659,7 +678,7 @@ func (r *WALReader) decodeSeries(flag byte, b []byte) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (r *WALReader) decodeSamples(flag byte, b []byte) error {
|
||||
func (r *walReader) decodeSamples(flag byte, b []byte) error {
|
||||
if len(b) < 16 {
|
||||
return errors.Wrap(errInvalidSize, "header length")
|
||||
}
|
||||
|
@ -670,7 +689,7 @@ func (r *WALReader) decodeSamples(flag byte, b []byte) error {
|
|||
b = b[16:]
|
||||
|
||||
for len(b) > 0 {
|
||||
var smpl refdSample
|
||||
var smpl RefSample
|
||||
|
||||
dref, n := binary.Varint(b)
|
||||
if n < 1 {
|
||||
|
@ -678,19 +697,19 @@ func (r *WALReader) decodeSamples(flag byte, b []byte) error {
|
|||
}
|
||||
b = b[n:]
|
||||
|
||||
smpl.ref = uint64(int64(baseRef) + dref)
|
||||
smpl.Ref = uint64(int64(baseRef) + dref)
|
||||
|
||||
dtime, n := binary.Varint(b)
|
||||
if n < 1 {
|
||||
return errors.Wrap(errInvalidSize, "sample timestamp delta")
|
||||
}
|
||||
b = b[n:]
|
||||
smpl.t = baseTime + dtime
|
||||
smpl.T = baseTime + dtime
|
||||
|
||||
if len(b) < 8 {
|
||||
return errors.Wrapf(errInvalidSize, "sample value bits %d", len(b))
|
||||
}
|
||||
smpl.v = float64(math.Float64frombits(binary.BigEndian.Uint64(b)))
|
||||
smpl.V = float64(math.Float64frombits(binary.BigEndian.Uint64(b)))
|
||||
b = b[8:]
|
||||
|
||||
r.samples = append(r.samples, smpl)
|
||||
|
|
|
@ -59,10 +59,10 @@
|
|||
"revisionTime": "2016-09-30T00:14:02Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "BdLdZP/C2uOO3lqk9X3NCKFpXa4=",
|
||||
"checksumSHA1": "ddYc7mKe3g1x1UUKBrGR4vArJs8=",
|
||||
"path": "github.com/asaskevich/govalidator",
|
||||
"revision": "7b3beb6df3c42abd3509abfc3bcacc0fbfb7c877",
|
||||
"revisionTime": "2016-10-01T16:31:30Z"
|
||||
"revision": "065ea97278837088c52c0cd0d963473f61b2d98c",
|
||||
"revisionTime": "2017-05-13T08:31:01Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "WNfR3yhLjRC5/uccgju/bwrdsxQ=",
|
||||
|
@ -661,22 +661,22 @@
|
|||
"revisionTime": "2016-04-11T19:08:41Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "0wu/AzUWMurN/T5VBKCrvhf7c7E=",
|
||||
"checksumSHA1": "T+9Tl4utHkpYSdVFRpdfLloShTM=",
|
||||
"path": "github.com/prometheus/tsdb",
|
||||
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149",
|
||||
"revisionTime": "2017-05-09T10:52:47Z"
|
||||
"revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
|
||||
"revisionTime": "2017-05-14T09:51:56Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "9EH3v+JdbikCUJAgD4VEOPIaWfs=",
|
||||
"path": "github.com/prometheus/tsdb/chunks",
|
||||
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149",
|
||||
"revisionTime": "2017-05-09T10:52:47Z"
|
||||
"revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
|
||||
"revisionTime": "2017-05-14T09:51:56Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "3RHZcB/ZvIae9K0tJxNlajJg0jA=",
|
||||
"path": "github.com/prometheus/tsdb/labels",
|
||||
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149",
|
||||
"revisionTime": "2017-05-09T10:52:47Z"
|
||||
"revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
|
||||
"revisionTime": "2017-05-14T09:51:56Z"
|
||||
},
|
||||
{
|
||||
"checksumSHA1": "+49Vr4Me28p3cR+gxX5SUQHbbas=",
|
||||
|
|
|
@ -327,7 +327,7 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
|
|||
Path: strings.TrimLeft(name, "/"),
|
||||
}
|
||||
|
||||
tmpl := template.NewTemplateExpander(h.context, string(text), "__console_"+name, data, h.now(), h.queryEngine, h.options.ExternalURL.Path)
|
||||
tmpl := template.NewTemplateExpander(h.context, string(text), "__console_"+name, data, h.now(), h.queryEngine, h.options.ExternalURL)
|
||||
filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib")
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
|
@ -522,7 +522,7 @@ func (h *Handler) executeTemplate(w http.ResponseWriter, name string, data inter
|
|||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
|
||||
tmpl := template.NewTemplateExpander(h.context, text, name, data, h.now(), h.queryEngine, h.options.ExternalURL.Path)
|
||||
tmpl := template.NewTemplateExpander(h.context, text, name, data, h.now(), h.queryEngine, h.options.ExternalURL)
|
||||
tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options))
|
||||
|
||||
result, err := tmpl.ExpandHTML(nil)
|
||||
|
|
Loading…
Reference in New Issue