Improve PromQL parser performance by making it non-concurrent (#6356)

Before this commit, the PromQL parser ran in two goroutines:
* The lexer goroutine that split the input into tokens and sent them over a channel to
* the parser goroutine, which produced the abstract syntax tree

The problem with this approach is that the parser spends more time on goroutine creation
and synchronisation than on actual parsing.

This commit removes that concurrency and replaces the channel with a slice-based buffer.

Benchmarks show that this makes the parser up to 7 times faster than before.

Signed-off-by: Tobias Guggenmos <tguggenm@redhat.com>
pull/6357/head
Tobias Guggenmos 2019-11-21 18:43:09 +00:00 committed by Brian Brazil
parent e2dd5b61ef
commit ac3932ea35
3 changed files with 21 additions and 22 deletions

View File

@ -317,13 +317,13 @@ type Pos int
// lexer holds the state of the scanner. // lexer holds the state of the scanner.
type lexer struct { type lexer struct {
input string // The string being scanned. input string // The string being scanned.
state stateFn // The next lexing function to enter. state stateFn // The next lexing function to enter.
pos Pos // Current position in the input. pos Pos // Current position in the input.
start Pos // Start position of this item. start Pos // Start position of this item.
width Pos // Width of last rune read from input. width Pos // Width of last rune read from input.
lastPos Pos // Position of most recent item returned by nextItem. lastPos Pos // Position of most recent item returned by nextItem.
items chan item // Channel of scanned items. items []item // Slice buffer of scanned items.
parenDepth int // Nesting depth of ( ) exprs. parenDepth int // Nesting depth of ( ) exprs.
braceOpen bool // Whether a { is opened. braceOpen bool // Whether a { is opened.
@ -362,7 +362,7 @@ func (l *lexer) backup() {
// emit passes an item back to the client. // emit passes an item back to the client.
func (l *lexer) emit(t ItemType) { func (l *lexer) emit(t ItemType) {
l.items <- item{t, l.start, l.input[l.start:l.pos]} l.items = append(l.items, item{t, l.start, l.input[l.start:l.pos]})
l.start = l.pos l.start = l.pos
} }
@ -408,13 +408,21 @@ func (l *lexer) linePosition() int {
// errorf returns an error token and terminates the scan by passing // errorf returns an error token and terminates the scan by passing
// back a nil pointer that will be the next state, terminating l.nextItem. // back a nil pointer that will be the next state, terminating l.nextItem.
func (l *lexer) errorf(format string, args ...interface{}) stateFn { func (l *lexer) errorf(format string, args ...interface{}) stateFn {
l.items <- item{ItemError, l.start, fmt.Sprintf(format, args...)} l.items = append(l.items, item{ItemError, l.start, fmt.Sprintf(format, args...)})
return nil return nil
} }
// nextItem returns the next item from the input. // nextItem returns the next item from the input.
func (l *lexer) nextItem() item { func (l *lexer) nextItem() item {
item := <-l.items for len(l.items) == 0 {
if l.state != nil {
l.state = l.state(l)
} else {
l.emit(ItemEOF)
}
}
item := l.items[0]
l.items = l.items[1:]
l.lastPos = item.pos l.lastPos = item.pos
return item return item
} }
@ -423,9 +431,8 @@ func (l *lexer) nextItem() item {
func lex(input string) *lexer { func lex(input string) *lexer {
l := &lexer{ l := &lexer{
input: input, input: input,
items: make(chan item), state: lexStatements,
} }
go l.run()
return l return l
} }
@ -434,7 +441,6 @@ func (l *lexer) run() {
for l.state = lexStatements; l.state != nil; { for l.state = lexStatements; l.state != nil; {
l.state = l.state(l) l.state = l.state(l)
} }
close(l.items)
} }
// Release resources used by lexer. // Release resources used by lexer.

View File

@ -695,15 +695,11 @@ func TestLexer(t *testing.T) {
for i, test := range typ.tests { for i, test := range typ.tests {
l := &lexer{ l := &lexer{
input: test.input, input: test.input,
items: make(chan item),
seriesDesc: test.seriesDesc, seriesDesc: test.seriesDesc,
} }
go l.run() l.run()
out := []item{} out := l.items
for it := range l.items {
out = append(out, it)
}
lastItem := out[len(out)-1] lastItem := out[len(out)-1]
if test.fail { if test.fail {

View File

@ -1741,9 +1741,6 @@ func TestRecoverParserRuntime(t *testing.T) {
defer func() { defer func() {
testutil.Equals(t, err, errUnexpected) testutil.Equals(t, err, errUnexpected)
_, ok := <-p.lex.items
testutil.Assert(t, !ok, "lex.items was not closed")
}() }()
defer p.recover(&err) defer p.recover(&err)
// Cause a runtime panic. // Cause a runtime panic.