1
0
mirror of https://github.com/woodpecker-ci/woodpecker.git synced 2024-11-30 08:06:52 +02:00

Merge pull request #2200 from praxist/build_filter_expression_parser

Add expression parser evaluator for build filter
This commit is contained in:
Brad Rydzewski 2017-09-10 14:39:15 -07:00 committed by GitHub
commit 6cdf907c73
8 changed files with 862 additions and 8 deletions

View File

@ -7,6 +7,7 @@ import (
"fmt"
"log"
"strconv"
"time"
oldcontext "golang.org/x/net/context"
@ -22,7 +23,8 @@ import (
"github.com/drone/drone/model"
"github.com/drone/drone/remote"
"github.com/drone/drone/store"
"time"
"github.com/drone/expr"
)
// This file is a complete disaster because I'm trying to wedge in some
@ -89,13 +91,9 @@ func (s *RPC) Next(c context.Context, filter rpc.Filter) (*rpc.Pipeline, error)
}
}
fn := func(task *queue.Task) bool {
for k, v := range filter.Labels {
if task.Labels[k] != v {
return false
}
}
return true
fn, err := createFilterFunc(filter)
if err != nil {
return nil, err
}
task, err := s.queue.Poll(c, fn)
if err != nil {
@ -469,6 +467,32 @@ func (s *RPC) checkCancelled(pipeline *rpc.Pipeline) (bool, error) {
return false, err
}
// createFilterFunc builds a queue filter function for the given rpc
// filter. When filter.Expr is non-empty it is parsed once, up front,
// and evaluated against each task's labels; otherwise tasks match only
// if every filter label equals the corresponding task label.
func createFilterFunc(filter rpc.Filter) (queue.Filter, error) {
	var selector *expr.Selector
	if filter.Expr != "" {
		parsed, err := expr.ParseString(filter.Expr)
		if err != nil {
			return nil, err
		}
		selector = parsed
	}
	return func(task *queue.Task) bool {
		if selector != nil {
			// evaluation errors are treated as a non-match
			match, _ := selector.Eval(expr.NewRow(task.Labels))
			return match
		}
		for key, want := range filter.Labels {
			if task.Labels[key] != want {
				return false
			}
		}
		return true
	}, nil
}
//
//
//

5
vendor/github.com/drone/expr/README generated vendored Normal file
View File

@ -0,0 +1,5 @@
Go package for parsing and evaluating SQL expressions.
Documentation:
http://godoc.org/github.com/drone/expr

158
vendor/github.com/drone/expr/eval.go generated vendored Normal file
View File

@ -0,0 +1,158 @@
package expr
import (
	"bytes"
	"fmt"
	"path/filepath"
	"regexp"

	"github.com/drone/expr/parse"
)
// state represents the state of an execution. It's not part of the
// statement so that multiple executions of the same statement
// can execute in parallel.
type state struct {
node parse.Node // node currently being evaluated, tracked for error reporting
vars Row // lookup table supplying field values during evaluation
}
// at marks the state to be on node n, for error reporting.
func (s *state) at(node parse.Node) {
s.node = node
}
// walk recursively evaluates the boolean expression tree rooted at
// node and returns the result. Comparison leaves are delegated to
// eval; AND/OR short-circuit via Go's && and ||. An unknown node type
// indicates a parser bug and panics (the panic is surfaced through
// errRecover in Selector.Eval).
func (s *state) walk(node parse.BoolExpr) bool {
s.at(node)
switch node := node.(type) {
case *parse.ComparisonExpr:
return s.eval(node)
case *parse.AndExpr:
return s.walk(node.Left) && s.walk(node.Right)
case *parse.OrExpr:
return s.walk(node.Left) || s.walk(node.Right)
case *parse.NotExpr:
return !s.walk(node.Expr)
case *parse.ParenBoolExpr:
return s.walk(node.Expr)
default:
panic("invalid node type")
}
}
// eval evaluates a single comparison expression by dispatching to the
// helper for its operator. Negated operators (NEQ, NOT GLOB, NOT
// REGEXP, NOT IN) reuse their positive counterparts. An unknown
// operator indicates a parser bug and panics (recovered by errRecover
// in Selector.Eval).
func (s *state) eval(node *parse.ComparisonExpr) bool {
	switch node.Operator {
	case parse.OperatorEq:
		return s.evalEq(node)
	case parse.OperatorGt:
		return s.evalGt(node)
	case parse.OperatorGte:
		return s.evalGte(node)
	case parse.OperatorLt:
		return s.evalLt(node)
	case parse.OperatorLte:
		return s.evalLte(node)
	case parse.OperatorNeq:
		return !s.evalEq(node)
	case parse.OperatorGlob:
		return s.evalGlob(node)
	case parse.OperatorNotGlob:
		return !s.evalGlob(node)
	case parse.OperatorRe:
		return s.evalRegexp(node)
	case parse.OperatorNotRe:
		return !s.evalRegexp(node)
	case parse.OperatorIn:
		return s.evalIn(node)
	case parse.OperatorNotIn:
		return !s.evalIn(node)
	default:
		// fixed typo: message previously read "inalid operator type"
		panic("invalid operator type")
	}
}
// evalEq reports whether the two operands are byte-for-byte equal.
func (s *state) evalEq(node *parse.ComparisonExpr) bool {
	left := s.toValue(node.Left)
	right := s.toValue(node.Right)
	return bytes.Equal(left, right)
}
// evalGt reports whether the left operand sorts strictly after the
// right operand in lexical byte order.
func (s *state) evalGt(node *parse.ComparisonExpr) bool {
	left, right := s.toValue(node.Left), s.toValue(node.Right)
	return bytes.Compare(left, right) > 0
}
// evalGte reports whether the left operand sorts at or after the right
// operand in lexical byte order.
func (s *state) evalGte(node *parse.ComparisonExpr) bool {
	left, right := s.toValue(node.Left), s.toValue(node.Right)
	return bytes.Compare(left, right) >= 0
}
// evalLt reports whether the left operand sorts strictly before the
// right operand in lexical byte order.
func (s *state) evalLt(node *parse.ComparisonExpr) bool {
	left, right := s.toValue(node.Left), s.toValue(node.Right)
	return bytes.Compare(left, right) < 0
}
// evalLte reports whether the left operand sorts at or before the
// right operand in lexical byte order.
func (s *state) evalLte(node *parse.ComparisonExpr) bool {
	left, right := s.toValue(node.Left), s.toValue(node.Right)
	return bytes.Compare(left, right) <= 0
}
// evalGlob matches the left operand against the shell glob pattern
// given by the right operand. A malformed pattern is treated as a
// non-match (the filepath.Match error is discarded).
func (s *state) evalGlob(node *parse.ComparisonExpr) bool {
	pattern := string(s.toValue(node.Right))
	name := string(s.toValue(node.Left))
	ok, _ := filepath.Match(pattern, name)
	return ok
}
// evalRegexp matches the left operand against the regular expression
// given by the right operand. An invalid pattern is treated as a
// non-match (the compile error is discarded).
func (s *state) evalRegexp(node *parse.ComparisonExpr) bool {
	pattern := string(s.toValue(node.Right))
	ok, _ := regexp.Match(pattern, s.toValue(node.Left))
	return ok
}
// evalIn reports whether the left operand equals any element of the
// array literal on the right. The parser guarantees the right side of
// IN is an ArrayLit; anything else is a bug and panics.
func (s *state) evalIn(node *parse.ComparisonExpr) bool {
	list, ok := node.Right.(*parse.ArrayLit)
	if !ok {
		panic("expected array literal")
	}
	needle := s.toValue(node.Left)
	for _, candidate := range list.Values {
		if bytes.Equal(needle, s.toValue(candidate)) {
			return true
		}
	}
	return false
}
// toValue resolves a value expression to raw bytes: a Field is looked
// up in the row, a BasicLit yields its literal bytes. Any other node
// type is a parser bug and panics.
func (s *state) toValue(expr parse.ValExpr) []byte {
	switch v := expr.(type) {
	case *parse.Field:
		return s.vars.Field(v.Name)
	case *parse.BasicLit:
		return v.Value
	}
	panic("invalid expression type")
}
// errRecover is the handler that turns panics into returns. It is
// installed with defer by Selector.Eval.
//
// The original implementation asserted e.(error) unconditionally, so a
// panic carrying a non-error value (e.g. the plain strings used by
// walk/eval/toValue) re-panicked inside the handler and escaped the
// recover, crashing the caller. Non-error panic values are now wrapped
// into an error instead.
func errRecover(err *error) {
	if e := recover(); e != nil {
		if rerr, ok := e.(error); ok {
			*err = rerr
		} else {
			*err = fmt.Errorf("%v", e)
		}
	}
}

265
vendor/github.com/drone/expr/parse/lex.go generated vendored Normal file
View File

@ -0,0 +1,265 @@
package parse
import (
"unicode"
"unicode/utf8"
)
// token is a lexical token.
type token uint
// list of lexical tokens.
//
// NOTE(review): tokenReal is declared but the lexer's scanNumber only
// ever returns tokenInteger; the parser treats the two kinds the same.
// Confirm before relying on the distinction.
const (
// special tokens
tokenIllegal token = iota
tokenEOF
// identifiers and basic type literals
tokenIdent
tokenText
tokenReal
tokenInteger
// operators and delimiters
tokenEq // ==
tokenLt // <
tokenLte // <=
tokenGt // >
tokenGte // >=
tokenNeq // !=
tokenComma // ,
tokenLparen // (
tokenRparen // )
// keywords
tokenNot
tokenAnd
tokenOr
tokenIn
tokenGlob
tokenRegexp
tokenTrue
tokenFalse
)
// lexer implements a lexical scanner that reads unicode characters
// and tokens from a byte buffer.
type lexer struct {
buf []byte // input buffer being scanned
pos int // current read offset into buf
start int // offset of the first byte of the current token
width int // byte width of the last rune read, used by unread
}
// scan reads the next token from the buffer and returns it. Leading
// whitespace is skipped. It returns tokenEOF once the input is
// exhausted and tokenIllegal for any rune that cannot start a token.
func (l *lexer) scan() token {
	l.start = l.pos
	l.skipWhitespace()

	r := l.read()
	// multi-rune tokens: push the rune back and hand off to the
	// dedicated sub-scanner.
	if isIdent(r) {
		l.unread()
		return l.scanIdent()
	}
	if isQuote(r) {
		l.unread()
		return l.scanQuote()
	}
	if isNumeric(r) {
		l.unread()
		return l.scanNumber()
	}
	if isCompare(r) {
		l.unread()
		return l.scanCompare()
	}

	// single-rune tokens and end of input.
	switch r {
	case eof:
		return tokenEOF
	case '(':
		return tokenLparen
	case ')':
		return tokenRparen
	case ',':
		return tokenComma
	default:
		return tokenIllegal
	}
}
// peek reads the next token and returns it without advancing the
// scanner: the lexer position is snapshotted before scanning and
// restored afterwards.
func (l *lexer) peek() token {
	pos, start, width := l.pos, l.start, l.width
	tok := l.scan()
	l.pos, l.start, l.width = pos, start, width
	return tok
}
// bytes returns the bytes corresponding to the most recently scanned
// token. Valid after calling Scan(). The returned slice aliases the
// lexer's input buffer; it is not a copy.
func (l *lexer) bytes() []byte {
return l.buf[l.start:l.pos]
}
// string returns the string corresponding to the most recently scanned
// token. Valid after calling Scan().
func (l *lexer) string() string {
return string(l.bytes())
}
// init initializes a scanner with a new buffer, resetting all
// position state so the lexer can be reused.
func (l *lexer) init(buf []byte) {
l.buf = buf
l.pos = 0
l.start = 0
l.width = 0
}
// scanIdent consumes a run of identifier runes and returns the token
// type. Keywords are recognized only in all-upper or all-lower case;
// anything else yields tokenIdent.
func (l *lexer) scanIdent() token {
	for {
		r := l.read()
		if r == eof {
			break
		}
		if !isIdent(r) {
			l.unread()
			break
		}
	}
	switch l.string() {
	case "NOT", "not":
		return tokenNot
	case "AND", "and":
		return tokenAnd
	case "OR", "or":
		return tokenOr
	case "IN", "in":
		return tokenIn
	case "GLOB", "glob":
		return tokenGlob
	case "REGEXP", "regexp":
		return tokenRegexp
	case "TRUE", "true":
		return tokenTrue
	case "FALSE", "false":
		return tokenFalse
	}
	return tokenIdent
}
// scanQuote consumes a single-quoted text literal, including both
// quote runes, and returns tokenText. It returns tokenIllegal when the
// closing quote is missing.
//
// NOTE(review): there is no escape handling here, yet parseText in the
// parser unescapes \' sequences — a literal containing an escaped
// quote will be terminated early at that quote. Confirm the intended
// escape syntax against upstream.
func (l *lexer) scanQuote() (tok token) {
l.read() // consume first quote
for {
if r := l.read(); r == eof {
return tokenIllegal
} else if isQuote(r) {
break
}
}
return tokenText
}
// scanNumber consumes a run of digits and '.' runes.
//
// NOTE(review): it always returns tokenInteger — a value such as 1.5
// is not classified as tokenReal. The parser accepts both kinds in the
// same case, so this appears benign, but confirm before relying on the
// token kind.
func (l *lexer) scanNumber() token {
for {
if r := l.read(); r == eof {
break
} else if !isNumeric(r) {
l.unread()
break
}
}
return tokenInteger
}
// scanCompare consumes a comparison operator and returns its token.
// It reads one rune to pick the base operator, then a second rune to
// widen it (>= <= == !=). A lone '=' is accepted as equality; a lone
// '!' yields tokenIllegal. When the second rune does not extend the
// operator it is pushed back.
func (l *lexer) scanCompare() (tok token) {
switch l.read() {
case '=':
tok = tokenEq
case '!':
tok = tokenNeq
case '>':
tok = tokenGt
case '<':
tok = tokenLt
}
r := l.read()
switch {
case tok == tokenGt && r == '=':
tok = tokenGte
case tok == tokenLt && r == '=':
tok = tokenLte
case tok == tokenEq && r == '=':
tok = tokenEq
case tok == tokenNeq && r == '=':
tok = tokenNeq
case tok == tokenNeq && r != '=':
tok = tokenIllegal
default:
l.unread()
}
return
}
// skipWhitespace advances past spaces, tabs and newlines and discards
// them from the pending token text.
func (l *lexer) skipWhitespace() {
	for {
		r := l.read()
		if r == eof {
			break
		}
		if !isWhitespace(r) {
			l.unread()
			break
		}
	}
	l.ignore()
}
// read decodes and returns the next UTF-8 rune from the buffer,
// advancing the position. It returns eof (and sets width to 0) when
// the buffer is exhausted.
func (l *lexer) read() rune {
if l.pos >= len(l.buf) {
l.width = 0
return eof
}
r, w := utf8.DecodeRune(l.buf[l.pos:])
l.width = w
l.pos += l.width
return r
}
// unread steps back by the width of the most recently read rune.
// Only valid once per call to read.
func (l *lexer) unread() {
l.pos -= l.width
}
// ignore discards the pending token text by moving the token start
// up to the current position.
func (l *lexer) ignore() {
l.start = l.pos
}
// eof is the sentinel rune returned when the end of input is reached.
var eof = rune(0)

// isWhitespace reports whether r is a space, tab or newline.
func isWhitespace(r rune) bool {
	switch r {
	case ' ', '\t', '\n':
		return true
	}
	return false
}

// isNumeric reports whether r may appear in a numeric literal.
func isNumeric(r rune) bool {
	return r == '.' || unicode.IsDigit(r)
}

// isQuote reports whether r delimits a text literal.
func isQuote(r rune) bool {
	return r == '\''
}

// isCompare reports whether r can start a comparison operator.
func isCompare(r rune) bool {
	switch r {
	case '=', '!', '>', '<':
		return true
	}
	return false
}

// isIdent reports whether r may appear in an identifier.
func isIdent(r rune) bool {
	return r == '_' || r == '-' || unicode.IsLetter(r)
}

117
vendor/github.com/drone/expr/parse/node.go generated vendored Normal file
View File

@ -0,0 +1,117 @@
package parse
// Node is an element in the parse tree.
type Node interface {
node()
}
// ValExpr defines a value expression.
type ValExpr interface {
Node
value()
}
// BoolExpr defines a boolean expression.
type BoolExpr interface {
Node
bool()
}
// An expression is represented by a tree consisting of one
// or more of the following concrete expression nodes.
//
type (
// ComparisonExpr represents a two-value comparison expression.
ComparisonExpr struct {
Operator Operator
Left, Right ValExpr
}
// AndExpr represents an AND expression.
AndExpr struct {
Left, Right BoolExpr
}
// OrExpr represents an OR expression.
OrExpr struct {
Left, Right BoolExpr
}
// NotExpr represents a NOT expression.
NotExpr struct {
Expr BoolExpr
}
// ParenBoolExpr represents a parenthesized boolean expression.
ParenBoolExpr struct {
Expr BoolExpr
}
// BasicLit represents a basic literal.
//
// NOTE(review): the parser never populates Kind — it is always the
// zero value (LiteralBool). Confirm before relying on it.
BasicLit struct {
Kind Literal // INT, REAL, TEXT
Value []byte
}
// ArrayLit represents an array literal.
ArrayLit struct {
Values []ValExpr
}
// Field represents a value lookup by name.
Field struct {
Name []byte
}
)
// Operator identifies the type of operator.
type Operator int
// Comparison operators. The negated variants (NotIn, NotRe, NotGlob)
// are produced by the parser when a base operator is preceded by NOT.
const (
OperatorEq Operator = iota
OperatorLt
OperatorLte
OperatorGt
OperatorGte
OperatorNeq
OperatorIn
OperatorRe
OperatorGlob
OperatorNotIn
OperatorNotRe
OperatorNotGlob
)
// Literal identifies the type of literal.
type Literal int
// The list of possible literal kinds.
const (
LiteralBool Literal = iota
LiteralInt
LiteralReal
LiteralText
)
// node() defines the node in a parse tree
func (x *ComparisonExpr) node() {}
func (x *AndExpr) node() {}
func (x *OrExpr) node() {}
func (x *NotExpr) node() {}
func (x *ParenBoolExpr) node() {}
func (x *BasicLit) node() {}
func (x *ArrayLit) node() {}
func (x *Field) node() {}
// bool() defines the node as a boolean expression.
func (x *ComparisonExpr) bool() {}
func (x *AndExpr) bool() {}
func (x *OrExpr) bool() {}
func (x *NotExpr) bool() {}
func (x *ParenBoolExpr) bool() {}
// value() defines the node as a value expression.
func (x *BasicLit) value() {}
func (x *ArrayLit) value() {}
func (x *Field) value() {}

223
vendor/github.com/drone/expr/parse/parse.go generated vendored Normal file
View File

@ -0,0 +1,223 @@
package parse
import (
"bytes"
"fmt"
)
// Tree is the representation of a single parsed SQL statement.
type Tree struct {
Root BoolExpr // root of the boolean expression tree
// Parsing only; cleared after parse.
lex *lexer
}
// Parse parses the SQL statement and returns a Tree.
func Parse(buf []byte) (*Tree, error) {
	tree := &Tree{lex: new(lexer)}
	return tree.Parse(buf)
}
// Parse parses the SQL statement buffer to construct an ast
// representation for execution. Parse errors raised via errorf are
// recovered and returned as err.
//
// NOTE(review): input remaining after the first complete expression is
// not rejected — confirm whether trailing garbage should be an error.
func (t *Tree) Parse(buf []byte) (tree *Tree, err error) {
defer t.recover(&err)
t.lex.init(buf)
t.Root = t.parseExpr()
return t, nil
}
// recover is the handler that turns panics into returns.
//
// NOTE(review): a panic value that is not an error re-panics at the
// type assertion. In practice errorf always panics with an error, so
// this is only reachable for unexpected runtime panics.
func (t *Tree) recover(err *error) {
if e := recover(); e != nil {
*err = e.(error)
}
}
// errorf formats the error, clears the partially built tree, and
// terminates processing by panicking; the panic is converted back into
// an error by recover in Tree.Parse. The byte offset of the current
// token start is included in the message.
func (t *Tree) errorf(format string, args ...interface{}) {
t.Root = nil
format = fmt.Sprintf("selector: parse error:%d: %s", t.lex.start, format)
panic(fmt.Errorf(format, args...))
}
// parseExpr parses the next boolean expression: an optional leading
// NOT, a comparison, and optionally a chained AND/OR expression.
//
// NOTE(review): AND and OR are given equal precedence and associate to
// the right, so "a == 1 AND b == 2 OR c == 3" parses as
// a AND (b OR c). SQL gives AND higher precedence than OR — confirm
// this grammar is intended.
func (t *Tree) parseExpr() BoolExpr {
if t.lex.peek() == tokenNot {
t.lex.scan()
return t.parseNot()
}
left := t.parseVal()
node := t.parseComparison(left)
switch t.lex.scan() {
case tokenOr:
return t.parseOr(node)
case tokenAnd:
return t.parseAnd(node)
default:
return node
}
}
// parseAnd builds an AND node from the already-parsed left operand and
// the expression that follows it.
func (t *Tree) parseAnd(left BoolExpr) BoolExpr {
	return &AndExpr{Left: left, Right: t.parseExpr()}
}
// parseOr builds an OR node from the already-parsed left operand and
// the expression that follows it.
func (t *Tree) parseOr(left BoolExpr) BoolExpr {
	return &OrExpr{Left: left, Right: t.parseExpr()}
}
// parseNot builds a NOT node wrapping the expression that follows the
// already-consumed NOT keyword.
func (t *Tree) parseNot() BoolExpr {
	return &NotExpr{Expr: t.parseExpr()}
}
// parseComparison parses a comparison whose left operand has already
// been consumed. An optional NOT before the operator flips IN, GLOB
// and REGEXP to their negated variants (NOT with other operators is
// silently ignored). IN operators take a parenthesized list on the
// right; everything else takes a single value.
func (t *Tree) parseComparison(left ValExpr) BoolExpr {
var negate bool
if t.lex.peek() == tokenNot {
t.lex.scan()
negate = true
}
node := new(ComparisonExpr)
node.Operator = t.parseOperator()
node.Left = left
if negate {
switch node.Operator {
case OperatorIn:
node.Operator = OperatorNotIn
case OperatorGlob:
node.Operator = OperatorNotGlob
case OperatorRe:
node.Operator = OperatorNotRe
}
}
switch node.Operator {
case OperatorIn, OperatorNotIn:
node.Right = t.parseList()
case OperatorRe, OperatorNotRe:
// TODO placeholder for custom Regexp Node
node.Right = t.parseVal()
default:
node.Right = t.parseVal()
}
return node
}
// parseOperator scans the next token and maps it to a comparison
// Operator, terminating the parse via errorf when the token is not a
// recognized operator.
func (t *Tree) parseOperator() (op Operator) {
	tok := t.lex.scan()
	switch tok {
	case tokenEq:
		op = OperatorEq
	case tokenGt:
		op = OperatorGt
	case tokenGte:
		op = OperatorGte
	case tokenLt:
		op = OperatorLt
	case tokenLte:
		op = OperatorLte
	case tokenNeq:
		op = OperatorNeq
	case tokenIn:
		op = OperatorIn
	case tokenRegexp:
		op = OperatorRe
	case tokenGlob:
		op = OperatorGlob
	default:
		t.errorf("illegal operator")
	}
	return
}
// parseVal parses a single value expression: a field reference, a
// quoted text literal, or a numeric/boolean literal. Any other token
// terminates the parse via errorf.
//
// NOTE(review): BasicLit.Kind is deliberately left at its zero value,
// matching the original implementation.
func (t *Tree) parseVal() ValExpr {
	switch t.lex.scan() {
	case tokenIdent:
		return &Field{Name: t.lex.bytes()}
	case tokenText:
		return t.parseText()
	case tokenReal, tokenInteger, tokenTrue, tokenFalse:
		return &BasicLit{Value: t.lex.bytes()}
	}
	t.errorf("illegal value expression")
	return nil
}
// parseList parses a parenthesized, comma-separated list of value
// expressions into an ArrayLit. It terminates the parse via errorf on
// a missing opening paren or an unterminated list. Commas are simply
// skipped, so consecutive or leading commas are tolerated.
func (t *Tree) parseList() ValExpr {
if t.lex.scan() != tokenLparen {
t.errorf("unexpected token, expecting (")
return nil
}
node := new(ArrayLit)
for {
next := t.lex.peek()
switch next {
case tokenEOF:
t.errorf("unexpected eof, expecting )")
case tokenComma:
t.lex.scan()
case tokenRparen:
t.lex.scan()
return node
default:
child := t.parseVal()
node.Values = append(node.Values, child)
}
}
}
// parseText converts the most recently scanned text token into a
// BasicLit, stripping the surrounding quotes and unescaping \' to '.
func (t *Tree) parseText() ValExpr {
node := new(BasicLit)
node.Value = t.lex.bytes()
// this is where we strip the starting and ending quote
// and unescape the string. On the surface this might look
// like it is subject to index out of bounds errors but
// it is safe because it is already verified by the lexer.
node.Value = node.Value[1 : len(node.Value)-1]
node.Value = bytes.Replace(node.Value, quoteEscaped, quoteUnescaped, -1)
return node
}
// errString indicates the string literal does no have the right syntax.
// var errString = errors.New("invalid string literal")
// byte sequences used to unescape quoted text literals.
var (
quoteEscaped = []byte("\\'")
quoteUnescaped = []byte("'")
)
// unquote interprets buf as a single-quoted literal, returning the
// value that buf quotes.
// func unquote(buf []byte) ([]byte, error) {
// n := len(buf)
// if n < 2 {
// return nil, errString
// }
// quote := buf[0]
// if quote != quoteUnescaped[0] {
// return nil, errString
// }
// if quote != buf[n-1] {
// return nil, errString
// }
// buf = buf[1 : n-1]
// return bytes.Replace(buf, quoteEscaped, quoteUnescaped, -1), nil
// }

50
vendor/github.com/drone/expr/selector.go generated vendored Normal file
View File

@ -0,0 +1,50 @@
package expr
import "github.com/drone/expr/parse"
// Selector represents a parsed SQL selector statement.
type Selector struct {
*parse.Tree
}
// Parse parses the SQL statement and returns a new Statement object.
// Note that a non-nil Selector is returned even when parsing fails;
// callers must check err before using it.
func Parse(b []byte) (selector *Selector, err error) {
selector = new(Selector)
selector.Tree, err = parse.Parse(b)
return
}
// ParseString parses the SQL statement and returns a new Statement object.
func ParseString(s string) (selector *Selector, err error) {
return Parse([]byte(s))
}
// Eval evaluates the SQL statement using the provided data and returns
// true if all conditions are satisfied. If a runtime error is
// encountered, a false value is returned along with the error.
func (s *Selector) Eval(row Row) (match bool, err error) {
defer errRecover(&err)
state := &state{vars: row}
match = state.walk(s.Root)
return
}
// Row defines a row of columnar data.
//
// Field names and values are represented as []byte so that sources
// which already hold their data as bytes avoid extra allocations when
// converting between []byte and string.
type Row interface {
	Field([]byte) []byte
}

// NewRow returns a Row bound to a map of key/value strings.
func NewRow(m map[string]string) Row {
	return mapRow(m)
}

// mapRow adapts a plain string map to the Row interface.
type mapRow map[string]string

// Field returns the value of the named field; a missing field yields
// an empty (zero-length) byte slice.
func (m mapRow) Field(name []byte) []byte {
	return []byte(m[string(name)])
}

12
vendor/vendor.json vendored
View File

@ -331,6 +331,18 @@
"revision": "523de92ea410a5756012669fb628fe42a3056b3e",
"revisionTime": "2017-03-25T05:49:59Z"
},
{
"checksumSHA1": "AT++gcbYW/VQxkmbInFJk1Feg3o=",
"path": "github.com/drone/expr",
"revision": "72f4df4a266b7e1e15b75d4ab8e43e273fcbe1d7",
"revisionTime": "2017-09-09T01:06:28Z"
},
{
"checksumSHA1": "ndkZW2hZSw4AE5WQmWS8sPk79NY=",
"path": "github.com/drone/expr/parse",
"revision": "72f4df4a266b7e1e15b75d4ab8e43e273fcbe1d7",
"revisionTime": "2017-09-09T01:06:28Z"
},
{
"checksumSHA1": "40Ns85VYa4smQPcewZ7SOdfLnKU=",
"path": "github.com/fatih/structs",