This commit is contained in:
Adnan Hajdarevic 2021-04-03 18:01:13 +02:00
parent e329b6d9ff
commit 568c711625
138 changed files with 22876 additions and 90497 deletions

205
vendor/github.com/antonmedv/expr/parser/lexer/lexer.go generated vendored Normal file
View file

@ -0,0 +1,205 @@
package lexer
import (
"fmt"
"strings"
"unicode/utf8"
"github.com/antonmedv/expr/file"
)
// Lex tokenizes the given source and returns the token stream, or an
// error bound to the source for pretty printing.
func Lex(source *file.Source) ([]Token, error) {
	start := file.Location{1, 0}
	l := &lexer{
		input:    source.Content(),
		tokens:   make([]Token, 0),
		loc:      start,
		prev:     start,
		startLoc: start,
	}
	// Run the state machine until a state returns nil.
	state := stateFn(root)
	for state != nil {
		state = state(l)
	}
	if l.err == nil {
		return l.tokens, nil
	}
	return nil, l.err.Bind(source)
}
// lexer holds the scanning state while tokenizing an input string.
type lexer struct {
	input string
	state stateFn // NOTE(review): appears unused in this file — confirm
	tokens []Token
	start, end int // current position in input
	width int // width in bytes of the last rune read by next()
	startLoc file.Location // start location
	prev, loc file.Location // prev location of end location, end location
	err *file.Error // first scan error, if any
}
// eof is the sentinel rune returned by next() when input is exhausted.
const eof rune = -1
// next consumes and returns the next rune, maintaining the line and
// column bookkeeping; it returns eof when the input is exhausted.
func (l *lexer) next() rune {
	if l.end >= len(l.input) {
		l.width = 0
		return eof
	}
	r, size := utf8.DecodeRuneInString(l.input[l.end:])
	l.width = size
	l.end += size
	l.prev = l.loc
	switch r {
	case '\n':
		l.loc.Line++
		l.loc.Column = 0
	default:
		l.loc.Column++
	}
	return r
}
// peek returns the next rune without consuming it.
func (l *lexer) peek() rune {
	defer l.backup()
	return l.next()
}
// backup steps back one rune. It can only undo the single most recent
// call to next(): width and prev hold exactly one step of history.
func (l *lexer) backup() {
	l.end -= l.width
	l.loc = l.prev
}
// emit appends a token of kind t whose value is the current word.
func (l *lexer) emit(t Kind) {
	l.emitValue(t, l.word())
}
// emitValue appends a token of kind t with an explicit value and
// resets the start of the next token to the current position.
func (l *lexer) emitValue(t Kind, value string) {
	token := Token{
		Location: l.startLoc,
		Kind:     t,
		Value:    value,
	}
	l.tokens = append(l.tokens, token)
	l.start = l.end
	l.startLoc = l.loc
}
// emitEOF appends the terminating EOF token. Its location points at
// the previous position so error messages land on real input.
func (l *lexer) emitEOF() {
	eofToken := Token{
		Kind:     EOF,
		Location: l.prev, // point to previous position for better errors
	}
	l.tokens = append(l.tokens, eofToken)
	l.start = l.end
	l.startLoc = l.loc
}
// word returns the input slice between the current token start and
// the scan position.
func (l *lexer) word() string {
	return l.input[l.start:l.end]
}
// ignore discards the pending word, moving the token start forward.
func (l *lexer) ignore() {
	l.start = l.end
	l.startLoc = l.loc
}
// accept consumes the next rune if it appears in valid and reports
// whether it did so.
func (l *lexer) accept(valid string) bool {
	if !strings.ContainsRune(valid, l.next()) {
		l.backup()
		return false
	}
	return true
}
// acceptRun consumes a run of runes drawn from valid.
func (l *lexer) acceptRun(valid string) {
	for {
		if !strings.ContainsRune(valid, l.next()) {
			break
		}
	}
	l.backup()
}
// acceptWord consumes word if it appears verbatim at the scan
// position; on mismatch the position and locations are restored.
func (l *lexer) acceptWord(word string) bool {
	end, loc, prev := l.end, l.loc, l.prev
	for _, want := range word {
		if got := l.next(); got != want {
			l.end, l.loc, l.prev = end, loc, prev
			return false
		}
	}
	return true
}
// error records the first scan error (later errors are dropped) and
// returns nil so the state machine stops.
func (l *lexer) error(format string, args ...interface{}) stateFn {
	if l.err != nil { // keep only the first error
		return nil
	}
	l.err = &file.Error{
		Location: l.loc,
		Message:  fmt.Sprintf(format, args...),
	}
	return nil
}
// digitVal returns the numeric value of a hexadecimal digit rune, or
// 16 (larger than any legal digit value) for anything else.
func digitVal(ch rune) int {
	// Setting bit 0x20 folds ASCII upper case to lower case.
	switch folded := ('a' - 'A') | ch; {
	case '0' <= ch && ch <= '9':
		return int(ch - '0')
	case 'a' <= folded && folded <= 'f':
		return int(folded - 'a' + 10)
	}
	return 16 // larger than any legal digit val
}
// lower folds an ASCII upper-case letter to lower case by setting bit
// 0x20; other runes are returned with that bit set as well, so the
// result is only meaningful for ASCII letters.
func lower(ch rune) rune {
	return ch | ('a' - 'A')
}
// scanDigits reads up to n digits of the given base, starting from
// ch, and records an error if fewer than n valid digits were found.
// It returns the first rune past the digits.
func (l *lexer) scanDigits(ch rune, base, n int) rune {
	remaining := n
	for remaining > 0 {
		if digitVal(ch) >= base {
			break
		}
		ch = l.next()
		remaining--
	}
	if remaining > 0 {
		l.error("invalid char escape")
	}
	return ch
}
// scanEscape consumes one escape sequence (the part after the
// backslash) inside a literal delimited by quote. It returns the
// first rune following the escape sequence.
func (l *lexer) scanEscape(quote rune) rune {
	ch := l.next() // read character after '\\'
	switch ch {
	case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', quote:
		// simple one-character escape; nothing to decode
		ch = l.next()
	case '0', '1', '2', '3', '4', '5', '6', '7':
		// octal escape: up to three octal digits, first already read
		ch = l.scanDigits(ch, 8, 3)
	case 'x':
		ch = l.scanDigits(l.next(), 16, 2)
	case 'u':
		ch = l.scanDigits(l.next(), 16, 4)
	case 'U':
		ch = l.scanDigits(l.next(), 16, 8)
	default:
		l.error("invalid char escape")
	}
	return ch
}
// scanString consumes a quoted literal (the opening quote has already
// been read) up to and including the closing quote, and returns the
// number of characters scanned between the quotes.
func (l *lexer) scanString(quote rune) (n int) {
	for ch := l.next(); ch != quote; n++ {
		switch ch {
		case '\n', eof:
			l.error("literal not terminated")
			return
		case '\\':
			ch = l.scanEscape(quote)
		default:
			ch = l.next()
		}
	}
	return
}

134
vendor/github.com/antonmedv/expr/parser/lexer/state.go generated vendored Normal file
View file

@ -0,0 +1,134 @@
package lexer
import (
"strings"
)
// stateFn is one state of the lexer's state machine; it returns the
// next state, or nil to stop the scan.
type stateFn func(*lexer) stateFn
// root is the initial lexer state: it dispatches on the next rune,
// either emitting a token directly or handing off to a more specific
// state. Case order is significant (e.g. digits before the general
// IsAlphaNumeric case, which also matches digits).
func root(l *lexer) stateFn {
	switch r := l.next(); {
	case r == eof:
		l.emitEOF()
		return nil
	case IsSpace(r):
		l.ignore()
		return root
	case r == '\'' || r == '"':
		// Quoted string literal; unescape errors become scan errors.
		l.scanString(r)
		str, err := unescape(l.word())
		if err != nil {
			l.error("%v", err)
		}
		l.emitValue(String, str)
	case '0' <= r && r <= '9':
		l.backup()
		return number
	case strings.ContainsRune("([{", r):
		l.emit(Bracket)
	case strings.ContainsRune(")]}", r):
		l.emit(Bracket)
	case strings.ContainsRune("#,?:%+-/", r): // single rune operator
		l.emit(Operator)
	case strings.ContainsRune("&|!=*<>", r): // possible double rune operator
		l.accept("&|=*")
		l.emit(Operator)
	case r == '.':
		// '.' may start a float, the ".." range, or member access.
		l.backup()
		return dot
	case IsAlphaNumeric(r):
		l.backup()
		return identifier
	default:
		return l.error("unrecognized character: %#U", r)
	}
	return root
}
// number scans a numeric literal and emits it as a Number token.
func number(l *lexer) stateFn {
	if l.scanNumber() {
		l.emit(Number)
		return root
	}
	return l.error("bad number syntax: %q", l.word())
}
// scanNumber scans an integer or float literal (decimal, hex, octal,
// or binary, with optional '_' digit separators, fraction, and
// exponent) and reports whether the literal was well formed.
func (l *lexer) scanNumber() bool {
	digits := "0123456789_"
	// Is it hex?
	if l.accept("0") {
		// Note: Leading 0 does not mean octal in floats.
		if l.accept("xX") {
			digits = "0123456789abcdefABCDEF_"
		} else if l.accept("oO") {
			digits = "01234567_"
		} else if l.accept("bB") {
			digits = "01_"
		}
	}
	l.acceptRun(digits)
	// Save state so a '.' that turns out to start the ".." range
	// operator can be rolled back below.
	loc, prev, end := l.loc, l.prev, l.end
	if l.accept(".") {
		// Lookup for .. operator: if after dot there is another dot (1..2), it maybe a range operator.
		if l.peek() == '.' {
			// We can't backup() here, as it would require two backups,
			// and backup() func supports only one for now. So, save and
			// restore it here.
			l.loc, l.prev, l.end = loc, prev, end
			return true
		}
		l.acceptRun(digits)
	}
	if l.accept("eE") {
		l.accept("+-")
		l.acceptRun(digits)
	}
	// Next thing mustn't be alphanumeric.
	if IsAlphaNumeric(l.peek()) {
		l.next()
		return false
	}
	return true
}
// dot handles a leading '.', which starts either a float literal
// (.5), the range operator (..), or the member-access operator.
func dot(l *lexer) stateFn {
	l.next() // consume the '.'
	digitFollows := l.accept("0123456789")
	if digitFollows {
		l.backup()
		return number
	}
	l.accept(".") // a second dot makes the ".." range operator
	l.emit(Operator)
	return root
}
// identifier absorbs an identifier-like word, then emits it as a
// word-operator, hands off to the "not" state, or emits it as a
// plain Identifier token.
func identifier(l *lexer) stateFn {
	for {
		r := l.next()
		if IsAlphaNumeric(r) {
			continue // absorb
		}
		l.backup()
		switch l.word() {
		case "not":
			return not
		case "in", "or", "and", "matches", "contains", "startsWith", "endsWith":
			l.emit(Operator)
		default:
			l.emit(Identifier)
		}
		return root
	}
}
// not emits the "not" keyword as an Operator token. When the keyword
// is directly followed by " in", acceptWord extends the pending word
// so the emitted operator becomes "not in".
//
// The original if/else had two identical branches; acceptWord's only
// job here is to decide whether the token text is "not" or "not in",
// so the branch is unnecessary.
func not(l *lexer) stateFn {
	l.acceptWord(" in")
	l.emit(Operator)
	return root
}

47
vendor/github.com/antonmedv/expr/parser/lexer/token.go generated vendored Normal file
View file

@ -0,0 +1,47 @@
package lexer
import (
"fmt"
"github.com/antonmedv/expr/file"
)
// Kind identifies the lexical category of a Token.
type Kind string

// Token kinds produced by the lexer. All constants are explicitly
// typed as Kind; previously only Identifier carried the type and the
// rest were untyped string constants.
const (
	Identifier Kind = "Identifier"
	Number     Kind = "Number"
	String     Kind = "String"
	Operator   Kind = "Operator"
	Bracket    Kind = "Bracket"
	EOF        Kind = "EOF"
)
// Token is a single lexeme: its source location (embedded), its
// kind, and its raw text value.
type Token struct {
	file.Location
	Kind Kind
	Value string
}
// String renders the token for diagnostics: the bare kind when there
// is no value, otherwise Kind("value").
func (t Token) String() string {
	if t.Value != "" {
		return fmt.Sprintf("%s(%#v)", t.Kind, t.Value)
	}
	return string(t.Kind)
}
// Is reports whether the token has the given kind and, when values
// are supplied, whether its value matches one of them.
//
// Rewritten to return directly from the loop instead of using goto,
// which is clearer and behaviorally identical.
func (t Token) Is(kind Kind, values ...string) bool {
	if len(values) == 0 {
		return kind == t.Kind
	}
	for _, v := range values {
		if v == t.Value {
			return kind == t.Kind
		}
	}
	return false
}

194
vendor/github.com/antonmedv/expr/parser/lexer/utils.go generated vendored Normal file
View file

@ -0,0 +1,194 @@
package lexer
import (
"fmt"
"strings"
"unicode"
"unicode/utf8"
)
// IsSpace reports whether r is a Unicode white-space character.
func IsSpace(r rune) bool {
	return unicode.IsSpace(r)
}
// IsAlphaNumeric reports whether r may appear inside an identifier:
// an identifier-start character (see IsAlphabetic) or a digit.
func IsAlphaNumeric(r rune) bool {
	return IsAlphabetic(r) || unicode.IsDigit(r)
}
// IsAlphabetic reports whether r may start an identifier: any
// Unicode letter, or the '_' and '$' characters.
func IsAlphabetic(r rune) bool {
	switch r {
	case '_', '$':
		return true
	}
	return unicode.IsLetter(r)
}
var (
	// newlineNormalizer rewrites Windows (\r\n) and bare-\r line
	// endings to plain \n before unescaping.
	newlineNormalizer = strings.NewReplacer("\r\n", "\n", "\r", "\n")
)
// Unescape takes a quoted string, unquotes, and unescapes it.
func unescape(value string) (string, error) {
	// All strings normalize newlines to the \n representation.
	value = newlineNormalizer.Replace(value)
	n := len(value)
	// Nothing to unescape / decode.
	if n < 2 {
		return value, fmt.Errorf("unable to unescape string")
	}
	// Quoted string of some form, must have same first and last char.
	if value[0] != value[n-1] || (value[0] != '"' && value[0] != '\'') {
		return value, fmt.Errorf("unable to unescape string")
	}
	// Strip the surrounding quotes.
	value = value[1 : n-1]
	// The string contains escape characters.
	// The following logic is adapted from `strconv/quote.go`
	var runeTmp [utf8.UTFMax]byte
	buf := make([]byte, 0, 3*n/2) // headroom for worst-case growth
	for len(value) > 0 {
		c, multibyte, rest, err := unescapeChar(value)
		if err != nil {
			return "", err
		}
		value = rest
		if c < utf8.RuneSelf || !multibyte {
			// Single-byte value: append directly.
			buf = append(buf, byte(c))
		} else {
			// Encode the decoded rune as UTF-8.
			n := utf8.EncodeRune(runeTmp[:], c)
			buf = append(buf, runeTmp[:n]...)
		}
	}
	return string(buf), nil
}
// unescapeChar takes a string input and returns the following info:
//
//	value - the escaped unicode rune at the front of the string.
//	multibyte - whether the rune value might require multiple bytes to represent.
//	tail - the remainder of the input string.
//	err - error value, if the character could not be unescaped.
//
// When multibyte is true the return value may still fit within a single byte,
// but a multibyte conversion is attempted which is more expensive than when the
// value is known to fit within one byte.
func unescapeChar(s string) (value rune, multibyte bool, tail string, err error) {
	// 1. Character is not an escape sequence.
	switch c := s[0]; {
	case c >= utf8.RuneSelf:
		// Multi-byte UTF-8 rune: decode and pass through unchanged.
		r, size := utf8.DecodeRuneInString(s)
		return r, true, s[size:], nil
	case c != '\\':
		// Plain single-byte character.
		return rune(s[0]), false, s[1:], nil
	}
	// 2. Last character is the start of an escape sequence.
	if len(s) <= 1 {
		err = fmt.Errorf("unable to unescape string, found '\\' as last character")
		return
	}
	c := s[1]
	s = s[2:]
	// 3. Common escape sequences shared with Google SQL
	switch c {
	case 'a':
		value = '\a'
	case 'b':
		value = '\b'
	case 'f':
		value = '\f'
	case 'n':
		value = '\n'
	case 'r':
		value = '\r'
	case 't':
		value = '\t'
	case 'v':
		value = '\v'
	case '\\':
		value = '\\'
	case '\'':
		value = '\''
	case '"':
		value = '"'
	case '`':
		value = '`'
	case '?':
		value = '?'
	// 4. Unicode escape sequences, reproduced from `strconv/quote.go`
	case 'x', 'X', 'u', 'U':
		// Number of hex digits for each escape form.
		n := 0
		switch c {
		case 'x', 'X':
			n = 2
		case 'u':
			n = 4
		case 'U':
			n = 8
		}
		var v rune
		if len(s) < n {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		for j := 0; j < n; j++ {
			x, ok := unhex(s[j])
			if !ok {
				err = fmt.Errorf("unable to unescape string")
				return
			}
			v = v<<4 | x
		}
		s = s[n:]
		if v > utf8.MaxRune {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		value = v
		multibyte = true
	// 5. Octal escape sequences, must be three digits \[0-3][0-7][0-7]
	case '0', '1', '2', '3':
		if len(s) < 2 {
			err = fmt.Errorf("unable to unescape octal sequence in string")
			return
		}
		v := rune(c - '0')
		for j := 0; j < 2; j++ {
			x := s[j]
			if x < '0' || x > '7' {
				err = fmt.Errorf("unable to unescape octal sequence in string")
				return
			}
			v = v*8 + rune(x-'0')
		}
		if v > utf8.MaxRune {
			err = fmt.Errorf("unable to unescape string")
			return
		}
		value = v
		s = s[2:]
		multibyte = true
	// Unknown escape sequence.
	default:
		err = fmt.Errorf("unable to unescape string")
	}
	tail = s
	return
}
// unhex decodes a single hexadecimal digit character, reporting
// whether the byte was a valid hex digit.
func unhex(b byte) (rune, bool) {
	c := rune(b)
	switch {
	case c >= '0' && c <= '9':
		return c - '0', true
	case c >= 'a' && c <= 'f':
		return 10 + c - 'a', true
	case c >= 'A' && c <= 'F':
		return 10 + c - 'A', true
	default:
		return 0, false
	}
}

579
vendor/github.com/antonmedv/expr/parser/parser.go generated vendored Normal file
View file

@ -0,0 +1,579 @@
package parser
import (
"fmt"
"regexp"
"strconv"
"strings"
"unicode/utf8"
. "github.com/antonmedv/expr/ast"
"github.com/antonmedv/expr/file"
. "github.com/antonmedv/expr/parser/lexer"
)
// associativity describes how operators of equal precedence bind.
type associativity int

const (
	left associativity = iota + 1
	right
)

// operator describes an operator's precedence and associativity.
type operator struct {
	precedence int
	associativity associativity
}

// builtin records the number of arguments a builtin function takes.
type builtin struct {
	arity int
}
// unaryOperators maps prefix operators to their parsing precedence.
var unaryOperators = map[string]operator{
	"not": {50, left},
	"!":   {50, left},
	"-":   {500, left},
	"+":   {500, left},
}

// binaryOperators maps infix operators to precedence and
// associativity; higher precedence binds tighter.
var binaryOperators = map[string]operator{
	"or":         {10, left},
	"||":         {10, left},
	"and":        {15, left},
	"&&":         {15, left},
	"==":         {20, left},
	"!=":         {20, left},
	"<":          {20, left},
	">":          {20, left},
	">=":         {20, left},
	"<=":         {20, left},
	"not in":     {20, left},
	"in":         {20, left},
	"matches":    {20, left},
	"contains":   {20, left},
	"startsWith": {20, left},
	"endsWith":   {20, left},
	"..":         {25, left},
	"+":          {30, left},
	"-":          {30, left},
	"*":          {60, left},
	"/":          {60, left},
	"%":          {60, left},
	"**":         {70, right},
}

// builtins maps builtin function names to their arity; two-argument
// builtins take a closure as the second argument.
var builtins = map[string]builtin{
	"len":    {1},
	"all":    {2},
	"none":   {2},
	"any":    {2},
	"one":    {2},
	"filter": {2},
	"map":    {2},
	"count":  {2},
}
// parser consumes the token stream and builds the AST.
type parser struct {
	tokens []Token
	current Token // token at pos
	pos int
	err *file.Error // first parse error, if any
	depth int // closure call depth
}

// Tree is the parse result: the AST root plus its source, kept for
// error reporting.
type Tree struct {
	Node Node
	Source *file.Source
}
// Parse lexes and parses input into an AST wrapped in a Tree; errors
// are returned bound to the source for pretty printing.
func Parse(input string) (*Tree, error) {
	source := file.NewSource(input)
	tokens, err := Lex(source)
	if err != nil {
		return nil, err
	}
	p := &parser{tokens: tokens, current: tokens[0]}
	node := p.parseExpression(0)
	// The whole input must be consumed, ending on the EOF token.
	if !p.current.Is(EOF) {
		p.error("unexpected token %v", p.current)
	}
	if p.err != nil {
		return nil, p.err.Bind(source)
	}
	tree := &Tree{Node: node, Source: source}
	return tree, nil
}
// error records the first parse error at the current token; later
// errors are ignored so the earliest problem is reported.
func (p *parser) error(format string, args ...interface{}) {
	if p.err != nil {
		return
	}
	p.err = &file.Error{
		Location: p.current.Location,
		Message:  fmt.Sprintf(format, args...),
	}
}
// next advances to the following token, recording an error if the
// stream is exhausted (a well-formed stream ends with an EOF token).
func (p *parser) next() {
	p.pos++
	if p.pos < len(p.tokens) {
		p.current = p.tokens[p.pos]
		return
	}
	p.error("unexpected end of expression")
}
// expect consumes the current token when it matches kind (and one of
// values, if given), recording an error otherwise.
func (p *parser) expect(kind Kind, values ...string) {
	if !p.current.Is(kind, values...) {
		p.error("unexpected token %v", p.current)
		return
	}
	p.next()
}
// parse functions

// parseExpression implements precedence climbing: it parses a
// primary expression, then folds in binary operators whose
// precedence is at least the given minimum. At the top level
// (precedence 0) it also handles the ?: conditional.
func (p *parser) parseExpression(precedence int) Node {
	nodeLeft := p.parsePrimary()
	token := p.current
	for token.Is(Operator) && p.err == nil {
		if op, ok := binaryOperators[token.Value]; ok {
			if op.precedence >= precedence {
				p.next()
				var nodeRight Node
				if op.associativity == left {
					// Left-associative: the right operand only binds
					// strictly-higher precedence.
					nodeRight = p.parseExpression(op.precedence + 1)
				} else {
					nodeRight = p.parseExpression(op.precedence)
				}
				if token.Is(Operator, "matches") {
					// Pre-compile the pattern when the right-hand
					// side is a string literal.
					var r *regexp.Regexp
					var err error
					if s, ok := nodeRight.(*StringNode); ok {
						r, err = regexp.Compile(s.Value)
						if err != nil {
							p.error("%v", err)
						}
					}
					nodeLeft = &MatchesNode{
						Regexp: r,
						Left:   nodeLeft,
						Right:  nodeRight,
					}
					nodeLeft.SetLocation(token.Location)
				} else {
					nodeLeft = &BinaryNode{
						Operator: token.Value,
						Left:     nodeLeft,
						Right:    nodeRight,
					}
					nodeLeft.SetLocation(token.Location)
				}
				token = p.current
				continue
			}
		}
		break
	}
	if precedence == 0 {
		nodeLeft = p.parseConditionalExpression(nodeLeft)
	}
	return nodeLeft
}
// parsePrimary parses a prefix/primary position: unary operators,
// parenthesized expressions, closure pointer accessors (# and .),
// falling through to literal/identifier parsing.
func (p *parser) parsePrimary() Node {
	token := p.current
	if token.Is(Operator) {
		if op, ok := unaryOperators[token.Value]; ok {
			p.next()
			expr := p.parseExpression(op.precedence)
			node := &UnaryNode{
				Operator: token.Value,
				Node:     expr,
			}
			node.SetLocation(token.Location)
			return p.parsePostfixExpression(node)
		}
	}
	if token.Is(Bracket, "(") {
		p.next()
		expr := p.parseExpression(0)
		p.expect(Bracket, ")") // "an opened parenthesis is not properly closed"
		return p.parsePostfixExpression(expr)
	}
	if p.depth > 0 {
		// Inside a closure, # and . refer to the current element.
		if token.Is(Operator, "#") || token.Is(Operator, ".") {
			if token.Is(Operator, "#") {
				p.next()
			}
			node := &PointerNode{}
			node.SetLocation(token.Location)
			return p.parsePostfixExpression(node)
		}
	} else {
		if token.Is(Operator, "#") || token.Is(Operator, ".") {
			p.error("cannot use pointer accessor outside closure")
		}
	}
	return p.parsePrimaryExpression()
}
// parseConditionalExpression parses the ternary forms
// "cond ? a : b" and the short "cond ?: b" (which reuses cond as the
// true-branch value). Chained conditionals are folded left to right.
func (p *parser) parseConditionalExpression(node Node) Node {
	var expr1, expr2 Node
	for p.current.Is(Operator, "?") && p.err == nil {
		p.next()
		if !p.current.Is(Operator, ":") {
			expr1 = p.parseExpression(0)
			p.expect(Operator, ":")
			expr2 = p.parseExpression(0)
		} else {
			// Elvis form "?:": reuse the condition as expr1.
			p.next()
			expr1 = node
			expr2 = p.parseExpression(0)
		}
		// NOTE(review): unlike the other node constructors in this
		// file, no SetLocation is called here — confirm intentional.
		node = &ConditionalNode{
			Cond: node,
			Exp1: expr1,
			Exp2: expr2,
		}
	}
	return node
}
// parsePrimaryExpression parses literals (bool, nil, numbers,
// strings), identifiers/calls, and array/map literals, then applies
// any postfix operators.
//
// Fix: integer literals with 0X/0o/0O/0b/0B prefixes — which the
// lexer accepts — previously fell into the base-10 ParseInt branch
// (only a lowercase "x" was detected) and failed with "invalid
// integer literal". All prefixed forms are now parsed with base 0,
// which auto-detects 0x/0o/0b prefixes.
func (p *parser) parsePrimaryExpression() Node {
	var node Node
	token := p.current
	switch token.Kind {
	case Identifier:
		p.next()
		switch token.Value {
		case "true":
			node := &BoolNode{Value: true}
			node.SetLocation(token.Location)
			return node
		case "false":
			node := &BoolNode{Value: false}
			node.SetLocation(token.Location)
			return node
		case "nil":
			node := &NilNode{}
			node.SetLocation(token.Location)
			return node
		default:
			node = p.parseIdentifierExpression(token)
		}
	case Number:
		p.next()
		// Strip '_' digit separators before parsing.
		value := strings.Replace(token.Value, "_", "", -1)
		if strings.ContainsAny(value, ".eE") {
			// Fraction or exponent present: float literal.
			number, err := strconv.ParseFloat(value, 64)
			if err != nil {
				p.error("invalid float literal: %v", err)
			}
			node := &FloatNode{Value: number}
			node.SetLocation(token.Location)
			return node
		} else if len(value) >= 2 && value[0] == '0' && strings.ContainsRune("xXoObB", rune(value[1])) {
			// Prefixed integer literal: base 0 auto-detects 0x/0o/0b.
			number, err := strconv.ParseInt(value, 0, 64)
			if err != nil {
				p.error("invalid hex literal: %v", err)
			}
			node := &IntegerNode{Value: int(number)}
			node.SetLocation(token.Location)
			return node
		} else {
			number, err := strconv.ParseInt(value, 10, 64)
			if err != nil {
				p.error("invalid integer literal: %v", err)
			}
			node := &IntegerNode{Value: int(number)}
			node.SetLocation(token.Location)
			return node
		}
	case String:
		p.next()
		node := &StringNode{Value: token.Value}
		node.SetLocation(token.Location)
		return node
	default:
		if token.Is(Bracket, "[") {
			node = p.parseArrayExpression(token)
		} else if token.Is(Bracket, "{") {
			node = p.parseMapExpression(token)
		} else {
			p.error("unexpected token %v", token)
		}
	}
	return p.parsePostfixExpression(node)
}
// parseIdentifierExpression parses what follows an identifier token:
// a builtin call, an ordinary function call, or a plain identifier
// reference.
func (p *parser) parseIdentifierExpression(token Token) Node {
	var node Node
	if p.current.Is(Bracket, "(") {
		var arguments []Node
		if b, ok := builtins[token.Value]; ok {
			p.expect(Bracket, "(")
			// TODO: Add builtins signatures.
			if b.arity == 1 {
				arguments = make([]Node, 1)
				arguments[0] = p.parseExpression(0)
			} else if b.arity == 2 {
				// Two-argument builtins take a closure as the second
				// argument, e.g. all(list, {...}).
				arguments = make([]Node, 2)
				arguments[0] = p.parseExpression(0)
				p.expect(Operator, ",")
				arguments[1] = p.parseClosure()
			}
			p.expect(Bracket, ")")
			node = &BuiltinNode{
				Name:      token.Value,
				Arguments: arguments,
			}
			node.SetLocation(token.Location)
		} else {
			arguments = p.parseArguments()
			node = &FunctionNode{
				Name:      token.Value,
				Arguments: arguments,
			}
			node.SetLocation(token.Location)
		}
	} else {
		node = &IdentifierNode{Value: token.Value}
		node.SetLocation(token.Location)
	}
	return node
}
// parseClosure parses a brace-delimited closure body, tracking depth
// so pointer accessors (# and .) are only valid inside it.
func (p *parser) parseClosure() Node {
	token := p.current
	p.expect(Bracket, "{")
	p.depth++
	body := p.parseExpression(0)
	p.depth--
	p.expect(Bracket, "}")
	node := &ClosureNode{Node: body}
	node.SetLocation(token.Location)
	return node
}
// parseArrayExpression parses a bracketed, comma-separated array
// literal; a trailing comma before ']' is permitted.
func (p *parser) parseArrayExpression(token Token) Node {
	items := make([]Node, 0)
	p.expect(Bracket, "[")
	for !p.current.Is(Bracket, "]") && p.err == nil {
		if len(items) > 0 {
			p.expect(Operator, ",")
			if p.current.Is(Bracket, "]") {
				break // trailing comma before the closing bracket
			}
		}
		items = append(items, p.parseExpression(0))
	}
	p.expect(Bracket, "]")
	node := &ArrayNode{Nodes: items}
	node.SetLocation(token.Location)
	return node
}
// parseMapExpression parses a brace-delimited map literal. Keys may
// be numbers, strings, identifiers (treated as strings), or
// parenthesized expressions; a trailing comma before '}' is allowed.
func (p *parser) parseMapExpression(token Token) Node {
	p.expect(Bracket, "{")
	nodes := make([]Node, 0)
	for !p.current.Is(Bracket, "}") && p.err == nil {
		if len(nodes) > 0 {
			p.expect(Operator, ",")
			if p.current.Is(Bracket, "}") {
				goto end // trailing comma before the closing brace
			}
			if p.current.Is(Operator, ",") {
				p.error("unexpected token %v", p.current)
			}
		}
		var key Node
		// a map key can be:
		//  * a number
		//  * a string
		//  * a identifier, which is equivalent to a string
		//  * an expression, which must be enclosed in parentheses -- (1 + 2)
		if p.current.Is(Number) || p.current.Is(String) || p.current.Is(Identifier) {
			key = &StringNode{Value: p.current.Value}
			key.SetLocation(token.Location)
			p.next()
		} else if p.current.Is(Bracket, "(") {
			key = p.parseExpression(0)
		} else {
			p.error("a map key must be a quoted string, a number, a identifier, or an expression enclosed in parentheses (unexpected token %v)", p.current)
		}
		p.expect(Operator, ":")
		node := p.parseExpression(0)
		pair := &PairNode{Key: key, Value: node}
		pair.SetLocation(token.Location)
		nodes = append(nodes, pair)
	}
end:
	p.expect(Bracket, "}")
	node := &MapNode{Pairs: nodes}
	node.SetLocation(token.Location)
	return node
}
// parsePostfixExpression repeatedly applies postfix forms to node:
// property access / method calls after '.', and indexing or slicing
// after '['.
func (p *parser) parsePostfixExpression(node Node) Node {
	token := p.current
	for (token.Is(Operator) || token.Is(Bracket)) && p.err == nil {
		if token.Value == "." {
			p.next()
			token = p.current
			p.next()
			if token.Kind != Identifier &&
				// Operators like "not" and "matches" are valid methods or property names.
				(token.Kind != Operator || !isValidIdentifier(token.Value)) {
				p.error("expected name")
			}
			if p.current.Is(Bracket, "(") {
				// Name followed by '(' is a method call.
				arguments := p.parseArguments()
				node = &MethodNode{
					Node:      node,
					Method:    token.Value,
					Arguments: arguments,
				}
				node.SetLocation(token.Location)
			} else {
				node = &PropertyNode{
					Node:     node,
					Property: token.Value,
				}
				node.SetLocation(token.Location)
			}
		} else if token.Value == "[" {
			p.next()
			var from, to Node
			if p.current.Is(Operator, ":") { // slice without from [:1]
				p.next()
				if !p.current.Is(Bracket, "]") { // slice without from and to [:]
					to = p.parseExpression(0)
				}
				node = &SliceNode{
					Node: node,
					To:   to,
				}
				node.SetLocation(token.Location)
				p.expect(Bracket, "]")
			} else {
				from = p.parseExpression(0)
				if p.current.Is(Operator, ":") {
					p.next()
					if !p.current.Is(Bracket, "]") { // slice without to [1:]
						to = p.parseExpression(0)
					}
					node = &SliceNode{
						Node: node,
						From: from,
						To:   to,
					}
					node.SetLocation(token.Location)
					p.expect(Bracket, "]")
				} else {
					// Slice operator [:] was not found, it should by just index node.
					node = &IndexNode{
						Node:  node,
						Index: from,
					}
					node.SetLocation(token.Location)
					p.expect(Bracket, "]")
				}
			}
		} else {
			break
		}
		token = p.current
	}
	return node
}
// isValidIdentifier reports whether str is a well-formed identifier:
// non-empty, starting with an alphabetic rune and continuing with
// alphanumeric runes only.
func isValidIdentifier(str string) bool {
	if str == "" {
		return false
	}
	first, width := utf8.DecodeRuneInString(str)
	if !IsAlphabetic(first) {
		return false
	}
	for _, r := range str[width:] {
		if !IsAlphaNumeric(r) {
			return false
		}
	}
	return true
}
// parseArguments parses a parenthesized, comma-separated argument
// list (no trailing comma allowed).
func (p *parser) parseArguments() []Node {
	p.expect(Bracket, "(")
	args := make([]Node, 0)
	for p.err == nil && !p.current.Is(Bracket, ")") {
		if len(args) > 0 {
			p.expect(Operator, ",")
		}
		args = append(args, p.parseExpression(0))
	}
	p.expect(Bracket, ")")
	return args
}