diff --git a/ast/builtins.go b/ast/builtins.go index 7fb5c6ce41..1f954256e1 100644 --- a/ast/builtins.go +++ b/ast/builtins.go @@ -79,11 +79,12 @@ var BuiltinMap map[string]*Builtin var Equality = &Builtin{ Name: "eq", Infix: "=", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, - TargetPos: []int{0, 1}, + types.T, + ), + TargetPos: []int{0, 1, 2}, } /** @@ -94,50 +95,65 @@ var Equality = &Builtin{ var GreaterThan = &Builtin{ Name: "gt", Infix: ">", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + types.T, + ), + + TargetPos: []int{2}, } // GreaterThanEq represents the ">=" comparison operator. var GreaterThanEq = &Builtin{ Name: "gte", Infix: ">=", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + types.T, + ), + + TargetPos: []int{2}, } // LessThan represents the "<" comparison operator. var LessThan = &Builtin{ Name: "lt", Infix: "<", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + types.T, + ), + + TargetPos: []int{2}, } // LessThanEq represents the "<=" comparison operator. var LessThanEq = &Builtin{ Name: "lte", Infix: "<=", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + types.T, + ), + + TargetPos: []int{2}, } // NotEqual represents the "!=" comparison operator. var NotEqual = &Builtin{ Name: "neq", Infix: "!=", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + types.T, + ), + + TargetPos: []int{2}, } /** @@ -148,11 +164,11 @@ var NotEqual = &Builtin{ var Plus = &Builtin{ Name: "plus", Infix: "+", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, types.N, - }, + ), TargetPos: []int{2}, } @@ -161,11 +177,11 @@ var Plus = &Builtin{ var Minus = &Builtin{ Name: "minus", Infix: "-", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny(types.N, types.NewSet(types.A)), types.NewAny(types.N, types.NewSet(types.A)), types.NewAny(types.N, types.NewSet(types.A)), - }, + ), TargetPos: []int{2}, } @@ -173,11 +189,11 @@ var Minus = &Builtin{ var Multiply = &Builtin{ Name: "mul", Infix: "*", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, types.N, - }, + ), TargetPos: []int{2}, } @@ -185,31 +201,31 @@ var Multiply = &Builtin{ var Divide = &Builtin{ Name: "div", Infix: "/", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, types.N, - }, + ), TargetPos: []int{2}, } // Round rounds the number up to the nearest integer. var Round = &Builtin{ Name: "round", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, - }, + ), TargetPos: []int{1}, } // Abs returns the number without its sign. var Abs = &Builtin{ Name: "abs", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, - }, + ), TargetPos: []int{1}, } @@ -223,11 +239,11 @@ var Abs = &Builtin{ var And = &Builtin{ Name: "and", Infix: "&", - Args: []types.Type{ + Decl: types.NewFunction( types.NewSet(types.A), types.NewSet(types.A), types.NewSet(types.A), - }, + ), TargetPos: []int{2}, } @@ -235,11 +251,11 @@ var And = &Builtin{ var Or = &Builtin{ Name: "or", Infix: "|", - Args: []types.Type{ + Decl: types.NewFunction( types.NewSet(types.A), types.NewSet(types.A), types.NewSet(types.A), - }, + ), TargetPos: []int{2}, } @@ -250,7 +266,7 @@ var Or = &Builtin{ // Count takes a collection or string and counts the number of elements in it. 
var Count = &Builtin{ Name: "count", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), @@ -258,46 +274,46 @@ var Count = &Builtin{ types.S, ), types.N, - }, + ), TargetPos: []int{1}, } // Sum takes an array or set of numbers and sums them. var Sum = &Builtin{ Name: "sum", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny( types.NewSet(types.N), types.NewArray(nil, types.N), ), types.N, - }, + ), TargetPos: []int{1}, } // Max returns the maximum value in a collection. var Max = &Builtin{ Name: "max", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), ), types.A, - }, + ), TargetPos: []int{1}, } // Min returns the minimum value in a collection. var Min = &Builtin{ Name: "min", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny( types.NewSet(types.A), types.NewArray(nil, types.A), ), types.A, - }, + ), TargetPos: []int{1}, } @@ -310,7 +326,7 @@ var Min = &Builtin{ // Boolean false is converted to 0 and boolean true is converted to 1. var ToNumber = &Builtin{ Name: "to_number", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny( types.N, types.S, @@ -318,7 +334,7 @@ var ToNumber = &Builtin{ types.NewNull(), ), types.N, - }, + ), TargetPos: []int{1}, } @@ -330,10 +346,13 @@ var ToNumber = &Builtin{ // position matches the pattern in the first position. var RegexMatch = &Builtin{ Name: "re_match", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + types.T, + ), + + TargetPos: []int{2}, } /** @@ -343,36 +362,36 @@ var RegexMatch = &Builtin{ // Concat joins an array of strings with an input string. var Concat = &Builtin{ Name: "concat", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.NewAny( types.NewSet(types.S), types.NewArray(nil, types.S), ), types.S, - }, + ), TargetPos: []int{2}, } // FormatInt returns the string representation of the number in the given base after converting it to an integer value. var FormatInt = &Builtin{ Name: "format_int", - Args: []types.Type{ + Decl: types.NewFunction( types.N, types.N, types.S, - }, + ), TargetPos: []int{2}, } // IndexOf returns the index of a substring contained inside a string var IndexOf = &Builtin{ Name: "indexof", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, types.N, - }, + ), TargetPos: []int{2}, } @@ -380,70 +399,76 @@ var IndexOf = &Builtin{ // If the length is less than zero, then substring returns the remainder of the string. 
var Substring = &Builtin{ Name: "substring", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.N, types.N, types.S, - }, + ), TargetPos: []int{3}, } // Contains returns true if the search string is included in the base string var Contains = &Builtin{ Name: "contains", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + types.T, + ), + TargetPos: []int{2}, } // StartsWith returns true if the search string begins with the base string var StartsWith = &Builtin{ Name: "startswith", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + types.T, + ), + TargetPos: []int{2}, } // EndsWith returns true if the search string begins with the base string var EndsWith = &Builtin{ Name: "endswith", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + types.T, + ), + TargetPos: []int{2}, } // Lower returns the input string but with all characters in lower-case var Lower = &Builtin{ Name: "lower", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + ), TargetPos: []int{1}, } // Upper returns the input string but with all characters in upper-case var Upper = &Builtin{ Name: "upper", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + ), TargetPos: []int{1}, } // Split returns an array containing elements of the input string split on a delimiter. var Split = &Builtin{ Name: "split", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, types.NewArray(nil, types.S), - }, + ), TargetPos: []int{2}, } @@ -451,12 +476,12 @@ var Split = &Builtin{ // by the third. var Replace = &Builtin{ Name: "replace", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, types.S, types.S, - }, + ), TargetPos: []int{3}, } @@ -464,22 +489,22 @@ var Replace = &Builtin{ // argument removed. var Trim = &Builtin{ Name: "trim", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, types.S, - }, + ), TargetPos: []int{2}, } // Sprintf returns the given string, formatted. var Sprintf = &Builtin{ Name: "sprintf", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.NewArray(nil, types.A), types.S, - }, + ), TargetPos: []int{2}, } @@ -490,60 +515,60 @@ var Sprintf = &Builtin{ // JSONMarshal serializes the input term. var JSONMarshal = &Builtin{ Name: "json.marshal", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.S, - }, + ), TargetPos: []int{1}, } // JSONUnmarshal deserializes the input string. var JSONUnmarshal = &Builtin{ Name: "json.unmarshal", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.A, - }, + ), TargetPos: []int{1}, } // Base64UrlEncode serializes the input string into base64url encoding. var Base64UrlEncode = &Builtin{ Name: "base64url.encode", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + ), TargetPos: []int{1}, } // Base64UrlDecode deserializes the base64url encoded input string. var Base64UrlDecode = &Builtin{ Name: "base64url.decode", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, - }, + ), TargetPos: []int{1}, } // YAMLMarshal serializes the input term. var YAMLMarshal = &Builtin{ Name: "yaml.marshal", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.S, - }, + ), TargetPos: []int{1}, } // YAMLUnmarshal deserializes the input string. 
var YAMLUnmarshal = &Builtin{ Name: "yaml.unmarshal", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.A, - }, + ), TargetPos: []int{1}, } @@ -554,12 +579,12 @@ var YAMLUnmarshal = &Builtin{ // JWTDecode decodes a JSON Web Token and outputs it as an Object. var JWTDecode = &Builtin{ Name: "io.jwt.decode", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.NewObject(nil, types.NewDynamicProperty(types.A, types.A)), types.S, - }, + ), TargetPos: []int{1, 2, 3}, } @@ -570,30 +595,30 @@ var JWTDecode = &Builtin{ // NowNanos returns the current time since epoch in nanoseconds. var NowNanos = &Builtin{ Name: "time.now_ns", - Args: []types.Type{ + Decl: types.NewFunction( types.N, - }, + ), TargetPos: []int{0}, } // ParseNanos returns the time in nanoseconds parsed from the string in the given format. var ParseNanos = &Builtin{ Name: "time.parse_ns", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.S, types.N, - }, + ), TargetPos: []int{2}, } // ParseRFC3339Nanos returns the time in nanoseconds parsed from the string in RFC3339 format. var ParseRFC3339Nanos = &Builtin{ Name: "time.parse_rfc3339_ns", - Args: []types.Type{ + Decl: types.NewFunction( types.S, types.N, - }, + ), TargetPos: []int{1}, } @@ -605,7 +630,7 @@ var ParseRFC3339Nanos = &Builtin{ // (recursively). var WalkBuiltin = &Builtin{ Name: "walk", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.NewArray( []types.Type{ @@ -614,7 +639,7 @@ var WalkBuiltin = &Builtin{ }, nil, ), - }, + ), TargetPos: []int{1}, } @@ -625,21 +650,21 @@ var WalkBuiltin = &Builtin{ // SetDiff has been replaced by the minus built-in. var SetDiff = &Builtin{ Name: "set_diff", - Args: []types.Type{ + Decl: types.NewFunction( types.NewSet(types.A), types.NewSet(types.A), types.NewSet(types.A), - }, + ), TargetPos: []int{2}, } // Builtin represents a built-in function supported by OPA. Every // built-in function is uniquely identified by a name. type Builtin struct { - Name string // Unique name of built-in function, e.g., (arg1,arg2,...,argN) - Infix string // Unique name of infix operator. Default should be unset. - Args []types.Type // Built-in argument type declaration. - TargetPos []int // Argument positions that bind outputs. Indexing is zero-based. + Name string // Unique name of built-in function, e.g., (arg1,arg2,...,argN) + Infix string // Unique name of infix operator. Default should be unset. + Decl *types.Function // Built-in argument type declaration. + TargetPos []int // Argument positions that bind outputs. Indexing is zero-based. } // Expr creates a new expression for the built-in with the given terms. diff --git a/ast/check.go b/ast/check.go index 873f0e6d34..2487784a52 100644 --- a/ast/check.go +++ b/ast/check.go @@ -24,12 +24,6 @@ type exprChecker func(*TypeEnv, *Expr) *Error type typeChecker struct { errs Errors exprCheckers map[string]exprChecker - - // When checking the types of functions, their inputs need to initially - // be assumed as types.Any. In order to fill the TypeEnv with more accurate - // type information for the inputs, we need to overwrite this types.Any - // after we've infered more accurate typing from the function bodies. - inFunc bool } // newTypeChecker returns a new typeChecker object that has no errors. 
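The builtins.go hunks above all make the same change: a builtin's signature moves from the flat Args slice into Decl, a *types.Function built with types.NewFunction, where (as the Plus and Equality declarations show) the argument types come first and the last type is the result; the comparison operators additionally gain an explicit boolean result (types.T) and a TargetPos entry for it. A minimal sketch of registering a builtin under the new shape — the builtin itself and the import paths are assumptions for illustration, not part of this patch:

package example

import (
	"github.com/open-policy-agent/opa/ast"
	"github.com/open-policy-agent/opa/types"
)

func init() {
	// "string_repeat" is hypothetical; it only shows the new declaration
	// style: argument types first, result type last, TargetPos marking the
	// output position.
	ast.RegisterBuiltin(&ast.Builtin{
		Name: "string_repeat",
		Decl: types.NewFunction(
			types.S, // string to repeat
			types.N, // repeat count
			types.S, // result
		),
		TargetPos: []int{2},
	})
}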
@@ -86,30 +80,18 @@ func (tc *typeChecker) CheckBody(env *TypeEnv, body Body) (*TypeEnv, Errors) { return env, tc.errs } -// CheckTypes runs type checking on the rules and funcs and returns a TypeEnv if no -// errors are found. The resulting TypeEnv wraps the provided one. The -// resulting TypeEnv will be able to resolve types of refs that refer to rules -// and funcs. +// CheckTypes runs type checking on the rules returns a TypeEnv if no errors +// are found. The resulting TypeEnv wraps the provided one. The resulting +// TypeEnv will be able to resolve types of refs that refer to rules. func (tc *typeChecker) CheckTypes(env *TypeEnv, sorted []util.T) (*TypeEnv, Errors) { if env == nil { env = NewTypeEnv() } else { env = env.wrap() } - for _, s := range sorted { - switch s := s.(type) { - case *Rule: - tc.checkRule(env, s) - case *Func: - // TODO(mmussomele, tsandall): Currently this infers - // function input/output types from the body. We'll want - // to spend some time thinking about whether or not we - // want to keep that. - tc.checkFunc(env, s) - } + tc.checkRule(env, s.(*Rule)) } - return env, tc.errs } @@ -141,90 +123,72 @@ func (tc *typeChecker) checkClosures(env *TypeEnv, expr *Expr) Errors { return result } -func (tc *typeChecker) checkFunc(env *TypeEnv, fn *Func) { - tc.inFunc = true - defer func() { - tc.inFunc = false - }() - - cpy := env.wrap() - for _, arg := range fn.Head.Args { - WalkVars(arg, func(v Var) bool { - cpy.tree.PutOne(v, types.A) - return false - }) - } - - prev := len(tc.errs) - cpy, err := tc.CheckBody(cpy, fn.Body) - - // If this function did not error, there is no reason to return early, - // as that means that its dependencies compiled fine. - if len(err) > prev { - return - } - name := fn.Path().String() - - // Ensure that multiple definitions of this function have consistent argument - // lengths. - cur := env.GetFunc(name) - numArgs := len(fn.Head.Args) - if cur != nil && len(cur)-1 != numArgs { - tc.err(NewError(TypeErr, fn.Head.Loc(), "function definitions for %s have different number of arguments (%d vs %d)", name, numArgs, len(cur)-1)) - return - } - - var argTypes []types.Type - for i, arg := range fn.Head.Args { - tpe := mergeTypes(cpy.Get(arg), cur, i) - argTypes = append(argTypes, tpe) - } - - out := mergeTypes(cpy.Get(fn.Head.Output), cur, numArgs) - argTypes = append(argTypes, out) - env.PutFunc(name, argTypes) -} - func (tc *typeChecker) checkLanguageBuiltins() *TypeEnv { env := NewTypeEnv() for _, bi := range Builtins { - env.PutFunc(string(bi.Name), bi.Args) + env.tree.Put(bi.Ref(), bi.Decl) } - return env } func (tc *typeChecker) checkRule(env *TypeEnv, rule *Rule) { + cpy, err := tc.CheckBody(env, rule.Body) if len(err) == 0 { + path := rule.Path() var tpe types.Type - switch rule.Head.DocKind() { - case CompleteDoc: - typeV := cpy.Get(rule.Head.Value) - if typeV != nil { - exist := env.tree.Get(path) - tpe = types.Or(typeV, exist) - } - case PartialObjectDoc: - typeK := cpy.Get(rule.Head.Key) - typeV := cpy.Get(rule.Head.Value) - if typeK != nil && typeV != nil { - exist := env.tree.Get(path) - typeV = types.Or(types.Values(exist), typeV) - typeK = types.Or(types.Keys(exist), typeK) - tpe = types.NewObject(nil, types.NewDynamicProperty(typeK, typeV)) + if len(rule.Head.Args) > 0 { + + // If args are not referred to in body, infer as any. + WalkVars(rule.Head.Args, func(v Var) bool { + if cpy.Get(v) == nil { + cpy.tree.PutOne(v, types.A) + } + return false + }) + + // Construct function type. 
+ args := make([]types.Type, len(rule.Head.Args)+1) + for i := 0; i < len(rule.Head.Args); i++ { + args[i] = cpy.Get(rule.Head.Args[i]) } - case PartialSetDoc: - typeK := cpy.Get(rule.Head.Key) - if typeK != nil { - exist := env.tree.Get(path) - typeK = types.Or(types.Keys(exist), typeK) - tpe = types.NewSet(typeK) + args[len(args)-1] = cpy.Get(rule.Head.Value) + f := types.NewFunction(args...) + + // Union with existing. + exist := env.tree.Get(path) + tpe = types.Or(exist, f) + + } else { + switch rule.Head.DocKind() { + case CompleteDoc: + typeV := cpy.Get(rule.Head.Value) + if typeV != nil { + exist := env.tree.Get(path) + tpe = types.Or(typeV, exist) + } + case PartialObjectDoc: + typeK := cpy.Get(rule.Head.Key) + typeV := cpy.Get(rule.Head.Value) + if typeK != nil && typeV != nil { + exist := env.tree.Get(path) + typeV = types.Or(types.Values(exist), typeV) + typeK = types.Or(types.Keys(exist), typeK) + tpe = types.NewObject(nil, types.NewDynamicProperty(typeK, typeV)) + } + case PartialSetDoc: + typeK := cpy.Get(rule.Head.Key) + if typeK != nil { + exist := env.tree.Get(path) + typeK = types.Or(types.Keys(exist), typeK) + tpe = types.NewSet(typeK) + } } } + if tpe != nil { env.tree.Put(path, tpe) } @@ -232,11 +196,11 @@ func (tc *typeChecker) checkRule(env *TypeEnv, rule *Rule) { } func (tc *typeChecker) checkExpr(env *TypeEnv, expr *Expr) *Error { - if !expr.IsBuiltin() { + if !expr.IsCall() { return nil } - checker := tc.exprCheckers[expr.Name().String()] + checker := tc.exprCheckers[expr.Operator().String()] if checker != nil { return checker(env, expr) } @@ -245,11 +209,6 @@ func (tc *typeChecker) checkExpr(env *TypeEnv, expr *Expr) *Error { } func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error { - name := expr.Name().String() - expArgs := env.GetFunc(name) - if expArgs == nil { - return NewError(TypeErr, expr.Location, "undefined built-in function %v", name) - } args := expr.Operands() pre := make([]types.Type, len(args)) @@ -257,14 +216,34 @@ func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error { pre[i] = env.Get(args[i]) } + name := expr.Operator() + tpe := env.Get(name) + + if tpe == nil { + return NewError(TypeErr, expr.Location, "undefined function %v", name) + } + + ftpe, ok := tpe.(*types.Function) + if !ok { + return NewError(TypeErr, expr.Location, "undefined function %v", name) + } + + expArgs := append(ftpe.Args(), ftpe.Result()) + if len(args) < len(expArgs) { - return newArgError(expr.Location, name, "too few arguments", pre, expArgs) + // TODO(tsandall): this allows callers to omit the result operand if + // the value is always true. In future, callers should always allowed + // to be able to ignore the result. This leaks into topdown which could + // be improved. 
+ if len(args) != len(expArgs)-1 || types.Compare(ftpe.Result(), types.T) != 0 { + return newArgError(expr.Location, name, "too few arguments", pre, expArgs) + } } else if len(args) > len(expArgs) { return newArgError(expr.Location, name, "too many arguments", pre, expArgs) } for i := range args { - if !unify1(env, args[i], expArgs[i], tc.inFunc) { + if !unify1(env, args[i], expArgs[i]) { post := make([]types.Type, len(args)) for i := range args { post[i] = env.Get(args[i]) @@ -281,7 +260,7 @@ func (tc *typeChecker) checkExprEq(env *TypeEnv, expr *Expr) *Error { a, b := expr.Operand(0), expr.Operand(1) typeA, typeB := env.Get(a), env.Get(b) - if !unify2(env, a, typeA, b, typeB, tc.inFunc) { + if !unify2(env, a, typeA, b, typeB) { err := NewError(TypeErr, expr.Location, "match error") err.Details = &UnificationErrDetail{ Left: typeA, @@ -293,66 +272,74 @@ func (tc *typeChecker) checkExprEq(env *TypeEnv, expr *Expr) *Error { return nil } -func unify2(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type, inFunc bool) bool { +func unify2(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type) bool { nilA := types.Nil(typeA) nilB := types.Nil(typeB) if nilA && !nilB { - return unify1(env, a, typeB, inFunc) + return unify1(env, a, typeB) } else if nilB && !nilA { - return unify1(env, b, typeA, inFunc) + return unify1(env, b, typeA) } else if !nilA && !nilB { return unifies(typeA, typeB) } - switch a := a.Value.(type) { + switch av := a.Value.(type) { case Array: - switch b := b.Value.(type) { + switch bv := b.Value.(type) { case Array: - if len(a) == len(b) { - for i := range a { - if !unify2(env, a[i], env.Get(a[i]), b[i], env.Get(b[i]), inFunc) { + if len(av) == len(bv) { + for i := range av { + if !unify2(env, av[i], env.Get(av[i]), bv[i], env.Get(bv[i])) { return false } } return true } + case Var: + return unify1(env, a, types.A) && unify1(env, b, env.Get(a)) } case Object: - switch b := b.Value.(type) { + switch bv := b.Value.(type) { case Object: - c := a.Intersect(b) - if len(a) == len(b) && len(b) == len(c) { - for i := range c { - if !unify2(env, c[i][1], env.Get(c[i][1]), c[i][2], env.Get(c[i][2]), inFunc) { + cv := av.Intersect(bv) + if len(av) == len(bv) && len(bv) == len(cv) { + for i := range cv { + if !unify2(env, cv[i][1], env.Get(cv[i][1]), cv[i][2], env.Get(cv[i][2])) { return false } } return true } + case Var: + return unify1(env, a, types.A) && unify1(env, b, env.Get(a)) + } + case Var: + if _, ok := b.Value.(Var); ok { + return unify1(env, a, types.A) && unify1(env, b, env.Get(a)) } } return false } -func unify1(env *TypeEnv, term *Term, tpe types.Type, inFunc bool) bool { +func unify1(env *TypeEnv, term *Term, tpe types.Type) bool { switch v := term.Value.(type) { case Array: switch tpe := tpe.(type) { case *types.Array: - return unify1Array(env, v, tpe, inFunc) + return unify1Array(env, v, tpe) case types.Any: if types.Compare(tpe, types.A) == 0 { for i := range v { - unify1(env, v[i], types.A, inFunc) + unify1(env, v[i], types.A) } return true } unifies := false for i := range tpe { - unifies = unify1(env, term, tpe[i], inFunc) || unifies + unifies = unify1(env, term, tpe[i]) || unifies } return unifies } @@ -360,17 +347,17 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, inFunc bool) bool { case Object: switch tpe := tpe.(type) { case *types.Object: - return unify1Object(env, v, tpe, inFunc) + return unify1Object(env, v, tpe) case types.Any: if types.Compare(tpe, types.A) == 0 { for i := range v { - unify1(env, v[i][1], types.A, 
inFunc) + unify1(env, v[i][1], types.A) } return true } unifies := false for i := range tpe { - unifies = unify1(env, term, tpe[i], inFunc) || unifies + unifies = unify1(env, term, tpe[i]) || unifies } return unifies } @@ -378,18 +365,18 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, inFunc bool) bool { case *Set: switch tpe := tpe.(type) { case *types.Set: - return unify1Set(env, v, tpe, inFunc) + return unify1Set(env, v, tpe) case types.Any: if types.Compare(tpe, types.A) == 0 { v.Iter(func(elem *Term) bool { - unify1(env, elem, types.A, inFunc) + unify1(env, elem, types.A) return true }) return true } unifies := false for i := range tpe { - unifies = unify1(env, term, tpe[i], inFunc) || unifies + unifies = unify1(env, term, tpe[i]) || unifies } return unifies } @@ -398,9 +385,7 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, inFunc bool) bool { return unifies(env.Get(v), tpe) case Var: if exist := env.Get(v); exist != nil { - if e, ok := exist.(types.Any); !ok || len(e) != 0 || !inFunc { - return unifies(exist, tpe) - } + return unifies(exist, tpe) } env.tree.PutOne(term.Value, tpe) return true @@ -412,26 +397,26 @@ func unify1(env *TypeEnv, term *Term, tpe types.Type, inFunc bool) bool { } } -func unify1Array(env *TypeEnv, val Array, tpe *types.Array, inFunc bool) bool { +func unify1Array(env *TypeEnv, val Array, tpe *types.Array) bool { if len(val) != tpe.Len() && tpe.Dynamic() == nil { return false } for i := range val { - if !unify1(env, val[i], tpe.Select(i), inFunc) { + if !unify1(env, val[i], tpe.Select(i)) { return false } } return true } -func unify1Object(env *TypeEnv, val Object, tpe *types.Object, inFunc bool) bool { +func unify1Object(env *TypeEnv, val Object, tpe *types.Object) bool { if len(val) != len(tpe.Keys()) && tpe.DynamicValue() == nil { return false } for i := range val { if IsConstant(val[i][0].Value) { if child := selectConstant(tpe, val[i][0]); child != nil { - if !unify1(env, val[i][1], child, inFunc) { + if !unify1(env, val[i][1], child) { return false } } else { @@ -442,16 +427,16 @@ func unify1Object(env *TypeEnv, val Object, tpe *types.Object, inFunc bool) bool // with all property values of tpe whose keys unify. For now, type // these values as Any. We can investigate stricter inference in // the future. 
- unify1(env, val[i][1], types.A, inFunc) + unify1(env, val[i][1], types.A) } } return true } -func unify1Set(env *TypeEnv, val *Set, tpe *types.Set, inFunc bool) bool { +func unify1Set(env *TypeEnv, val *Set, tpe *types.Set) bool { of := types.Values(tpe) return !val.Iter(func(elem *Term) bool { - return !unify1(env, elem, of, inFunc) + return !unify1(env, elem, of) }) } @@ -460,8 +445,9 @@ func (tc *typeChecker) err(err *Error) { } type refChecker struct { - env *TypeEnv - errs Errors + env *TypeEnv + errs Errors + checkTerm bool } func newRefChecker(env *TypeEnv) *refChecker { @@ -476,13 +462,25 @@ func (rc *refChecker) Visit(x interface{}) Visitor { case *ArrayComprehension, *ObjectComprehension, *SetComprehension: return nil case *Expr: - if terms, ok := x.Terms.([]*Term); ok { + switch terms := x.Terms.(type) { + case []*Term: for i := 1; i < len(terms); i++ { Walk(rc, terms[i]) } return nil + case *Term: + rc.checkTerm = true + Walk(rc, terms) + rc.checkTerm = false + return nil } case Ref: + if rc.checkTerm { + if err := rc.checkApply(rc.env, x); err != nil { + rc.errs = append(rc.errs, err) + return nil + } + } if err := rc.checkRef(rc.env, rc.env.tree, x, 0); err != nil { rc.errs = append(rc.errs, err) } @@ -490,6 +488,15 @@ func (rc *refChecker) Visit(x interface{}) Visitor { return rc } +func (rc *refChecker) checkApply(curr *TypeEnv, ref Ref) *Error { + if tpe := curr.Get(ref); tpe != nil { + if _, ok := tpe.(*types.Function); ok { + return newRefErrUnsupported(ref[0].Location, ref, len(ref)-1, tpe) + } + } + return nil +} + func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx int) *Error { if idx == len(ref) { @@ -503,7 +510,17 @@ func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx i child := node.Child(head.Value) if child == nil { - return rc.checkRefNext(curr, ref) + + if curr.next != nil { + next := curr.next + return rc.checkRef(next, next.tree, ref, 0) + } + + if RootDocumentNames.Contains(ref[0]) { + return rc.checkRefLeaf(types.A, ref, 1) + } + + return rc.checkRefLeaf(types.A, ref, 0) } if child.Leaf() { @@ -592,8 +609,7 @@ func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error { if !unifies(tpe, types.NewSet(types.A)) { return newRefErrInvalid(ref[0].Location, ref, idx, tpe, types.NewSet(types.A), nil) } - - if !unify1(rc.env, head, keys, false) { + if !unify1(rc.env, head, keys) { return newRefErrInvalid(ref[0].Location, ref, idx, rc.env.Get(head), keys, nil) } @@ -608,34 +624,24 @@ func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error { return rc.checkRefLeaf(types.Values(tpe), ref, idx+1) } -func (rc *refChecker) checkRefNext(curr *TypeEnv, ref Ref) *Error { - - if curr.next != nil { - next := curr.next - return rc.checkRef(next, next.tree, ref, 0) - } - - if RootDocumentNames.Contains(ref[0]) { - // Skip ref head. Type of head should not be inferred from this - // descent. 
- return rc.checkRefLeaf(types.A, ref, 1) - } - - return newRefErrMissing(ref[0].Location, ref) -} - func unifies(a, b types.Type) bool { if a == nil || b == nil { return false } + var unified bool + if anyA, ok := a.(types.Any); ok { - return unifiesAny(anyA, b) + unified = unifiesAny(anyA, b) } if anyB, ok := b.(types.Any); ok { - return unifiesAny(anyB, a) + unified = unified || unifiesAny(anyB, a) + } + + if unified { + return true } switch a := a.(type) { @@ -669,21 +675,24 @@ func unifies(a, b types.Type) bool { return false } return unifies(types.Values(a), types.Values(b)) + case *types.Function: + // TODO(tsandall): revisit once functions become first-class values. + return false default: panic("unreachable") } } func unifiesAny(a types.Any, b types.Type) bool { - if types.Compare(a, types.A) == 0 { - return true + if _, ok := b.(*types.Function); ok { + return false } for i := range a { if unifies(a[i], b) { return true } } - return false + return len(a) == 0 } func unifiesArrays(a, b *types.Array) bool { @@ -731,26 +740,6 @@ func unifiesObjectsStatic(a, b *types.Object) bool { return true } -func mergeTypes(found types.Type, cur []types.Type, i int) types.Type { - if found == nil { - found = types.A - } - - if cur == nil { - return found - } - - return types.Or(found, cur[i]) -} - -func builtinNameRef(name String) Ref { - n, err := ParseRef(string(name)) - if err != nil { - n = Ref([]*Term{{Value: name}}) - } - return n -} - // typeErrorCause defines an interface to determine the reason for a type // error. The type error details implement this interface so that type checking // can report more actionable errors. @@ -856,21 +845,6 @@ func (r *RefErrInvalidDetail) Lines() []string { return lines } -// RefErrMissingDetail describes an undefined reference error where the type -// information for the head of the reference is missing. -type RefErrMissingDetail struct { - Ref Ref -} - -// Lines returns the string representation of the detail. 
-func (r *RefErrMissingDetail) Lines() []string { - return []string{ - r.Ref.String(), - strings.Repeat("^", len(r.Ref[0].String())), - "missing type information", - } -} - func formatArgs(args []types.Type) string { buf := make([]string, len(args)) for i := range args { @@ -901,19 +875,11 @@ func newRefErrUnsupported(loc *Location, ref Ref, idx int, have types.Type) *Err return err } -func newRefErrMissing(loc *Location, ref Ref) *Error { - err := newRefError(loc, ref) - err.Details = &RefErrMissingDetail{ - Ref: ref, - } - return err -} - func newRefError(loc *Location, ref Ref) *Error { return NewError(TypeErr, loc, "undefined ref: %v", ref) } -func newArgError(loc *Location, builtinName, msg string, have []types.Type, want []types.Type) *Error { +func newArgError(loc *Location, builtinName Ref, msg string, have []types.Type, want []types.Type) *Error { err := NewError(TypeErr, loc, "%v: %v", builtinName, msg) err.Details = &ArgErrDetail{ Have: have, diff --git a/ast/check_test.go b/ast/check_test.go index 541c4bb9d0..e0ea4004e8 100644 --- a/ast/check_test.go +++ b/ast/check_test.go @@ -21,34 +21,34 @@ func TestCheckInference(t *testing.T) { // fake_builtin_1([str1,str2]) RegisterBuiltin(&Builtin{ Name: "fake_builtin_1", - Args: []types.Type{ + Decl: types.NewFunction( types.NewArray( []types.Type{types.S, types.S}, nil, ), - }, + ), TargetPos: []int{0}, }) // fake_builtin_2({"a":str1,"b":str2}) RegisterBuiltin(&Builtin{ Name: "fake_builtin_2", - Args: []types.Type{ + Decl: types.NewFunction( types.NewObject( []*types.StaticProperty{ {"a", types.S}, {"b", types.S}, }, nil, ), - }, + ), TargetPos: []int{0}, }) // fake_builtin_3({str1,str2,...}) RegisterBuiltin(&Builtin{ Name: "fake_builtin_3", - Args: []types.Type{ + Decl: types.NewFunction( types.NewSet(types.S), - }, + ), TargetPos: []int{0}, }) @@ -70,7 +70,7 @@ func TestCheckInference(t *testing.T) { Var("z"): types.N, }}, {"array-nested", "[x, 1] = [true, y]", map[Var]types.Type{ - Var("x"): types.B, + Var("x"): types.T, Var("y"): types.N, }}, {"array-transitive", "y = [[2], 1]; [[x], 1] = y", map[Var]types.Type{ @@ -159,7 +159,7 @@ func TestCheckInference(t *testing.T) { 3]; x = a[1].foo[_].bar`, map[Var]types.Type{ - Var("x"): types.NewAny(types.NewNull(), types.B), + Var("x"): types.NewAny(types.NewNull(), types.T), }}, {"local-reference-var", ` @@ -191,8 +191,8 @@ func TestCheckInference(t *testing.T) { `, map[Var]types.Type{ Var("i"): types.N, Var("j"): types.S, - Var("k"): types.NewAny(types.S, types.N, types.B), - Var("x"): types.NewAny(types.S, types.N, types.B), + Var("k"): types.NewAny(types.S, types.N, types.T), + Var("x"): types.NewAny(types.S, types.N, types.T), }}, {"local-reference-var-any", ` a = [[], {}]; @@ -304,7 +304,7 @@ func TestCheckInferenceRules(t *testing.T) { ref string expected types.Type }{ - {"trivial", ruleset1, `data.a.trivial`, types.B}, + {"trivial", ruleset1, `data.a.trivial`, types.T}, {"complete-doc", ruleset1, `data.a.complete`, types.NewArray( []types.Type{types.NewObject( @@ -347,7 +347,7 @@ func TestCheckInferenceRules(t *testing.T) { types.NewAny( types.S, types.N, - types.B), + types.T), )}, {"iteration-keys", ruleset1, "data.iteration.keys", types.NewSet( @@ -373,11 +373,11 @@ func TestCheckInferenceRules(t *testing.T) { types.NewDynamicProperty(types.S, types.NewAny(types.S, types.N)), )}, - {"ref", ruleset1, "data.b.trivial_ref", types.B}, + {"ref", ruleset1, "data.b.trivial_ref", types.T}, {"ref-transitive", ruleset1, "data.b.transitive_ref", types.NewArray( []types.Type{ - types.B, + 
types.T, }, nil, )}, @@ -385,7 +385,7 @@ func TestCheckInferenceRules(t *testing.T) { {"prefix", ruleset1, `data.prefix.a.b`, types.NewObject( []*types.StaticProperty{{ "c", types.NewObject( - []*types.StaticProperty{{"d", types.B}}, + []*types.StaticProperty{{"d", types.T}}, types.NewDynamicProperty(types.S, types.A), ), }}, @@ -546,7 +546,6 @@ func TestCheckMatchErrors(t *testing.T) { {"object", `{{"a": 1, "b": 2} = null}`}, {"object-nested", `{ {"a": 1, "b": "2"} = {"a": 1, "b": 2} }`}, {"object-nested-2", `{ {"a": 1} = {"a": 1, "b": "2"} }`}, - {"object-dynamic", `{ obj2 = obj1 }`}, {"set", "{{1,2,3} = null}"}, } for _, tc := range tests { @@ -565,7 +564,7 @@ func TestCheckBuiltinErrors(t *testing.T) { RegisterBuiltin(&Builtin{ Name: "fake_builtin_2", - Args: []types.Type{ + Decl: types.NewFunction( types.NewAny(types.NewObject( []*types.StaticProperty{ {"a", types.S}, @@ -577,7 +576,7 @@ func TestCheckBuiltinErrors(t *testing.T) { {"c", types.S}, }, nil, )), - }, + ), TargetPos: []int{0}, }) @@ -593,11 +592,13 @@ func TestCheckBuiltinErrors(t *testing.T) { {"objects-any", `fake_builtin_2({"a": a, "c": c})`}, {"objects-bad-input", `sum({"a": 1, "b": 2}, x)`}, {"sets-any", `sum({1,2,"3",4}, x)`}, - {"xxx", "data.test.p + data.deadbeef = q"}, + {"virtual-ref", `data.test.p + data.deabeef = 0`}, + {"function-ref", `data.test.f(1, data.test.f)`}, } env := newTestEnv([]string{ `p = "foo" { true }`, + `f(x) = x { true }`, }) for _, tc := range tests { @@ -638,25 +639,6 @@ func TestCheckRefErrUnsupported(t *testing.T) { } -func TestCheckRefErrMissing(t *testing.T) { - - query := `arr = [1,2,3]; arr[0].deadbeef = elem; elem[0]` - - _, errs := newTypeChecker().CheckBody(nil, MustParseBody(query)) - if len(errs) != 2 { - t.Fatalf("Expected exactly two errors but got: %v", errs) - } - - details, ok := errs[1].Details.(*RefErrMissingDetail) - if !ok { - t.Fatalf("Expected ref err missing but got: %v", errs) - } - - if !details.Ref.Equal(MustParseRef("elem[0]")) { - t.Fatalf("Expected ref elem[0] but got: %v", errs) - } -} - func TestCheckRefErrInvalid(t *testing.T) { env := newTestEnv([]string{ @@ -796,8 +778,8 @@ func TestUserFunctionsTypeInference(t *testing.T) { `foo([a, b]) = y { split(a, b, y) }`, `bar(x) = y { count(x, y) }`, `baz([x, y]) = z { sprintf("%s%s", [x, y], z) }`, - `buz({"bar": x, "foo": y}) = {a: b} { upper(y, a); json.unmarshal(x, b) }`, - `foobar(x) = y { buz({"bar": x, "foo": x}, a); baz([a["{5: true}"], "BUZ"], y) }`, + `qux({"bar": x, "foo": y}) = {a: b} { upper(y, a); json.unmarshal(x, b) }`, + `corge(x) = y { qux({"bar": x, "foo": x}, a); baz([a["{5: true}"], "BUZ"], y) }`, } body := strings.Join(functions, "\n") base := fmt.Sprintf("package base\n%s", body) @@ -808,35 +790,35 @@ func TestUserFunctionsTypeInference(t *testing.T) { } tests := []struct { - body string - err bool + body string + wantErr bool }{ { - `fn() = y { base.foo(["hello", 5], y) }`, + `fn(_) = y { data.base.foo(["hello", 5], y) }`, true, }, { - `fn() = y { base.foo(["hello", "ll"], y) }`, + `fn(_) = y { data.base.foo(["hello", "ll"], y) }`, false, }, { - `fn() = y { base.baz(["hello", "ll"], y) }`, + `fn(_) = y { data.base.baz(["hello", "ll"], y) }`, false, }, { - `fn() = y { base.baz([5, ["foo", "bar", true]], y) }`, + `fn(_) = y { data.base.baz([5, ["foo", "bar", true]], y) }`, false, }, { - `fn() = y { base.baz(["hello", {"a": "b", "c": 3}], y) }`, + `fn(_) = y { data.base.baz(["hello", {"a": "b", "c": 3}], y) }`, false, }, { - `fn() = y { base.foobar("this is not json", y) }`, + `fn(_) = y { 
data.base.corge("this is not json", y) }`, false, }, { - `fn(x) = y { noexist(x, a); y = a[0] }`, + `fn(x) = y { data.non_existent(x, a); y = a[0] }`, true, }, } @@ -845,8 +827,11 @@ func TestUserFunctionsTypeInference(t *testing.T) { t.Run(fmt.Sprintf("Test Case %d", n), func(t *testing.T) { mod := MustParseModule(fmt.Sprintf("package test\n%s", test.body)) c := NewCompiler() - if c.Compile(map[string]*Module{"base": MustParseModule(base), "mod": mod}); c.Failed() != test.err { - t.Fatalf("Expected compiler to fail: %t, compiler failed: %t", test.err, c.Failed()) + c.Compile(map[string]*Module{"base": MustParseModule(base), "mod": mod}) + if test.wantErr && !c.Failed() { + t.Errorf("Expected error but got success") + } else if !test.wantErr && c.Failed() { + t.Errorf("Expected success but got error: %v", c.Errors) } }) } @@ -901,16 +886,6 @@ func TestCheckErrorDetails(t *testing.T) { ` want (one of): ["a" "b"]`, }, }, - { - detail: &RefErrMissingDetail{ - Ref: MustParseRef("xxx.foo"), - }, - expected: []string{ - "xxx.foo", - "^^^", - "missing type information", - }, - }, { detail: &ArgErrDetail{ Have: []types.Type{ diff --git a/ast/compare.go b/ast/compare.go index f7caa44f13..28c8afc3c7 100644 --- a/ast/compare.go +++ b/ast/compare.go @@ -179,15 +179,9 @@ func Compare(a, b interface{}) int { case *Head: b := b.(*Head) return a.Compare(b) - case *FuncHead: - b := b.(*FuncHead) - return a.Compare(b) case *Rule: b := b.(*Rule) return a.Compare(b) - case *Func: - b := b.(*Func) - return a.Compare(b) case Args: b := b.(Args) return termSliceCompare(a, b) @@ -244,18 +238,14 @@ func sortOrder(x interface{}) int { return 110 case *Head: return 120 - case *FuncHead: - return 130 case Body: return 200 case *Rule: return 1000 - case *Func: - return 1001 case *Import: - return 1002 + return 1001 case *Package: - return 1003 + return 1002 case *Module: return 10000 } diff --git a/ast/compile.go b/ast/compile.go index a993c8cfab..9a4cefdd15 100644 --- a/ast/compile.go +++ b/ast/compile.go @@ -68,35 +68,8 @@ type Compiler struct { // +--- q (1 rule) RuleTree *TreeNode - // FuncTree organizes user functions into a tree where each node is keyed - // by an element in the logical path to the function. The logical path is - // the concatenations of the containing package and the stringified function - // name. Functions are only located at the lead nodes, each of which have - // exactly 1 function. E.g., given the following module: - // - // package a.b - // p(x) = y { y = x } - // q(x) = y { y = 2*x } - // - // root - // | - // +--- a (no functions) - // | - // +--- b (no functions) - // | - // +--- p - // | - // +--- q - FuncTree *TreeNode - - // FunctionMap is a map containing the user defined functions of this - // compiler's modules. - FuncMap map[string][]*Func - - // Graph represents the dependencies between rules and funcs (lets call - // them targets). An edge (u,v) is added to the graph if target "u" - // depends on target "v". A target "u" depends on target "v" if target - // "u" refers to the virtual document (or function) defined by target "v". + // Graph contains dependencies between rules. An edge (u,v) is added to the + // graph if rule 'u' refers to the virtual document defined by 'v'. Graph *Graph // TypeEnv holds type information for values inferred by the compiler. 
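Following the checkRule change in check.go above, a rule with Head.Args is typed as a *types.Function whose argument types are inferred from the body and whose result is the type of the head value, stored at the rule's path like any other document type. A sketch in the style of check_test.go, reusing its existing newTestEnv and MustParseRef helpers (so it would sit inside that file); the inferred types in the comment are an assumption based on split's declaration:

func TestFunctionRuleInference(t *testing.T) {
	env := newTestEnv([]string{
		`f(x) = y { split(x, ".", y) }`,
	})
	// Expected to resolve to a *types.Function, roughly
	// types.NewFunction(types.S, types.NewArray(nil, types.S)).
	tpe := env.Get(MustParseRef("data.test.f"))
	if _, ok := tpe.(*types.Function); !ok {
		t.Fatalf("expected a function type for data.test.f, got %v", tpe)
	}
}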
@@ -194,7 +167,6 @@ func NewCompiler() *Compiler { c := &Compiler{ Modules: map[string]*Module{}, TypeEnv: NewTypeEnv(), - FuncMap: map[string][]*Func{}, generatedVars: map[*Module]VarSet{}, ruleIndices: util.NewHashMap(func(a, b util.T) bool { r1, r2 := a.(Ref), b.(Ref) @@ -207,7 +179,6 @@ func NewCompiler() *Compiler { c.ModuleTree = NewModuleTree(nil) c.RuleTree = NewRuleTree(c.ModuleTree) - c.FuncTree = NewFuncTree(c.ModuleTree) checker := newTypeChecker() c.TypeEnv = checker.checkLanguageBuiltins() @@ -216,14 +187,11 @@ func NewCompiler() *Compiler { c.resolveAllRefs, c.setModuleTree, c.setRuleTree, - c.setFuncTree, c.setGraph, c.rewriteComprehensionTerms, c.rewriteRefsInHead, c.checkWithModifiers, c.checkRuleConflicts, - c.checkSafetyFuncHeads, - c.checkSafetyFuncBodies, c.checkSafetyRuleHeads, c.checkSafetyRuleBodies, c.checkRecursion, @@ -404,27 +372,6 @@ func (c *Compiler) GetRules(ref Ref) (rules []*Rule) { return rules } -// GetFunc returns the function referred to by name. -func (c *Compiler) GetFunc(name string) []*Func { - if fn, ok := c.FuncMap[name]; ok { - return fn - } - return nil -} - -// GetAllFuncs returns a map of functions that this compiler has discovered. -func (c *Compiler) GetAllFuncs() map[string][]*Func { - cpy := map[string][]*Func{} - for _, fn := range c.FuncMap { - var fns []*Func - for _, f := range fn { - fns = append(fns, f.Copy()) - } - cpy[fn[0].Path().String()] = fns - } - return cpy -} - // RuleIndex returns a RuleIndex built for the rule set referred to by path. // The path must refer to the rule set exactly, i.e., given a rule set at path // data.a.b.c.p, refs data.a.b.c.p.x and data.a.b.c would not return a @@ -476,15 +423,7 @@ func (c *Compiler) buildRuleIndices() { // no cycles in the Graph. func (c *Compiler) checkRecursion() { eq := func(a, b util.T) bool { - ar, aok := a.(*Rule) - br, bok := b.(*Rule) - if aok && bok { - return ar == br - } - - af, aok := a.(*Func) - bf, bok := b.(*Func) - return aok && bok && af == bf + return a.(*Rule) == b.(*Rule) } c.RuleTree.DepthFirst(func(node *TreeNode) bool { @@ -494,14 +433,6 @@ func (c *Compiler) checkRecursion() { } return false }) - - c.FuncTree.DepthFirst(func(node *TreeNode) bool { - for _, fn := range node.Values { - f := fn.(*Func) - c.checkSelfPath(FuncTypeName, f.Loc(), eq, f, f) - } - return false - }) } func (c *Compiler) checkSelfPath(t string, loc *Location, eq func(a, b util.T) bool, a, b util.T) { @@ -519,8 +450,6 @@ func astNodeToString(x interface{}) string { switch x := x.(type) { case *Rule: return string(x.Head.Name) - case *Func: - return string(x.Head.Name) default: panic("not reached") } @@ -535,10 +464,12 @@ func (c *Compiler) checkRuleConflicts() { kinds := map[DocKind]struct{}{} defaultRules := 0 + arities := map[int]struct{}{} for _, rule := range node.Values { r := rule.(*Rule) kinds[r.Head.DocKind()] = struct{}{} + arities[len(r.Head.Args)] = struct{}{} if r.Default { defaultRules++ } @@ -546,7 +477,7 @@ func (c *Compiler) checkRuleConflicts() { name := Var(node.Key.(String)) - if len(kinds) > 1 { + if len(kinds) > 1 || len(arities) > 1 { c.err(NewError(TypeErr, node.Values[0].(*Rule).Loc(), "conflicting rules named %v found", name)) } @@ -560,13 +491,6 @@ func (c *Compiler) checkRuleConflicts() { c.ModuleTree.DepthFirst(func(node *ModuleTreeNode) bool { for _, mod := range node.Modules { for _, rule := range mod.Rules { - for _, fn := range mod.Funcs { - if rule.Head.Name.Equal(fn.Head.Name) { - msg := fmt.Sprintf("rule defined at %v conflicts with function defined at 
%v", rule.Loc(), fn.Loc()) - c.err(NewError(CompileErr, mod.Package.Loc(), msg)) - } - } - if childNode, ok := node.Children[String(rule.Head.Name)]; ok { for _, childMod := range childNode.Modules { msg := fmt.Sprintf("%v conflicts with rule defined at %v", childMod.Package, rule.Loc()) @@ -584,26 +508,15 @@ func (c *Compiler) checkRuleConflicts() { // to right, re-ordering as necessary. func (c *Compiler) checkSafetyRuleBodies() { for _, m := range c.Modules { - safe := ReservedVars.Copy() WalkRules(m, func(r *Rule) bool { + safe := ReservedVars.Copy() + safe.Update(r.Head.Args.Vars()) r.Body = c.checkBodySafety(safe, m, r.Body, r.Loc()) return false }) } } -func (c *Compiler) checkSafetyFuncBodies() { - for _, m := range c.Modules { - safe := ReservedVars.Copy() - WalkFuncs(m, func(f *Func) bool { - s := safe.Copy() - s.Update(f.Head.ArgVars()) - f.Body = c.checkBodySafety(s, m, f.Body, f.Loc()) - return false - }) - } -} - func (c *Compiler) checkBodySafety(safe VarSet, m *Module, b Body, l *Location) Body { reordered, unsafe := reorderBodyForSafety(safe, b) if len(unsafe) != 0 { @@ -627,7 +540,9 @@ var safetyCheckVarVisitorParams = VarVisitorParams{ func (c *Compiler) checkSafetyRuleHeads() { for _, m := range c.Modules { WalkRules(m, func(r *Rule) bool { - unsafe := r.Head.Vars().Diff(r.Body.Vars(safetyCheckVarVisitorParams)) + safe := r.Body.Vars(safetyCheckVarVisitorParams) + safe.Update(r.Head.Args.Vars()) + unsafe := r.Head.Vars().Diff(safe) for v := range unsafe { if !c.generatedVars[m].Contains(v) { c.err(NewError(UnsafeVarErr, r.Loc(), "%v %v is unsafe", VarTypeName, v)) @@ -638,24 +553,8 @@ func (c *Compiler) checkSafetyRuleHeads() { } } -func (c *Compiler) checkSafetyFuncHeads() { - for _, m := range c.Modules { - WalkFuncs(m, func(f *Func) bool { - vars := f.Body.Vars(safetyCheckVarVisitorParams) - vars.Update(f.Head.ArgVars()) - unsafe := f.Head.OutVars().Diff(vars) - for v := range unsafe { - if !c.generatedVars[m].Contains(v) { - c.err(NewError(UnsafeVarErr, f.Loc(), "%v %v is unsafe", VarTypeName, v)) - } - } - return false - }) - } -} - -// checkTypes runs the type checker on all rules and user functions. The type -// checker builds a TypeEnv that is stored on the compiler. +// checkTypes runs the type checker on all rules. The type checker builds a +// TypeEnv that is stored on the compiler. func (c *Compiler) checkTypes() { // Recursion is caught in earlier step, so this cannot fail. sorted, _ := c.Graph.Sort() @@ -700,7 +599,7 @@ func (c *Compiler) err(err *Error) { c.Errors = append(c.Errors, err) } -func (c *Compiler) getExports() (*util.HashMap, *util.HashMap) { +func (c *Compiler) getExports() *util.HashMap { rules := util.NewHashMap(func(a, b util.T) bool { r1 := a.(Ref) @@ -709,7 +608,6 @@ func (c *Compiler) getExports() (*util.HashMap, *util.HashMap) { }, func(v util.T) int { return v.(Ref).Hash() }) - funcs := rules.Copy() for _, mod := range c.Modules { rv, ok := rules.Get(mod.Package.Path) @@ -718,23 +616,13 @@ func (c *Compiler) getExports() (*util.HashMap, *util.HashMap) { } rvs := rv.([]Var) - fv, ok := funcs.Get(mod.Package.Path) - if !ok { - fv = []*Func{} - } - fvs := fv.([]*Func) - for _, rule := range mod.Rules { rvs = append(rvs, rule.Head.Name) } - for _, fn := range mod.Funcs { - fvs = append(fvs, fn) - } rules.Put(mod.Package.Path, rvs) - funcs.Put(mod.Package.Path, fvs) } - return rules, funcs + return rules } // resolveAllRefs resolves references in expressions to their fully qualified values. 
@@ -748,7 +636,7 @@ func (c *Compiler) getExports() (*util.HashMap, *util.HashMap) { // The reference "bar[_]" would be resolved to "data.foo.bar[_]". func (c *Compiler) resolveAllRefs() { - rules, funcs := c.getExports() + rules := c.getExports() for _, mod := range c.Modules { @@ -757,56 +645,17 @@ func (c *Compiler) resolveAllRefs() { ruleExports = x.([]Var) } - var funcExports []*Func - if x, ok := funcs.Get(mod.Package.Path); ok { - funcExports = x.([]*Func) - } + globals := getGlobals(mod.Package, ruleExports, mod.Imports) - globals := getGlobals(mod.Package, ruleExports, funcExports, mod.Imports) WalkRules(mod, func(rule *Rule) bool { resolveRefsInRule(globals, rule) return false }) - WalkFuncs(mod, func(fn *Func) bool { - resolveRefsInFunc(globals, fn) - path := fn.Path().String() - c.FuncMap[path] = append(c.FuncMap[path], fn) - - return false - }) // Once imports have been resolved, they are no longer needed. mod.Imports = nil } - for _, mod := range c.Modules { - var visitor Visitor - visitor = NewGenericVisitor(func(x interface{}) bool { - switch x := x.(type) { - case *Expr: - if terms, ok := x.Terms.([]*Term); ok { - for i := 1; i < len(terms); i++ { - Walk(visitor, terms[i]) - } - return true - } - case *Term: - switch v := x.Value.(type) { - case Ref: - if _, ok := c.FuncMap[v.String()]; ok { - c.err(&Error{ - Code: CompileErr, - Message: x.Location.Format("%v refers to a known builtin but does not call it", string(x.Location.Text)), - }) - } - } - } - return false - }) - - Walk(visitor, mod) - } - if c.moduleLoader != nil { parsed, err := c.moduleLoader(c.Modules) @@ -884,12 +733,8 @@ func (c *Compiler) setRuleTree() { c.RuleTree = NewRuleTree(c.ModuleTree) } -func (c *Compiler) setFuncTree() { - c.FuncTree = NewFuncTree(c.ModuleTree) -} - func (c *Compiler) setGraph() { - c.Graph = NewGraph(c.Modules, c.GetRules, c.GetFunc) + c.Graph = NewGraph(c.Modules, c.GetRules) } type queryCompiler struct { @@ -948,17 +793,12 @@ func (qc *queryCompiler) resolveRefs(qctx *QueryContext, body Body) (Body, error if qctx != nil && qctx.Package != nil { var ruleExports []Var - rules, funcs := qc.compiler.getExports() + rules := qc.compiler.getExports() if exist, ok := rules.Get(qctx.Package.Path); ok { ruleExports = exist.([]Var) } - var funcExports []*Func - if exist, ok := funcs.Get(qctx.Package.Path); ok { - funcExports = exist.([]*Func) - } - - globals = getGlobals(qctx.Package, ruleExports, funcExports, qc.qctx.Imports) + globals = getGlobals(qctx.Package, ruleExports, qc.qctx.Imports) qctx.Imports = nil } @@ -1141,42 +981,6 @@ func (n *TreeNode) DepthFirst(f func(node *TreeNode) bool) { } } -// NewFuncTree returns a new TreeNode that represents the root -// of the function tree populated with the given functions. -func NewFuncTree(mtree *ModuleTreeNode) *TreeNode { - funcSet := map[String]*Func{} - - // Build function sets for this package. - for _, mod := range mtree.Modules { - for _, fn := range mod.Funcs { - key := String(fn.Head.Name) - funcSet[key] = fn - } - } - - // Each function becomes a leaf node. - children := map[Value]*TreeNode{} - for key, fn := range funcSet { - children[key] = &TreeNode{ - Key: key, - Children: nil, - Values: []util.T{fn}, - } - } - - // Each module in subpackage becomes child node. 
- for _, child := range mtree.Children { - children[child.Key] = NewFuncTree(child) - } - - return &TreeNode{ - Key: mtree.Key, - Values: nil, - Children: children, - Hide: mtree.Hide, - } -} - type withModifierChecker struct { errors Errors expr *Expr @@ -1241,16 +1045,16 @@ func (wc *withModifierChecker) err(code string, loc *Location, f string, a ...in wc.errors = append(wc.errors, NewError(code, loc, f, a...)) } -// Graph represents the graph of dependencies between ast Rules and Funcs. +// Graph represents the graph of dependencies between rules. type Graph struct { adj map[util.T]map[util.T]struct{} nodes map[util.T]struct{} sorted []util.T } -// NewGraph returns a new Graph based on modules. The list function -// must return the rules or user functions referred to directly by the ref. -func NewGraph(modules map[string]*Module, list func(Ref) []*Rule, resolve func(string) []*Func) *Graph { +// NewGraph returns a new Graph based on modules. The list function must return +// the rules referred to directly by the ref. +func NewGraph(modules map[string]*Module, list func(Ref) []*Rule) *Graph { graph := &Graph{ adj: map[util.T]map[util.T]struct{}{}, @@ -1258,7 +1062,7 @@ func NewGraph(modules map[string]*Module, list func(Ref) []*Rule, resolve func(s sorted: nil, } - // Walk over all rules and functions, add them to graph, and build adjencency lists. + // Walk over all rules, add them to graph, and build adjencency lists. for _, module := range modules { addRefDeps := func(a util.T) func(ref Ref) bool { return func(ref Ref) bool { @@ -1268,32 +1072,9 @@ func NewGraph(modules map[string]*Module, list func(Ref) []*Rule, resolve func(s return false } } - addFuncDeps := func(a util.T) func(expr *Expr) bool { - return func(expr *Expr) bool { - if expr.IsBuiltin() { - name := expr.Terms.([]*Term)[0].String() - - // Language builtins won't be resolved. - if b := resolve(name); b != nil { - for _, c := range b { - graph.addDependency(a, c) - } - } - } - return false - } - } - WalkRules(module, func(a *Rule) bool { graph.addNode(a) WalkRefs(a, addRefDeps(a)) - WalkExprs(a, addFuncDeps(a)) - return false - }) - WalkFuncs(module, func(a *Func) bool { - graph.addNode(a) - WalkRefs(a, addRefDeps(a)) - WalkExprs(a, addFuncDeps(a)) return false }) } @@ -1301,13 +1082,13 @@ func NewGraph(modules map[string]*Module, list func(Ref) []*Rule, resolve func(s return graph } -// Dependencies returns the set of rules and funcs that x depends on. +// Dependencies returns the set of rules that x depends on. func (g *Graph) Dependencies(x util.T) map[util.T]struct{} { return g.adj[x] } -// Sort returns a slice of rules and functions sorted by dependencies. If a cycle -// is found, ok is set to false. +// Sort returns a slice of rules sorted by dependencies. If a cycle is found, +// ok is set to false. func (g *Graph) Sort() (sorted []util.T, ok bool) { if g.sorted != nil { return g.sorted, true @@ -1672,7 +1453,7 @@ func (l *localVarGenerator) Generated() VarSet { return l.generated } -func getGlobals(pkg *Package, rules []Var, funcs []*Func, imports []*Import) map[Var]Ref { +func getGlobals(pkg *Package, rules []Var, imports []*Import) map[Var]Ref { globals := map[Var]Ref{} @@ -1682,9 +1463,6 @@ func getGlobals(pkg *Package, rules []Var, funcs []*Func, imports []*Import) map global = append(global, &Term{Value: String(v)}) globals[v] = global } - for _, fn := range funcs { - globals[fn.Head.Name] = fn.Path() - } // Populate globals with imports. 
for _, i := range imports { @@ -1742,6 +1520,9 @@ func resolveRef(globals map[Var]Ref, ref Ref) Ref { } func resolveRefsInRule(globals map[Var]Ref, rule *Rule) { + for i := range rule.Head.Args { + rule.Head.Args[i] = resolveRefsInTerm(globals, rule.Head.Args[i]) + } if rule.Head.Key != nil { rule.Head.Key = resolveRefsInTerm(globals, rule.Head.Key) } @@ -1751,14 +1532,6 @@ func resolveRefsInRule(globals map[Var]Ref, rule *Rule) { rule.Body = resolveRefsInBody(globals, rule.Body) } -func resolveRefsInFunc(globals map[Var]Ref, fn *Func) { - for i := range fn.Head.Args { - fn.Head.Args[i] = resolveRefsInTerm(globals, fn.Head.Args[i]) - } - fn.Head.Output = resolveRefsInTerm(globals, fn.Head.Output) - fn.Body = resolveRefsInBody(globals, fn.Body) -} - func resolveRefsInBody(globals map[Var]Ref, body Body) Body { r := Body{} for _, expr := range body { @@ -1774,26 +1547,9 @@ func resolveRefsInExpr(globals map[Var]Ref, expr *Expr) *Expr { cpy.Terms = resolveRefsInTerm(globals, ts) case []*Term: buf := make([]*Term, len(ts)) - - // Resolve refs to functions inside the package. Refs outside the - // package must be fully qualified. FIXME(tsandall): this can go away - // once functions are merged with rules. - ref := ts[0].Value.(Ref) - if path, ok := globals[ref[0].Value.(Var)]; ok && len(ref) == 1 { - refCopy := path.Copy() - for i := range refCopy { - refCopy[i].SetLocation(ts[0].Location) - } - buf[0] = NewTerm(refCopy) - } else { - buf[0] = ts[0] - } - - // resolve remaining terms normally - for i := 1; i < len(ts); i++ { + for i := 0; i < len(ts); i++ { buf[i] = resolveRefsInTerm(globals, ts[i]) } - cpy.Terms = buf } for _, w := range cpy.With { diff --git a/ast/compile_test.go b/ast/compile_test.go index 898244a613..128f108dc6 100644 --- a/ast/compile_test.go +++ b/ast/compile_test.go @@ -69,7 +69,7 @@ s[2] { true }`, ) tree := NewRuleTree(NewModuleTree(mods)) - expectedNumRules := 21 + expectedNumRules := 23 if tree.Size() != expectedNumRules { t.Errorf("Expected %v but got %v rules", expectedNumRules, tree.Size()) @@ -110,14 +110,14 @@ func TestCompilerExample(t *testing.T) { assertNotFailed(t, c) } -func TestCompilerUserFunction(t *testing.T) { +func TestCompilerFunctions(t *testing.T) { tests := []struct { + note string modules []string - assert func(*testing.T, *Compiler) - errs []string + wantErr bool }{ { - // Test that functions can have different input types. 
+ note: "multiple input types", modules: []string{`package x f([x]) = y { @@ -127,9 +127,9 @@ func TestCompilerUserFunction(t *testing.T) { f({"foo": x}) = y { y = x }`}, - assert: assertNotFailed, }, { + note: "multiple input types", modules: []string{`package x f([x]) = y { @@ -139,9 +139,21 @@ func TestCompilerUserFunction(t *testing.T) { f([[x]]) = y { y = x }`}, - assert: assertNotFailed, }, { + note: "constant input", + modules: []string{`package x + + f(1) = y { + y = "foo" + } + + f(2) = y { + y = "bar" + }`}, + }, + { + note: "constant input", modules: []string{`package x f(1, x) = y { @@ -151,9 +163,9 @@ func TestCompilerUserFunction(t *testing.T) { f(x, y) = z { z = x+y }`}, - assert: assertNotFailed, }, { + note: "constant input", modules: []string{`package x f(x, 1) = y { @@ -163,9 +175,9 @@ func TestCompilerUserFunction(t *testing.T) { f(x, [y]) = z { z = x+y }`}, - assert: assertNotFailed, }, { + note: "multiple input types (nested)", modules: []string{`package x f({"foo": {"bar": x}}) = y { @@ -175,9 +187,9 @@ func TestCompilerUserFunction(t *testing.T) { f({"foo": [x]}) = y { y = x }`}, - assert: assertNotFailed, }, { + note: "multiple output types", modules: []string{`package x f(1) = y { @@ -187,137 +199,104 @@ func TestCompilerUserFunction(t *testing.T) { f(2) = y { y = 2 }`}, - assert: assertNotFailed, }, { - modules: []string{`package x + note: "namespacing", + modules: []string{ + `package x - f(1) = y { - y = "foo" - } + f(x) = y { + data.y.f[x] = y + }`, + `package y - f(2) = y { + f[x] = y { y = "bar" - }`}, - assert: assertNotFailed, + x = "foo" + }`, + }, }, - // Test that functions must have the same number of inputs. { - modules: []string{`package x - - f(x) = y { - y = x - } - - f(x, y) = z { - z = x+y - } + note: "implicit value", + modules: []string{ + `package x - f(x, y, z) = a { - b = x+y - a = b+z + f(x) { + x = "foo" }`}, - assert: assertFailed, }, - // Test that a function and rule within the same package cannot - // share a name. { - modules: []string{`package x + note: "resolving", + modules: []string{ + `package x - f(x) = y { - y = x - } + f(x) = x { true }`, + + `package y + + import data.x + import data.x.f as g - f[x] = y { - x = "foo" - y = "bar" - }`}, - assert: assertFailed, + p { g(1, _) } + p { x.f(1, _) } + p { data.x.f(1, _) } + `, + }, }, - // Test that a function and rule within different packages can - // share a name. { + note: "undefined", modules: []string{ `package x - f(x) = y { - data.y.f[x] = y - }`, - `package y - - f[x] = y { - y = "bar" - x = "foo" + p { + f(1) }`, }, - assert: assertNotFailed, + wantErr: true, }, - // Test that a reference to a function that does not invoke it - // errors. { + note: "must apply", modules: []string{ `package x - f(x) = y { - y = x - } + f(1) - g = y { - y = "foo" + p { f - }`, - `package y - - g = y { - y = "foo" - x.f - }`, - }, - assert: assertFailed, - errs: []string{ - "rego_compile_error: mod0:9: f refers to a known builtin but does not call it", - "rego_compile_error: mod1:5: x.f refers to a known builtin but does not call it", + } + `, }, + wantErr: true, }, - // Test that void functions compile properly. 
{ + note: "must apply", modules: []string{ `package x - - f(x) { - x = "foo" - }`}, - assert: assertNotFailed, + f(1) + p { f.x }`, + }, + wantErr: true, }, } - for _, test := range tests { - var err error - modules := map[string]*Module{} - for i, module := range test.modules { - name := fmt.Sprintf("mod%d", i) - modules[name], err = ParseModule(name, module) - if err != nil { - panic(err) - } - } - - c := NewCompiler() - c.Compile(modules) - test.assert(t, c) - - if test.errs != nil { - errs := c.Errors - var result []string - for _, err := range errs { - result = append(result, err.Error()) + for _, tc := range tests { + test.Subtest(t, tc.note, func(t *testing.T) { + var err error + modules := map[string]*Module{} + for i, module := range tc.modules { + name := fmt.Sprintf("mod%d", i) + modules[name], err = ParseModule(name, module) + if err != nil { + panic(err) + } } - - sort.Strings(test.errs) - sort.Strings(result) - if !reflect.DeepEqual(test.errs, result) { - t.Errorf("Expected errors %v test.errs, got %v", test.errs, result) + c := NewCompiler() + c.Compile(modules) + if tc.wantErr && !c.Failed() { + t.Errorf("Expected compilation error") + } else if !tc.wantErr && c.Failed() { + t.Errorf("Unexpected compilation error(s): %v", c.Errors) } - } + }) } } @@ -426,8 +405,8 @@ func TestCompilerCheckSafetyBodyReordering(t *testing.T) { x = y[0]; contains(x, "oo") `}, - {"userfunc", `split(y, ".", z); a.b.funcs.fn("...foo.bar..", y)`, `a.b.funcs.fn("...foo.bar..", y); split(y, ".", z)`}, - {"call-vars", `f.g[i](1); i = "foo"`, `i = "foo"; f.g[i](1)`}, + {"userfunc", `split(y, ".", z); data.a.b.funcs.fn("...foo.bar..", y)`, `data.a.b.funcs.fn("...foo.bar..", y); split(y, ".", z)`}, + {"call-vars", `data.f.g[i](1); i = "foo"`, `i = "foo"; data.f.g[i](1)`}, } for i, tc := range tests { @@ -477,20 +456,20 @@ r = true { a = [x | split(y, ".", z); x = z[i]; fn("...foo.bar..", y)] }`, compileStages(c, c.checkSafetyRuleBodies) assertNotFailed(t, c) - result1 := c.Modules["mod"].Rules[0].Body + result1 := c.Modules["mod"].Rules[1].Body expected1 := MustParseBody(`v = [null | true]; data.b[i] = j; xs = [x | a = [y | y = data.c[j]; y != 1]; a[i] = x]; z = [true | i2 = i; data.a.b.d.t with input as i2]; xs[j] > 0`) if !result1.Equal(expected1) { t.Errorf("Expected reordered body to be equal to:\n%v\nBut got:\n%v", expected1, result1) } - result2 := c.Modules["mod"].Rules[1].Body + result2 := c.Modules["mod"].Rules[2].Body expected2 := MustParseBody(`_ = [x | x = data.b[i]]; _ = data.b[j]; _ = [x | x = true; x != false]; true != false; _ = [x | data.foo[_] = x]; data.foo[_] = _`) if !result2.Equal(expected2) { t.Errorf("Expected pre-ordered body to equal:\n%v\nBut got:\n%v", expected2, result2) } - result3 := c.Modules["mod"].Rules[2].Body - expected3 := MustParseBody(`a = [x | compr.fn("...foo.bar..", y); split(y, ".", z); x = z[i]]`) + result3 := c.Modules["mod"].Rules[3].Body + expected3 := MustParseBody(`a = [x | data.compr.fn("...foo.bar..", y); split(y, ".", z); x = z[i]]`) if !result3.Equal(expected3) { t.Errorf("Expected pre-ordered body to equal:\n%v\nBut got:\n%v", expected3, result3) } @@ -537,6 +516,8 @@ func TestCompilerCheckSafetyBodyErrors(t *testing.T) { {"else-kw", "p { false } else { count(x, 1) }", `{x,}`}, {"userfunc", "foo(x) = [y, z] { split(x, y, z) }", `{y,z}`}, {"call-vars", "p { f[i].g[j](1) }", `{i, j}`}, + {"call-vars-input", "p { f(x, x) } f(x) = x { true }", `{x,}`}, + {"call-no-output", "p { f(x) } f(x) = x { true }", `{x,}`}, } makeErrMsg := func(varName string) 
string { @@ -643,11 +624,20 @@ q[1] { true }`, default foo = 1 default foo = 2 foo = 3 { true }`, + "mod4.rego": `package adrules.arity + +f(1) { true } +f { true } + +g(1) { true } +g(1,2) { true }`, }) compileStages(c, c.checkRuleConflicts) expected := []string{ + "rego_type_error: conflicting rules named f found", + "rego_type_error: conflicting rules named g found", "rego_type_error: conflicting rules named p found", "rego_type_error: conflicting rules named q found", "rego_type_error: multiple default rules named foo found", @@ -910,21 +900,16 @@ func TestCompilerSetGraph(t *testing.T) { } numRules := 0 - numFuncs := 0 for _, module := range c.Modules { WalkRules(module, func(*Rule) bool { numRules++ return false }) - WalkFuncs(module, func(*Func) bool { - numFuncs++ - return false - }) } - if len(sorted) != numRules+numFuncs { - t.Fatalf("Expected numRules+numFuncs (%v) to be same as len(sorted) (%v)", numRules+numFuncs, len(sorted)) + if len(sorted) != numRules { + t.Fatalf("Expected numRules (%v) to be same as len(sorted) (%v)", numRules, len(sorted)) } // Probe rules with dependencies. Ordering is not stable for ties because @@ -1120,10 +1105,6 @@ dataref = true { data }`, return fmt.Sprintf("rego_recursion_error: rule %v is recursive: %v", rule, strings.Join(loop, " -> ")) } - makeFuncErrMsg := func(fn string, loop ...string) string { - return fmt.Sprintf("rego_recursion_error: func %v is recursive: %v", fn, strings.Join(loop, " -> ")) - } - expected := []string{ makeRuleErrMsg("s", "s", "t", "s"), makeRuleErrMsg("t", "t", "s", "t"), @@ -1143,11 +1124,11 @@ dataref = true { data }`, makeRuleErrMsg("elsetop", "elsetop", "elsemid", "elsebottom", "elsetop"), makeRuleErrMsg("elsemid", "elsemid", "elsebottom", "elsetop", "elsemid"), makeRuleErrMsg("elsebottom", "elsebottom", "elsetop", "elsemid", "elsebottom"), - makeFuncErrMsg("fn", "fn", "fn"), - makeFuncErrMsg("foo", "foo", "bar", "foo"), - makeFuncErrMsg("bar", "bar", "foo", "bar"), - makeFuncErrMsg("bar", "bar", "p", "foo", "bar"), - makeFuncErrMsg("foo", "foo", "bar", "p", "foo"), + makeRuleErrMsg("fn", "fn", "fn"), + makeRuleErrMsg("foo", "foo", "bar", "foo"), + makeRuleErrMsg("bar", "bar", "foo", "bar"), + makeRuleErrMsg("bar", "bar", "p", "foo", "bar"), + makeRuleErrMsg("foo", "foo", "bar", "p", "foo"), makeRuleErrMsg("p", "p", "foo", "bar", "p"), } diff --git a/ast/env.go b/ast/env.go index d01c8f2c77..22a7a9f924 100644 --- a/ast/env.go +++ b/ast/env.go @@ -11,36 +11,17 @@ import ( // TypeEnv contains type info for static analysis such as type checking. type TypeEnv struct { - funcs map[string][]types.Type - tree *typeTreeNode - next *TypeEnv + tree *typeTreeNode + next *TypeEnv } // NewTypeEnv returns an empty TypeEnv. func NewTypeEnv() *TypeEnv { return &TypeEnv{ - funcs: map[string][]types.Type{}, - tree: newTypeTree(), + tree: newTypeTree(), } } -// GetFunc returns the type array corresponding to the arguments of the function -// referred to by name. GetFunc returns nil if there is no function matching that -// name. -func (env *TypeEnv) GetFunc(name string) []types.Type { - tps, ok := env.funcs[name] - if !ok && env.next != nil { - return env.next.GetFunc(name) - } - return tps -} - -// PutFunc inserts the type information for the function referred to by name into -// this TypeEnv. -func (env *TypeEnv) PutFunc(name string, args []types.Type) { - env.funcs[name] = args -} - // Get returns the type of x. 
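With functions folded into rules, they flow through the same dependency graph and recursion check as ordinary rules, and TypeEnv no longer needs its separate funcs map (GetFunc/PutFunc are gone; function types live in the regular type tree). A minimal sketch of the recursion behaviour (not part of the diff), assuming only the exported ast API; the exact error text is hedged and may differ slightly from what is shown in the comment.

package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	modules := map[string]*ast.Module{
		"mod1": ast.MustParseModule(`package x

f(x) = y {
	f(x) = y
}`),
	}
	c := ast.NewCompiler()
	c.Compile(modules)
	// Expected to fail with a rule-style recursion error, roughly:
	//   rego_recursion_error: rule f is recursive: f -> f
	fmt.Println(c.Failed())
	fmt.Println(c.Errors)
}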
func (env *TypeEnv) Get(x interface{}) types.Type { @@ -54,7 +35,10 @@ func (env *TypeEnv) Get(x interface{}) types.Type { case Null: return types.NewNull() case Boolean: - return types.NewBoolean() + if x.Compare(Boolean(true)) == 0 { + return types.T + } + return types.F case Number: return types.NewNumber() case String: diff --git a/ast/parser.go b/ast/parser.go index 943d98bdab..f2bb518ade 100644 --- a/ast/parser.go +++ b/ast/parser.go @@ -104,51 +104,80 @@ func makeArray(head interface{}, tail interface{}, loc *Location) (*Term, error) return arr, nil } +func makeArgs(head interface{}, tail interface{}, loc *Location) (Args, error) { + args := Args{} + if head == nil { + return nil, nil + } + args = append(args, head.(*Term)) + tailSlice := tail.([]interface{}) + for _, v := range tailSlice { + s := v.([]interface{}) + args = append(args, s[len(s)-1].(*Term)) + } + return args, nil +} + +func makeInfixCallExpr(operator interface{}, args interface{}, output interface{}) (*Expr, error) { + expr := &Expr{} + a := args.(Args) + terms := make([]*Term, len(a)+2) + terms[0] = operator.(*Term) + dst := terms[1:] + for i := 0; i < len(a); i++ { + dst[i] = a[i] + } + terms[len(terms)-1] = output.(*Term) + expr.Terms = terms + expr.Infix = true + return expr, nil +} + var g = &grammar{ rules: []*rule{ { name: "Program", - pos: position{line: 96, col: 1, offset: 2438}, + pos: position{line: 125, col: 1, offset: 3203}, expr: &actionExpr{ - pos: position{line: 96, col: 12, offset: 2449}, + pos: position{line: 125, col: 12, offset: 3214}, run: (*parser).callonProgram1, expr: &seqExpr{ - pos: position{line: 96, col: 12, offset: 2449}, + pos: position{line: 125, col: 12, offset: 3214}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 96, col: 12, offset: 2449}, + pos: position{line: 125, col: 12, offset: 3214}, name: "_", }, &labeledExpr{ - pos: position{line: 96, col: 14, offset: 2451}, + pos: position{line: 125, col: 14, offset: 3216}, label: "vals", expr: &zeroOrOneExpr{ - pos: position{line: 96, col: 19, offset: 2456}, + pos: position{line: 125, col: 19, offset: 3221}, expr: &seqExpr{ - pos: position{line: 96, col: 20, offset: 2457}, + pos: position{line: 125, col: 20, offset: 3222}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 96, col: 20, offset: 2457}, + pos: position{line: 125, col: 20, offset: 3222}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 96, col: 25, offset: 2462}, + pos: position{line: 125, col: 25, offset: 3227}, name: "Stmt", }, }, &labeledExpr{ - pos: position{line: 96, col: 30, offset: 2467}, + pos: position{line: 125, col: 30, offset: 3232}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 96, col: 35, offset: 2472}, + pos: position{line: 125, col: 35, offset: 3237}, expr: &seqExpr{ - pos: position{line: 96, col: 36, offset: 2473}, + pos: position{line: 125, col: 36, offset: 3238}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 96, col: 36, offset: 2473}, + pos: position{line: 125, col: 36, offset: 3238}, name: "ws", }, &ruleRefExpr{ - pos: position{line: 96, col: 39, offset: 2476}, + pos: position{line: 125, col: 39, offset: 3241}, name: "Stmt", }, }, @@ -160,11 +189,11 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 96, col: 48, offset: 2485}, + pos: position{line: 125, col: 48, offset: 3250}, name: "_", }, &ruleRefExpr{ - pos: position{line: 96, col: 50, offset: 2487}, + pos: position{line: 125, col: 50, offset: 3252}, name: "EOF", }, }, @@ -173,38 +202,34 @@ var g = &grammar{ }, { name: "Stmt", - pos: 
position{line: 114, col: 1, offset: 2861}, + pos: position{line: 143, col: 1, offset: 3626}, expr: &actionExpr{ - pos: position{line: 114, col: 9, offset: 2869}, + pos: position{line: 143, col: 9, offset: 3634}, run: (*parser).callonStmt1, expr: &labeledExpr{ - pos: position{line: 114, col: 9, offset: 2869}, + pos: position{line: 143, col: 9, offset: 3634}, label: "val", expr: &choiceExpr{ - pos: position{line: 114, col: 14, offset: 2874}, + pos: position{line: 143, col: 14, offset: 3639}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 114, col: 14, offset: 2874}, + pos: position{line: 143, col: 14, offset: 3639}, name: "Package", }, &ruleRefExpr{ - pos: position{line: 114, col: 24, offset: 2884}, + pos: position{line: 143, col: 24, offset: 3649}, name: "Import", }, &ruleRefExpr{ - pos: position{line: 114, col: 33, offset: 2893}, + pos: position{line: 143, col: 33, offset: 3658}, name: "Rules", }, &ruleRefExpr{ - pos: position{line: 114, col: 41, offset: 2901}, - name: "UserFunc", - }, - &ruleRefExpr{ - pos: position{line: 114, col: 52, offset: 2912}, + pos: position{line: 143, col: 41, offset: 3666}, name: "Body", }, &ruleRefExpr{ - pos: position{line: 114, col: 59, offset: 2919}, + pos: position{line: 143, col: 48, offset: 3673}, name: "Comment", }, }, @@ -214,34 +239,34 @@ var g = &grammar{ }, { name: "Package", - pos: position{line: 118, col: 1, offset: 2953}, + pos: position{line: 147, col: 1, offset: 3707}, expr: &actionExpr{ - pos: position{line: 118, col: 12, offset: 2964}, + pos: position{line: 147, col: 12, offset: 3718}, run: (*parser).callonPackage1, expr: &seqExpr{ - pos: position{line: 118, col: 12, offset: 2964}, + pos: position{line: 147, col: 12, offset: 3718}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 118, col: 12, offset: 2964}, + pos: position{line: 147, col: 12, offset: 3718}, val: "package", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 118, col: 22, offset: 2974}, + pos: position{line: 147, col: 22, offset: 3728}, name: "ws", }, &labeledExpr{ - pos: position{line: 118, col: 25, offset: 2977}, + pos: position{line: 147, col: 25, offset: 3731}, label: "val", expr: &choiceExpr{ - pos: position{line: 118, col: 30, offset: 2982}, + pos: position{line: 147, col: 30, offset: 3736}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 118, col: 30, offset: 2982}, + pos: position{line: 147, col: 30, offset: 3736}, name: "Ref", }, &ruleRefExpr{ - pos: position{line: 118, col: 36, offset: 2988}, + pos: position{line: 147, col: 36, offset: 3742}, name: "Var", }, }, @@ -253,62 +278,62 @@ var g = &grammar{ }, { name: "Import", - pos: position{line: 152, col: 1, offset: 4304}, + pos: position{line: 181, col: 1, offset: 5058}, expr: &actionExpr{ - pos: position{line: 152, col: 11, offset: 4314}, + pos: position{line: 181, col: 11, offset: 5068}, run: (*parser).callonImport1, expr: &seqExpr{ - pos: position{line: 152, col: 11, offset: 4314}, + pos: position{line: 181, col: 11, offset: 5068}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 152, col: 11, offset: 4314}, + pos: position{line: 181, col: 11, offset: 5068}, val: "import", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 152, col: 20, offset: 4323}, + pos: position{line: 181, col: 20, offset: 5077}, name: "ws", }, &labeledExpr{ - pos: position{line: 152, col: 23, offset: 4326}, + pos: position{line: 181, col: 23, offset: 5080}, label: "path", expr: &choiceExpr{ - pos: position{line: 152, col: 29, offset: 4332}, + pos: position{line: 181, col: 29, 
offset: 5086}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 152, col: 29, offset: 4332}, + pos: position{line: 181, col: 29, offset: 5086}, name: "Ref", }, &ruleRefExpr{ - pos: position{line: 152, col: 35, offset: 4338}, + pos: position{line: 181, col: 35, offset: 5092}, name: "Var", }, }, }, }, &labeledExpr{ - pos: position{line: 152, col: 40, offset: 4343}, + pos: position{line: 181, col: 40, offset: 5097}, label: "alias", expr: &zeroOrOneExpr{ - pos: position{line: 152, col: 46, offset: 4349}, + pos: position{line: 181, col: 46, offset: 5103}, expr: &seqExpr{ - pos: position{line: 152, col: 47, offset: 4350}, + pos: position{line: 181, col: 47, offset: 5104}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 152, col: 47, offset: 4350}, + pos: position{line: 181, col: 47, offset: 5104}, name: "ws", }, &litMatcher{ - pos: position{line: 152, col: 50, offset: 4353}, + pos: position{line: 181, col: 50, offset: 5107}, val: "as", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 152, col: 55, offset: 4358}, + pos: position{line: 181, col: 55, offset: 5112}, name: "ws", }, &ruleRefExpr{ - pos: position{line: 152, col: 58, offset: 4361}, + pos: position{line: 181, col: 58, offset: 5115}, name: "Var", }, }, @@ -321,16 +346,16 @@ var g = &grammar{ }, { name: "Rules", - pos: position{line: 168, col: 1, offset: 4811}, + pos: position{line: 197, col: 1, offset: 5565}, expr: &choiceExpr{ - pos: position{line: 168, col: 10, offset: 4820}, + pos: position{line: 197, col: 10, offset: 5574}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 168, col: 10, offset: 4820}, + pos: position{line: 197, col: 10, offset: 5574}, name: "DefaultRules", }, &ruleRefExpr{ - pos: position{line: 168, col: 25, offset: 4835}, + pos: position{line: 197, col: 25, offset: 5589}, name: "NormalRules", }, }, @@ -338,48 +363,48 @@ var g = &grammar{ }, { name: "DefaultRules", - pos: position{line: 170, col: 1, offset: 4848}, + pos: position{line: 199, col: 1, offset: 5602}, expr: &actionExpr{ - pos: position{line: 170, col: 17, offset: 4864}, + pos: position{line: 199, col: 17, offset: 5618}, run: (*parser).callonDefaultRules1, expr: &seqExpr{ - pos: position{line: 170, col: 17, offset: 4864}, + pos: position{line: 199, col: 17, offset: 5618}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 170, col: 17, offset: 4864}, + pos: position{line: 199, col: 17, offset: 5618}, val: "default", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 170, col: 27, offset: 4874}, + pos: position{line: 199, col: 27, offset: 5628}, name: "ws", }, &labeledExpr{ - pos: position{line: 170, col: 30, offset: 4877}, + pos: position{line: 199, col: 30, offset: 5631}, label: "name", expr: &ruleRefExpr{ - pos: position{line: 170, col: 35, offset: 4882}, + pos: position{line: 199, col: 35, offset: 5636}, name: "Var", }, }, &ruleRefExpr{ - pos: position{line: 170, col: 39, offset: 4886}, + pos: position{line: 199, col: 39, offset: 5640}, name: "_", }, &litMatcher{ - pos: position{line: 170, col: 41, offset: 4888}, + pos: position{line: 199, col: 41, offset: 5642}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 170, col: 45, offset: 4892}, + pos: position{line: 199, col: 45, offset: 5646}, name: "_", }, &labeledExpr{ - pos: position{line: 170, col: 47, offset: 4894}, + pos: position{line: 199, col: 47, offset: 5648}, label: "value", expr: &ruleRefExpr{ - pos: position{line: 170, col: 53, offset: 4900}, + pos: position{line: 199, col: 53, offset: 5654}, name: "Term", }, }, 
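The makeArgs and makeInfixCallExpr helpers added at the top of this parser.go diff define the representation behind the new call syntax: an expression written output = operator(arg, ...) is stored as a flat term slice [operator, args..., output] with Infix set, and the new Args/RuleHead productions further below let a rule head carry an argument list, which is how function definitions become ordinary rules. A minimal sketch of what that looks like through the exported ast API (not part of the diff); the field names follow the diff (Terms, Infix, Head.Args) and the example values are illustrative.

package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	// An infix call: output = operator(args...).
	body := ast.MustParseBody(`z = data.x.f(1, 2)`)
	expr := body[0]
	terms := expr.Terms.([]*ast.Term)
	fmt.Println(expr.Infix) // true
	fmt.Println(terms[0])   // the operator ref: data.x.f
	fmt.Println(terms[1:3]) // the arguments: 1, 2
	fmt.Println(terms[3])   // the output: z

	// A rule head can now carry arguments: f(x) = y { ... } parses as a
	// regular rule whose Head.Args holds the argument terms.
	rule := ast.MustParseRule(`f(x) = y { y = x }`)
	fmt.Println(rule.Head.Name, rule.Head.Args, rule.Head.Value)
}

Treating the output as the final term position is what lets user functions and built-in calls share a single call representation in the compiler.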
@@ -389,46 +414,46 @@ var g = &grammar{ }, { name: "NormalRules", - pos: position{line: 213, col: 1, offset: 5869}, + pos: position{line: 242, col: 1, offset: 6623}, expr: &actionExpr{ - pos: position{line: 213, col: 16, offset: 5884}, + pos: position{line: 242, col: 16, offset: 6638}, run: (*parser).callonNormalRules1, expr: &seqExpr{ - pos: position{line: 213, col: 16, offset: 5884}, + pos: position{line: 242, col: 16, offset: 6638}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 213, col: 16, offset: 5884}, + pos: position{line: 242, col: 16, offset: 6638}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 213, col: 21, offset: 5889}, + pos: position{line: 242, col: 21, offset: 6643}, name: "RuleHead", }, }, &ruleRefExpr{ - pos: position{line: 213, col: 30, offset: 5898}, + pos: position{line: 242, col: 30, offset: 6652}, name: "_", }, &labeledExpr{ - pos: position{line: 213, col: 32, offset: 5900}, + pos: position{line: 242, col: 32, offset: 6654}, label: "b", expr: &seqExpr{ - pos: position{line: 213, col: 35, offset: 5903}, + pos: position{line: 242, col: 35, offset: 6657}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 213, col: 35, offset: 5903}, + pos: position{line: 242, col: 35, offset: 6657}, name: "NonEmptyBraceEnclosedBody", }, &zeroOrMoreExpr{ - pos: position{line: 213, col: 61, offset: 5929}, + pos: position{line: 242, col: 61, offset: 6683}, expr: &seqExpr{ - pos: position{line: 213, col: 63, offset: 5931}, + pos: position{line: 242, col: 63, offset: 6685}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 213, col: 63, offset: 5931}, + pos: position{line: 242, col: 63, offset: 6685}, name: "_", }, &ruleRefExpr{ - pos: position{line: 213, col: 65, offset: 5933}, + pos: position{line: 242, col: 65, offset: 6687}, name: "RuleExt", }, }, @@ -442,228 +467,100 @@ var g = &grammar{ }, }, { - name: "UserFunc", - pos: position{line: 268, col: 1, offset: 7229}, - expr: &actionExpr{ - pos: position{line: 268, col: 13, offset: 7241}, - run: (*parser).callonUserFunc1, - expr: &seqExpr{ - pos: position{line: 268, col: 13, offset: 7241}, - exprs: []interface{}{ - &labeledExpr{ - pos: position{line: 268, col: 13, offset: 7241}, - label: "head", - expr: &ruleRefExpr{ - pos: position{line: 268, col: 18, offset: 7246}, - name: "FuncHead", - }, - }, - &ruleRefExpr{ - pos: position{line: 268, col: 27, offset: 7255}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 268, col: 29, offset: 7257}, - label: "b", - expr: &ruleRefExpr{ - pos: position{line: 268, col: 31, offset: 7259}, - name: "NonEmptyBraceEnclosedBody", - }, - }, - }, - }, - }, - }, - { - name: "FuncHead", - pos: position{line: 283, col: 1, offset: 7478}, + name: "RuleHead", + pos: position{line: 298, col: 1, offset: 8017}, expr: &actionExpr{ - pos: position{line: 283, col: 13, offset: 7490}, - run: (*parser).callonFuncHead1, + pos: position{line: 298, col: 13, offset: 8029}, + run: (*parser).callonRuleHead1, expr: &seqExpr{ - pos: position{line: 283, col: 13, offset: 7490}, + pos: position{line: 298, col: 13, offset: 8029}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 283, col: 13, offset: 7490}, + pos: position{line: 298, col: 13, offset: 8029}, label: "name", expr: &ruleRefExpr{ - pos: position{line: 283, col: 18, offset: 7495}, + pos: position{line: 298, col: 18, offset: 8034}, name: "Var", }, }, &labeledExpr{ - pos: position{line: 283, col: 22, offset: 7499}, + pos: position{line: 298, col: 22, offset: 8038}, label: "args", - expr: &ruleRefExpr{ - pos: position{line: 
283, col: 27, offset: 7504}, - name: "FuncArgs", - }, - }, - &labeledExpr{ - pos: position{line: 283, col: 36, offset: 7513}, - label: "output", expr: &zeroOrOneExpr{ - pos: position{line: 283, col: 43, offset: 7520}, + pos: position{line: 298, col: 27, offset: 8043}, expr: &seqExpr{ - pos: position{line: 283, col: 45, offset: 7522}, + pos: position{line: 298, col: 29, offset: 8045}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 283, col: 45, offset: 7522}, + pos: position{line: 298, col: 29, offset: 8045}, name: "_", }, &litMatcher{ - pos: position{line: 283, col: 47, offset: 7524}, - val: "=", + pos: position{line: 298, col: 31, offset: 8047}, + val: "(", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 283, col: 51, offset: 7528}, + pos: position{line: 298, col: 35, offset: 8051}, name: "_", }, &ruleRefExpr{ - pos: position{line: 283, col: 53, offset: 7530}, - name: "Term", + pos: position{line: 298, col: 37, offset: 8053}, + name: "Args", }, - }, - }, - }, - }, - }, - }, - }, - }, - { - name: "FuncArgs", - pos: position{line: 299, col: 1, offset: 7834}, - expr: &actionExpr{ - pos: position{line: 299, col: 13, offset: 7846}, - run: (*parser).callonFuncArgs1, - expr: &seqExpr{ - pos: position{line: 299, col: 13, offset: 7846}, - exprs: []interface{}{ - &ruleRefExpr{ - pos: position{line: 299, col: 13, offset: 7846}, - name: "_", - }, - &litMatcher{ - pos: position{line: 299, col: 15, offset: 7848}, - val: "(", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 299, col: 20, offset: 7853}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 299, col: 23, offset: 7856}, - label: "head", - expr: &zeroOrOneExpr{ - pos: position{line: 299, col: 28, offset: 7861}, - expr: &ruleRefExpr{ - pos: position{line: 299, col: 28, offset: 7861}, - name: "ArgTerm", - }, - }, - }, - &labeledExpr{ - pos: position{line: 299, col: 37, offset: 7870}, - label: "tail", - expr: &zeroOrMoreExpr{ - pos: position{line: 299, col: 42, offset: 7875}, - expr: &seqExpr{ - pos: position{line: 299, col: 43, offset: 7876}, - exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 299, col: 43, offset: 7876}, + pos: position{line: 298, col: 42, offset: 8058}, name: "_", }, &litMatcher{ - pos: position{line: 299, col: 45, offset: 7878}, - val: ",", + pos: position{line: 298, col: 44, offset: 8060}, + val: ")", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 299, col: 49, offset: 7882}, + pos: position{line: 298, col: 48, offset: 8064}, name: "_", }, - &ruleRefExpr{ - pos: position{line: 299, col: 51, offset: 7884}, - name: "ArgTerm", - }, }, }, }, }, - &ruleRefExpr{ - pos: position{line: 299, col: 61, offset: 7894}, - name: "_", - }, - &litMatcher{ - pos: position{line: 299, col: 63, offset: 7896}, - val: ")", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 299, col: 67, offset: 7900}, - name: "_", - }, - }, - }, - }, - }, - { - name: "RuleHead", - pos: position{line: 320, col: 1, offset: 8320}, - expr: &actionExpr{ - pos: position{line: 320, col: 13, offset: 8332}, - run: (*parser).callonRuleHead1, - expr: &seqExpr{ - pos: position{line: 320, col: 13, offset: 8332}, - exprs: []interface{}{ - &labeledExpr{ - pos: position{line: 320, col: 13, offset: 8332}, - label: "name", - expr: &ruleRefExpr{ - pos: position{line: 320, col: 18, offset: 8337}, - name: "Var", - }, - }, &labeledExpr{ - pos: position{line: 320, col: 22, offset: 8341}, + pos: position{line: 298, col: 53, offset: 8069}, label: "key", expr: &zeroOrOneExpr{ - pos: position{line: 320, col: 26, 
offset: 8345}, + pos: position{line: 298, col: 57, offset: 8073}, expr: &seqExpr{ - pos: position{line: 320, col: 28, offset: 8347}, + pos: position{line: 298, col: 59, offset: 8075}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 320, col: 28, offset: 8347}, + pos: position{line: 298, col: 59, offset: 8075}, name: "_", }, &litMatcher{ - pos: position{line: 320, col: 30, offset: 8349}, + pos: position{line: 298, col: 61, offset: 8077}, val: "[", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 320, col: 34, offset: 8353}, + pos: position{line: 298, col: 65, offset: 8081}, name: "_", }, &ruleRefExpr{ - pos: position{line: 320, col: 36, offset: 8355}, + pos: position{line: 298, col: 67, offset: 8083}, name: "Term", }, &ruleRefExpr{ - pos: position{line: 320, col: 41, offset: 8360}, + pos: position{line: 298, col: 72, offset: 8088}, name: "_", }, &litMatcher{ - pos: position{line: 320, col: 43, offset: 8362}, + pos: position{line: 298, col: 74, offset: 8090}, val: "]", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 320, col: 47, offset: 8366}, + pos: position{line: 298, col: 78, offset: 8094}, name: "_", }, }, @@ -671,75 +568,145 @@ var g = &grammar{ }, }, &labeledExpr{ - pos: position{line: 320, col: 52, offset: 8371}, + pos: position{line: 298, col: 83, offset: 8099}, label: "value", expr: &zeroOrOneExpr{ - pos: position{line: 320, col: 58, offset: 8377}, + pos: position{line: 298, col: 89, offset: 8105}, expr: &seqExpr{ - pos: position{line: 320, col: 60, offset: 8379}, + pos: position{line: 298, col: 91, offset: 8107}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 320, col: 60, offset: 8379}, + pos: position{line: 298, col: 91, offset: 8107}, name: "_", }, &litMatcher{ - pos: position{line: 320, col: 62, offset: 8381}, + pos: position{line: 298, col: 93, offset: 8109}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 320, col: 66, offset: 8385}, + pos: position{line: 298, col: 97, offset: 8113}, + name: "_", + }, + &ruleRefExpr{ + pos: position{line: 298, col: 99, offset: 8115}, + name: "Term", + }, + }, + }, + }, + }, + }, + }, + }, + }, + { + name: "Args", + pos: position{line: 341, col: 1, offset: 9323}, + expr: &actionExpr{ + pos: position{line: 341, col: 9, offset: 9331}, + run: (*parser).callonArgs1, + expr: &seqExpr{ + pos: position{line: 341, col: 9, offset: 9331}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 341, col: 9, offset: 9331}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 341, col: 11, offset: 9333}, + label: "head", + expr: &ruleRefExpr{ + pos: position{line: 341, col: 16, offset: 9338}, + name: "Term", + }, + }, + &labeledExpr{ + pos: position{line: 341, col: 21, offset: 9343}, + label: "tail", + expr: &zeroOrMoreExpr{ + pos: position{line: 341, col: 26, offset: 9348}, + expr: &seqExpr{ + pos: position{line: 341, col: 27, offset: 9349}, + exprs: []interface{}{ + &ruleRefExpr{ + pos: position{line: 341, col: 27, offset: 9349}, + name: "_", + }, + &litMatcher{ + pos: position{line: 341, col: 29, offset: 9351}, + val: ",", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 341, col: 33, offset: 9355}, name: "_", }, &ruleRefExpr{ - pos: position{line: 320, col: 68, offset: 8387}, + pos: position{line: 341, col: 35, offset: 9357}, name: "Term", }, }, }, }, }, + &ruleRefExpr{ + pos: position{line: 341, col: 42, offset: 9364}, + name: "_", + }, + &zeroOrOneExpr{ + pos: position{line: 341, col: 44, offset: 9366}, + expr: &litMatcher{ + pos: position{line: 341, col: 44, offset: 
9366}, + val: ",", + ignoreCase: false, + }, + }, + &ruleRefExpr{ + pos: position{line: 341, col: 49, offset: 9371}, + name: "_", + }, }, }, }, }, { name: "Else", - pos: position{line: 355, col: 1, offset: 9375}, + pos: position{line: 345, col: 1, offset: 9427}, expr: &actionExpr{ - pos: position{line: 355, col: 9, offset: 9383}, + pos: position{line: 345, col: 9, offset: 9435}, run: (*parser).callonElse1, expr: &seqExpr{ - pos: position{line: 355, col: 9, offset: 9383}, + pos: position{line: 345, col: 9, offset: 9435}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 355, col: 9, offset: 9383}, + pos: position{line: 345, col: 9, offset: 9435}, val: "else", ignoreCase: false, }, &labeledExpr{ - pos: position{line: 355, col: 16, offset: 9390}, + pos: position{line: 345, col: 16, offset: 9442}, label: "val", expr: &zeroOrOneExpr{ - pos: position{line: 355, col: 20, offset: 9394}, + pos: position{line: 345, col: 20, offset: 9446}, expr: &seqExpr{ - pos: position{line: 355, col: 22, offset: 9396}, + pos: position{line: 345, col: 22, offset: 9448}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 355, col: 22, offset: 9396}, + pos: position{line: 345, col: 22, offset: 9448}, name: "_", }, &litMatcher{ - pos: position{line: 355, col: 24, offset: 9398}, + pos: position{line: 345, col: 24, offset: 9450}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 355, col: 28, offset: 9402}, + pos: position{line: 345, col: 28, offset: 9454}, name: "_", }, &ruleRefExpr{ - pos: position{line: 355, col: 30, offset: 9404}, + pos: position{line: 345, col: 30, offset: 9456}, name: "Term", }, }, @@ -747,17 +714,17 @@ var g = &grammar{ }, }, &labeledExpr{ - pos: position{line: 355, col: 38, offset: 9412}, + pos: position{line: 345, col: 38, offset: 9464}, label: "b", expr: &seqExpr{ - pos: position{line: 355, col: 42, offset: 9416}, + pos: position{line: 345, col: 42, offset: 9468}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 355, col: 42, offset: 9416}, + pos: position{line: 345, col: 42, offset: 9468}, name: "_", }, &ruleRefExpr{ - pos: position{line: 355, col: 44, offset: 9418}, + pos: position{line: 345, col: 44, offset: 9470}, name: "NonEmptyBraceEnclosedBody", }, }, @@ -769,15 +736,15 @@ var g = &grammar{ }, { name: "RuleDup", - pos: position{line: 370, col: 1, offset: 9770}, + pos: position{line: 360, col: 1, offset: 9822}, expr: &actionExpr{ - pos: position{line: 370, col: 12, offset: 9781}, + pos: position{line: 360, col: 12, offset: 9833}, run: (*parser).callonRuleDup1, expr: &labeledExpr{ - pos: position{line: 370, col: 12, offset: 9781}, + pos: position{line: 360, col: 12, offset: 9833}, label: "b", expr: &ruleRefExpr{ - pos: position{line: 370, col: 14, offset: 9783}, + pos: position{line: 360, col: 14, offset: 9835}, name: "NonEmptyBraceEnclosedBody", }, }, @@ -785,16 +752,16 @@ var g = &grammar{ }, { name: "RuleExt", - pos: position{line: 374, col: 1, offset: 9879}, + pos: position{line: 364, col: 1, offset: 9931}, expr: &choiceExpr{ - pos: position{line: 374, col: 12, offset: 9890}, + pos: position{line: 364, col: 12, offset: 9942}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 374, col: 12, offset: 9890}, + pos: position{line: 364, col: 12, offset: 9942}, name: "Else", }, &ruleRefExpr{ - pos: position{line: 374, col: 19, offset: 9897}, + pos: position{line: 364, col: 19, offset: 9949}, name: "RuleDup", }, }, @@ -802,16 +769,16 @@ var g = &grammar{ }, { name: "Body", - pos: position{line: 376, col: 1, offset: 9906}, + pos: 
position{line: 366, col: 1, offset: 9958}, expr: &choiceExpr{ - pos: position{line: 376, col: 9, offset: 9914}, + pos: position{line: 366, col: 9, offset: 9966}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 376, col: 9, offset: 9914}, + pos: position{line: 366, col: 9, offset: 9966}, name: "NonWhitespaceBody", }, &ruleRefExpr{ - pos: position{line: 376, col: 29, offset: 9934}, + pos: position{line: 366, col: 29, offset: 9986}, name: "BraceEnclosedBody", }, }, @@ -819,39 +786,39 @@ var g = &grammar{ }, { name: "NonEmptyBraceEnclosedBody", - pos: position{line: 378, col: 1, offset: 9953}, + pos: position{line: 368, col: 1, offset: 10005}, expr: &actionExpr{ - pos: position{line: 378, col: 30, offset: 9982}, + pos: position{line: 368, col: 30, offset: 10034}, run: (*parser).callonNonEmptyBraceEnclosedBody1, expr: &seqExpr{ - pos: position{line: 378, col: 30, offset: 9982}, + pos: position{line: 368, col: 30, offset: 10034}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 378, col: 30, offset: 9982}, + pos: position{line: 368, col: 30, offset: 10034}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 378, col: 34, offset: 9986}, + pos: position{line: 368, col: 34, offset: 10038}, name: "_", }, &labeledExpr{ - pos: position{line: 378, col: 36, offset: 9988}, + pos: position{line: 368, col: 36, offset: 10040}, label: "val", expr: &zeroOrOneExpr{ - pos: position{line: 378, col: 40, offset: 9992}, + pos: position{line: 368, col: 40, offset: 10044}, expr: &ruleRefExpr{ - pos: position{line: 378, col: 40, offset: 9992}, + pos: position{line: 368, col: 40, offset: 10044}, name: "WhitespaceBody", }, }, }, &ruleRefExpr{ - pos: position{line: 378, col: 56, offset: 10008}, + pos: position{line: 368, col: 56, offset: 10060}, name: "_", }, &litMatcher{ - pos: position{line: 378, col: 58, offset: 10010}, + pos: position{line: 368, col: 58, offset: 10062}, val: "}", ignoreCase: false, }, @@ -861,39 +828,39 @@ var g = &grammar{ }, { name: "BraceEnclosedBody", - pos: position{line: 385, col: 1, offset: 10105}, + pos: position{line: 375, col: 1, offset: 10157}, expr: &actionExpr{ - pos: position{line: 385, col: 22, offset: 10126}, + pos: position{line: 375, col: 22, offset: 10178}, run: (*parser).callonBraceEnclosedBody1, expr: &seqExpr{ - pos: position{line: 385, col: 22, offset: 10126}, + pos: position{line: 375, col: 22, offset: 10178}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 385, col: 22, offset: 10126}, + pos: position{line: 375, col: 22, offset: 10178}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 385, col: 26, offset: 10130}, + pos: position{line: 375, col: 26, offset: 10182}, name: "_", }, &labeledExpr{ - pos: position{line: 385, col: 28, offset: 10132}, + pos: position{line: 375, col: 28, offset: 10184}, label: "val", expr: &zeroOrOneExpr{ - pos: position{line: 385, col: 32, offset: 10136}, + pos: position{line: 375, col: 32, offset: 10188}, expr: &ruleRefExpr{ - pos: position{line: 385, col: 32, offset: 10136}, + pos: position{line: 375, col: 32, offset: 10188}, name: "WhitespaceBody", }, }, }, &ruleRefExpr{ - pos: position{line: 385, col: 48, offset: 10152}, + pos: position{line: 375, col: 48, offset: 10204}, name: "_", }, &litMatcher{ - pos: position{line: 385, col: 50, offset: 10154}, + pos: position{line: 375, col: 50, offset: 10206}, val: "}", ignoreCase: false, }, @@ -903,33 +870,33 @@ var g = &grammar{ }, { name: "WhitespaceBody", - pos: position{line: 399, col: 1, offset: 10506}, + pos: position{line: 389, 
col: 1, offset: 10558}, expr: &actionExpr{ - pos: position{line: 399, col: 19, offset: 10524}, + pos: position{line: 389, col: 19, offset: 10576}, run: (*parser).callonWhitespaceBody1, expr: &seqExpr{ - pos: position{line: 399, col: 19, offset: 10524}, + pos: position{line: 389, col: 19, offset: 10576}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 399, col: 19, offset: 10524}, + pos: position{line: 389, col: 19, offset: 10576}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 399, col: 24, offset: 10529}, + pos: position{line: 389, col: 24, offset: 10581}, name: "Literal", }, }, &labeledExpr{ - pos: position{line: 399, col: 32, offset: 10537}, + pos: position{line: 389, col: 32, offset: 10589}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 399, col: 37, offset: 10542}, + pos: position{line: 389, col: 37, offset: 10594}, expr: &seqExpr{ - pos: position{line: 399, col: 38, offset: 10543}, + pos: position{line: 389, col: 38, offset: 10595}, exprs: []interface{}{ &zeroOrMoreExpr{ - pos: position{line: 399, col: 38, offset: 10543}, + pos: position{line: 389, col: 38, offset: 10595}, expr: &charClassMatcher{ - pos: position{line: 399, col: 38, offset: 10543}, + pos: position{line: 389, col: 38, offset: 10595}, val: "[ \\t]", chars: []rune{' ', '\t'}, ignoreCase: false, @@ -937,37 +904,37 @@ var g = &grammar{ }, }, &choiceExpr{ - pos: position{line: 399, col: 46, offset: 10551}, + pos: position{line: 389, col: 46, offset: 10603}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 399, col: 47, offset: 10552}, + pos: position{line: 389, col: 47, offset: 10604}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 399, col: 47, offset: 10552}, + pos: position{line: 389, col: 47, offset: 10604}, val: ";", ignoreCase: false, }, &zeroOrOneExpr{ - pos: position{line: 399, col: 51, offset: 10556}, + pos: position{line: 389, col: 51, offset: 10608}, expr: &ruleRefExpr{ - pos: position{line: 399, col: 51, offset: 10556}, + pos: position{line: 389, col: 51, offset: 10608}, name: "Comment", }, }, }, }, &seqExpr{ - pos: position{line: 399, col: 64, offset: 10569}, + pos: position{line: 389, col: 64, offset: 10621}, exprs: []interface{}{ &zeroOrOneExpr{ - pos: position{line: 399, col: 64, offset: 10569}, + pos: position{line: 389, col: 64, offset: 10621}, expr: &ruleRefExpr{ - pos: position{line: 399, col: 64, offset: 10569}, + pos: position{line: 389, col: 64, offset: 10621}, name: "Comment", }, }, &charClassMatcher{ - pos: position{line: 399, col: 73, offset: 10578}, + pos: position{line: 389, col: 73, offset: 10630}, val: "[\\r\\n]", chars: []rune{'\r', '\n'}, ignoreCase: false, @@ -978,11 +945,11 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 399, col: 82, offset: 10587}, + pos: position{line: 389, col: 82, offset: 10639}, name: "_", }, &ruleRefExpr{ - pos: position{line: 399, col: 84, offset: 10589}, + pos: position{line: 389, col: 84, offset: 10641}, name: "Literal", }, }, @@ -995,44 +962,44 @@ var g = &grammar{ }, { name: "NonWhitespaceBody", - pos: position{line: 405, col: 1, offset: 10778}, + pos: position{line: 395, col: 1, offset: 10830}, expr: &actionExpr{ - pos: position{line: 405, col: 22, offset: 10799}, + pos: position{line: 395, col: 22, offset: 10851}, run: (*parser).callonNonWhitespaceBody1, expr: &seqExpr{ - pos: position{line: 405, col: 22, offset: 10799}, + pos: position{line: 395, col: 22, offset: 10851}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 405, col: 22, offset: 10799}, + pos: position{line: 395, col: 
22, offset: 10851}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 405, col: 27, offset: 10804}, + pos: position{line: 395, col: 27, offset: 10856}, name: "Literal", }, }, &labeledExpr{ - pos: position{line: 405, col: 35, offset: 10812}, + pos: position{line: 395, col: 35, offset: 10864}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 405, col: 40, offset: 10817}, + pos: position{line: 395, col: 40, offset: 10869}, expr: &seqExpr{ - pos: position{line: 405, col: 42, offset: 10819}, + pos: position{line: 395, col: 42, offset: 10871}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 405, col: 42, offset: 10819}, + pos: position{line: 395, col: 42, offset: 10871}, name: "_", }, &litMatcher{ - pos: position{line: 405, col: 44, offset: 10821}, + pos: position{line: 395, col: 44, offset: 10873}, val: ";", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 405, col: 48, offset: 10825}, + pos: position{line: 395, col: 48, offset: 10877}, name: "_", }, &ruleRefExpr{ - pos: position{line: 405, col: 50, offset: 10827}, + pos: position{line: 395, col: 50, offset: 10879}, name: "Literal", }, }, @@ -1045,28 +1012,28 @@ var g = &grammar{ }, { name: "Literal", - pos: position{line: 409, col: 1, offset: 10902}, + pos: position{line: 399, col: 1, offset: 10954}, expr: &actionExpr{ - pos: position{line: 409, col: 12, offset: 10913}, + pos: position{line: 399, col: 12, offset: 10965}, run: (*parser).callonLiteral1, expr: &seqExpr{ - pos: position{line: 409, col: 12, offset: 10913}, + pos: position{line: 399, col: 12, offset: 10965}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 409, col: 12, offset: 10913}, + pos: position{line: 399, col: 12, offset: 10965}, label: "neg", expr: &zeroOrOneExpr{ - pos: position{line: 409, col: 16, offset: 10917}, + pos: position{line: 399, col: 16, offset: 10969}, expr: &seqExpr{ - pos: position{line: 409, col: 18, offset: 10919}, + pos: position{line: 399, col: 18, offset: 10971}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 409, col: 18, offset: 10919}, + pos: position{line: 399, col: 18, offset: 10971}, val: "not", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 409, col: 24, offset: 10925}, + pos: position{line: 399, col: 24, offset: 10977}, name: "ws", }, }, @@ -1074,40 +1041,40 @@ var g = &grammar{ }, }, &labeledExpr{ - pos: position{line: 409, col: 30, offset: 10931}, + pos: position{line: 399, col: 30, offset: 10983}, label: "val", expr: &ruleRefExpr{ - pos: position{line: 409, col: 34, offset: 10935}, + pos: position{line: 399, col: 34, offset: 10987}, name: "Expr", }, }, &labeledExpr{ - pos: position{line: 409, col: 39, offset: 10940}, + pos: position{line: 399, col: 39, offset: 10992}, label: "with", expr: &zeroOrOneExpr{ - pos: position{line: 409, col: 44, offset: 10945}, + pos: position{line: 399, col: 44, offset: 10997}, expr: &seqExpr{ - pos: position{line: 409, col: 46, offset: 10947}, + pos: position{line: 399, col: 46, offset: 10999}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 409, col: 46, offset: 10947}, + pos: position{line: 399, col: 46, offset: 10999}, name: "ws", }, &ruleRefExpr{ - pos: position{line: 409, col: 49, offset: 10950}, + pos: position{line: 399, col: 49, offset: 11002}, name: "With", }, &zeroOrMoreExpr{ - pos: position{line: 409, col: 54, offset: 10955}, + pos: position{line: 399, col: 54, offset: 11007}, expr: &seqExpr{ - pos: position{line: 409, col: 55, offset: 10956}, + pos: position{line: 399, col: 55, offset: 11008}, exprs: []interface{}{ 
&ruleRefExpr{ - pos: position{line: 409, col: 55, offset: 10956}, + pos: position{line: 399, col: 55, offset: 11008}, name: "ws", }, &ruleRefExpr{ - pos: position{line: 409, col: 58, offset: 10959}, + pos: position{line: 399, col: 58, offset: 11011}, name: "With", }, }, @@ -1123,48 +1090,48 @@ var g = &grammar{ }, { name: "With", - pos: position{line: 432, col: 1, offset: 11531}, + pos: position{line: 427, col: 1, offset: 11682}, expr: &actionExpr{ - pos: position{line: 432, col: 9, offset: 11539}, + pos: position{line: 427, col: 9, offset: 11690}, run: (*parser).callonWith1, expr: &seqExpr{ - pos: position{line: 432, col: 9, offset: 11539}, + pos: position{line: 427, col: 9, offset: 11690}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 432, col: 9, offset: 11539}, + pos: position{line: 427, col: 9, offset: 11690}, val: "with", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 432, col: 16, offset: 11546}, + pos: position{line: 427, col: 16, offset: 11697}, name: "ws", }, &labeledExpr{ - pos: position{line: 432, col: 19, offset: 11549}, + pos: position{line: 427, col: 19, offset: 11700}, label: "target", expr: &ruleRefExpr{ - pos: position{line: 432, col: 26, offset: 11556}, + pos: position{line: 427, col: 26, offset: 11707}, name: "Term", }, }, &ruleRefExpr{ - pos: position{line: 432, col: 31, offset: 11561}, + pos: position{line: 427, col: 31, offset: 11712}, name: "ws", }, &litMatcher{ - pos: position{line: 432, col: 34, offset: 11564}, + pos: position{line: 427, col: 34, offset: 11715}, val: "as", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 432, col: 39, offset: 11569}, + pos: position{line: 427, col: 39, offset: 11720}, name: "ws", }, &labeledExpr{ - pos: position{line: 432, col: 42, offset: 11572}, + pos: position{line: 427, col: 42, offset: 11723}, label: "value", expr: &ruleRefExpr{ - pos: position{line: 432, col: 48, offset: 11578}, + pos: position{line: 427, col: 48, offset: 11729}, name: "Term", }, }, @@ -1174,163 +1141,201 @@ var g = &grammar{ }, { name: "Expr", - pos: position{line: 443, col: 1, offset: 11827}, + pos: position{line: 438, col: 1, offset: 11978}, + expr: &choiceExpr{ + pos: position{line: 438, col: 9, offset: 11986}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 438, col: 9, offset: 11986}, + name: "InfixExpr", + }, + &ruleRefExpr{ + pos: position{line: 438, col: 21, offset: 11998}, + name: "PrefixExpr", + }, + &ruleRefExpr{ + pos: position{line: 438, col: 34, offset: 12011}, + name: "Term", + }, + }, + }, + }, + { + name: "InfixExpr", + pos: position{line: 440, col: 1, offset: 12017}, expr: &choiceExpr{ - pos: position{line: 443, col: 9, offset: 11835}, + pos: position{line: 440, col: 14, offset: 12030}, alternatives: []interface{}{ &choiceExpr{ - pos: position{line: 443, col: 10, offset: 11836}, + pos: position{line: 440, col: 15, offset: 12031}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 440, col: 15, offset: 12031}, + name: "InfixCallExpr", + }, + &ruleRefExpr{ + pos: position{line: 440, col: 31, offset: 12047}, + name: "InfixCallExprReverse", + }, + }, + }, + &choiceExpr{ + pos: position{line: 440, col: 56, offset: 12072}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 443, col: 10, offset: 11836}, + pos: position{line: 440, col: 56, offset: 12072}, name: "InfixArithExpr", }, &ruleRefExpr{ - pos: position{line: 443, col: 27, offset: 11853}, + pos: position{line: 440, col: 73, offset: 12089}, name: "InfixArithExprReverse", }, }, }, &ruleRefExpr{ - pos: 
position{line: 443, col: 52, offset: 11878}, - name: "InfixExpr", - }, - &ruleRefExpr{ - pos: position{line: 443, col: 64, offset: 11890}, - name: "PrefixExpr", - }, - &ruleRefExpr{ - pos: position{line: 443, col: 77, offset: 11903}, - name: "Term", + pos: position{line: 440, col: 98, offset: 12114}, + name: "InfixRelationExpr", }, }, }, }, { - name: "InfixArithExpr", - pos: position{line: 445, col: 1, offset: 11909}, + name: "InfixCallExpr", + pos: position{line: 442, col: 1, offset: 12133}, expr: &actionExpr{ - pos: position{line: 445, col: 19, offset: 11927}, - run: (*parser).callonInfixArithExpr1, + pos: position{line: 442, col: 18, offset: 12150}, + run: (*parser).callonInfixCallExpr1, expr: &seqExpr{ - pos: position{line: 445, col: 19, offset: 11927}, + pos: position{line: 442, col: 18, offset: 12150}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 445, col: 19, offset: 11927}, + pos: position{line: 442, col: 18, offset: 12150}, label: "output", expr: &ruleRefExpr{ - pos: position{line: 445, col: 26, offset: 11934}, + pos: position{line: 442, col: 25, offset: 12157}, name: "Term", }, }, &ruleRefExpr{ - pos: position{line: 445, col: 31, offset: 11939}, + pos: position{line: 442, col: 30, offset: 12162}, name: "_", }, &litMatcher{ - pos: position{line: 445, col: 33, offset: 11941}, + pos: position{line: 442, col: 32, offset: 12164}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 445, col: 37, offset: 11945}, + pos: position{line: 442, col: 36, offset: 12168}, name: "_", }, &labeledExpr{ - pos: position{line: 445, col: 39, offset: 11947}, - label: "left", + pos: position{line: 442, col: 38, offset: 12170}, + label: "operator", expr: &ruleRefExpr{ - pos: position{line: 445, col: 44, offset: 11952}, - name: "Term", + pos: position{line: 442, col: 47, offset: 12179}, + name: "Operator", }, }, &ruleRefExpr{ - pos: position{line: 445, col: 49, offset: 11957}, + pos: position{line: 442, col: 56, offset: 12188}, name: "_", }, - &labeledExpr{ - pos: position{line: 445, col: 51, offset: 11959}, - label: "op", - expr: &ruleRefExpr{ - pos: position{line: 445, col: 54, offset: 11962}, - name: "ArithInfixOp", - }, + &litMatcher{ + pos: position{line: 442, col: 58, offset: 12190}, + val: "(", + ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 445, col: 67, offset: 11975}, + pos: position{line: 442, col: 62, offset: 12194}, name: "_", }, &labeledExpr{ - pos: position{line: 445, col: 69, offset: 11977}, - label: "right", + pos: position{line: 442, col: 64, offset: 12196}, + label: "args", expr: &ruleRefExpr{ - pos: position{line: 445, col: 75, offset: 11983}, - name: "Term", + pos: position{line: 442, col: 69, offset: 12201}, + name: "Args", }, }, + &ruleRefExpr{ + pos: position{line: 442, col: 74, offset: 12206}, + name: "_", + }, + &litMatcher{ + pos: position{line: 442, col: 76, offset: 12208}, + val: ")", + ignoreCase: false, + }, }, }, }, }, { - name: "InfixArithExprReverse", - pos: position{line: 449, col: 1, offset: 12074}, + name: "InfixCallExprReverse", + pos: position{line: 446, col: 1, offset: 12270}, expr: &actionExpr{ - pos: position{line: 449, col: 26, offset: 12099}, - run: (*parser).callonInfixArithExprReverse1, + pos: position{line: 446, col: 25, offset: 12294}, + run: (*parser).callonInfixCallExprReverse1, expr: &seqExpr{ - pos: position{line: 449, col: 26, offset: 12099}, + pos: position{line: 446, col: 25, offset: 12294}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 449, col: 26, offset: 12099}, - label: "left", + pos: 
position{line: 446, col: 25, offset: 12294}, + label: "operator", expr: &ruleRefExpr{ - pos: position{line: 449, col: 31, offset: 12104}, - name: "Term", + pos: position{line: 446, col: 34, offset: 12303}, + name: "Operator", }, }, &ruleRefExpr{ - pos: position{line: 449, col: 36, offset: 12109}, + pos: position{line: 446, col: 43, offset: 12312}, name: "_", }, - &labeledExpr{ - pos: position{line: 449, col: 38, offset: 12111}, - label: "op", - expr: &ruleRefExpr{ - pos: position{line: 449, col: 41, offset: 12114}, - name: "ArithInfixOp", - }, + &litMatcher{ + pos: position{line: 446, col: 45, offset: 12314}, + val: "(", + ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 449, col: 54, offset: 12127}, + pos: position{line: 446, col: 49, offset: 12318}, name: "_", }, &labeledExpr{ - pos: position{line: 449, col: 56, offset: 12129}, - label: "right", + pos: position{line: 446, col: 51, offset: 12320}, + label: "args", expr: &ruleRefExpr{ - pos: position{line: 449, col: 62, offset: 12135}, - name: "Term", + pos: position{line: 446, col: 56, offset: 12325}, + name: "Args", }, }, &ruleRefExpr{ - pos: position{line: 449, col: 67, offset: 12140}, + pos: position{line: 446, col: 61, offset: 12330}, + name: "_", + }, + &litMatcher{ + pos: position{line: 446, col: 63, offset: 12332}, + val: ")", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 446, col: 67, offset: 12336}, name: "_", }, &litMatcher{ - pos: position{line: 449, col: 69, offset: 12142}, + pos: position{line: 446, col: 69, offset: 12338}, val: "=", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 449, col: 73, offset: 12146}, + pos: position{line: 446, col: 73, offset: 12342}, name: "_", }, &labeledExpr{ - pos: position{line: 449, col: 75, offset: 12148}, + pos: position{line: 446, col: 75, offset: 12344}, label: "output", expr: &ruleRefExpr{ - pos: position{line: 449, col: 82, offset: 12155}, + pos: position{line: 446, col: 82, offset: 12351}, name: "Term", }, }, @@ -1339,49 +1344,181 @@ var g = &grammar{ }, }, { - name: "ArithInfixOp", - pos: position{line: 453, col: 1, offset: 12246}, + name: "InfixArithExpr", + pos: position{line: 450, col: 1, offset: 12414}, expr: &actionExpr{ - pos: position{line: 453, col: 17, offset: 12262}, - run: (*parser).callonArithInfixOp1, - expr: &labeledExpr{ - pos: position{line: 453, col: 17, offset: 12262}, - label: "val", - expr: &choiceExpr{ - pos: position{line: 453, col: 22, offset: 12267}, - alternatives: []interface{}{ - &litMatcher{ - pos: position{line: 453, col: 22, offset: 12267}, - val: "+", - ignoreCase: false, + pos: position{line: 450, col: 19, offset: 12432}, + run: (*parser).callonInfixArithExpr1, + expr: &seqExpr{ + pos: position{line: 450, col: 19, offset: 12432}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 450, col: 19, offset: 12432}, + label: "output", + expr: &ruleRefExpr{ + pos: position{line: 450, col: 26, offset: 12439}, + name: "Term", }, - &litMatcher{ - pos: position{line: 453, col: 28, offset: 12273}, - val: "-", + }, + &ruleRefExpr{ + pos: position{line: 450, col: 31, offset: 12444}, + name: "_", + }, + &litMatcher{ + pos: position{line: 450, col: 33, offset: 12446}, + val: "=", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 450, col: 37, offset: 12450}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 450, col: 39, offset: 12452}, + label: "left", + expr: &ruleRefExpr{ + pos: position{line: 450, col: 44, offset: 12457}, + name: "Term", + }, + }, + &ruleRefExpr{ + pos: position{line: 450, col: 49, 
offset: 12462}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 450, col: 51, offset: 12464}, + label: "operator", + expr: &ruleRefExpr{ + pos: position{line: 450, col: 60, offset: 12473}, + name: "ArithInfixOp", + }, + }, + &ruleRefExpr{ + pos: position{line: 450, col: 73, offset: 12486}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 450, col: 75, offset: 12488}, + label: "right", + expr: &ruleRefExpr{ + pos: position{line: 450, col: 81, offset: 12494}, + name: "Term", + }, + }, + }, + }, + }, + }, + { + name: "InfixArithExprReverse", + pos: position{line: 454, col: 1, offset: 12586}, + expr: &actionExpr{ + pos: position{line: 454, col: 26, offset: 12611}, + run: (*parser).callonInfixArithExprReverse1, + expr: &seqExpr{ + pos: position{line: 454, col: 26, offset: 12611}, + exprs: []interface{}{ + &labeledExpr{ + pos: position{line: 454, col: 26, offset: 12611}, + label: "left", + expr: &ruleRefExpr{ + pos: position{line: 454, col: 31, offset: 12616}, + name: "Term", + }, + }, + &ruleRefExpr{ + pos: position{line: 454, col: 36, offset: 12621}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 454, col: 38, offset: 12623}, + label: "operator", + expr: &ruleRefExpr{ + pos: position{line: 454, col: 47, offset: 12632}, + name: "ArithInfixOp", + }, + }, + &ruleRefExpr{ + pos: position{line: 454, col: 60, offset: 12645}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 454, col: 62, offset: 12647}, + label: "right", + expr: &ruleRefExpr{ + pos: position{line: 454, col: 68, offset: 12653}, + name: "Term", + }, + }, + &ruleRefExpr{ + pos: position{line: 454, col: 73, offset: 12658}, + name: "_", + }, + &litMatcher{ + pos: position{line: 454, col: 75, offset: 12660}, + val: "=", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 454, col: 79, offset: 12664}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 454, col: 81, offset: 12666}, + label: "output", + expr: &ruleRefExpr{ + pos: position{line: 454, col: 88, offset: 12673}, + name: "Term", + }, + }, + }, + }, + }, + }, + { + name: "ArithInfixOp", + pos: position{line: 458, col: 1, offset: 12765}, + expr: &actionExpr{ + pos: position{line: 458, col: 17, offset: 12781}, + run: (*parser).callonArithInfixOp1, + expr: &labeledExpr{ + pos: position{line: 458, col: 17, offset: 12781}, + label: "val", + expr: &choiceExpr{ + pos: position{line: 458, col: 22, offset: 12786}, + alternatives: []interface{}{ + &litMatcher{ + pos: position{line: 458, col: 22, offset: 12786}, + val: "+", + ignoreCase: false, + }, + &litMatcher{ + pos: position{line: 458, col: 28, offset: 12792}, + val: "-", ignoreCase: false, }, &litMatcher{ - pos: position{line: 453, col: 34, offset: 12279}, + pos: position{line: 458, col: 34, offset: 12798}, val: "*", ignoreCase: false, }, &litMatcher{ - pos: position{line: 453, col: 40, offset: 12285}, + pos: position{line: 458, col: 40, offset: 12804}, val: "/", ignoreCase: false, }, &litMatcher{ - pos: position{line: 453, col: 46, offset: 12291}, + pos: position{line: 458, col: 46, offset: 12810}, val: "&", ignoreCase: false, }, &litMatcher{ - pos: position{line: 453, col: 52, offset: 12297}, + pos: position{line: 458, col: 52, offset: 12816}, val: "|", ignoreCase: false, }, &litMatcher{ - pos: position{line: 453, col: 58, offset: 12303}, + pos: position{line: 458, col: 58, offset: 12822}, val: "-", ignoreCase: false, }, @@ -1391,43 +1528,43 @@ var g = &grammar{ }, }, { - name: "InfixExpr", - pos: position{line: 465, col: 1, offset: 12577}, + name: "InfixRelationExpr", + pos: 
position{line: 470, col: 1, offset: 13096}, expr: &actionExpr{ - pos: position{line: 465, col: 14, offset: 12590}, - run: (*parser).callonInfixExpr1, + pos: position{line: 470, col: 22, offset: 13117}, + run: (*parser).callonInfixRelationExpr1, expr: &seqExpr{ - pos: position{line: 465, col: 14, offset: 12590}, + pos: position{line: 470, col: 22, offset: 13117}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 465, col: 14, offset: 12590}, + pos: position{line: 470, col: 22, offset: 13117}, label: "left", expr: &ruleRefExpr{ - pos: position{line: 465, col: 19, offset: 12595}, + pos: position{line: 470, col: 27, offset: 13122}, name: "Term", }, }, &ruleRefExpr{ - pos: position{line: 465, col: 24, offset: 12600}, + pos: position{line: 470, col: 32, offset: 13127}, name: "_", }, &labeledExpr{ - pos: position{line: 465, col: 26, offset: 12602}, - label: "op", + pos: position{line: 470, col: 34, offset: 13129}, + label: "operator", expr: &ruleRefExpr{ - pos: position{line: 465, col: 29, offset: 12605}, - name: "InfixOp", + pos: position{line: 470, col: 43, offset: 13138}, + name: "InfixRelationOp", }, }, &ruleRefExpr{ - pos: position{line: 465, col: 37, offset: 12613}, + pos: position{line: 470, col: 59, offset: 13154}, name: "_", }, &labeledExpr{ - pos: position{line: 465, col: 39, offset: 12615}, + pos: position{line: 470, col: 61, offset: 13156}, label: "right", expr: &ruleRefExpr{ - pos: position{line: 465, col: 45, offset: 12621}, + pos: position{line: 470, col: 67, offset: 13162}, name: "Term", }, }, @@ -1436,44 +1573,44 @@ var g = &grammar{ }, }, { - name: "InfixOp", - pos: position{line: 469, col: 1, offset: 12696}, + name: "InfixRelationOp", + pos: position{line: 481, col: 1, offset: 13340}, expr: &actionExpr{ - pos: position{line: 469, col: 12, offset: 12707}, - run: (*parser).callonInfixOp1, + pos: position{line: 481, col: 20, offset: 13359}, + run: (*parser).callonInfixRelationOp1, expr: &labeledExpr{ - pos: position{line: 469, col: 12, offset: 12707}, + pos: position{line: 481, col: 20, offset: 13359}, label: "val", expr: &choiceExpr{ - pos: position{line: 469, col: 17, offset: 12712}, + pos: position{line: 481, col: 25, offset: 13364}, alternatives: []interface{}{ &litMatcher{ - pos: position{line: 469, col: 17, offset: 12712}, + pos: position{line: 481, col: 25, offset: 13364}, val: "=", ignoreCase: false, }, &litMatcher{ - pos: position{line: 469, col: 23, offset: 12718}, + pos: position{line: 481, col: 31, offset: 13370}, val: "!=", ignoreCase: false, }, &litMatcher{ - pos: position{line: 469, col: 30, offset: 12725}, + pos: position{line: 481, col: 38, offset: 13377}, val: "<=", ignoreCase: false, }, &litMatcher{ - pos: position{line: 469, col: 37, offset: 12732}, + pos: position{line: 481, col: 45, offset: 13384}, val: ">=", ignoreCase: false, }, &litMatcher{ - pos: position{line: 469, col: 44, offset: 12739}, + pos: position{line: 481, col: 52, offset: 13391}, val: "<", ignoreCase: false, }, &litMatcher{ - pos: position{line: 469, col: 50, offset: 12745}, + pos: position{line: 481, col: 58, offset: 13397}, val: ">", ignoreCase: false, }, @@ -1484,16 +1621,16 @@ var g = &grammar{ }, { name: "PrefixExpr", - pos: position{line: 481, col: 1, offset: 13019}, + pos: position{line: 493, col: 1, offset: 13671}, expr: &choiceExpr{ - pos: position{line: 481, col: 15, offset: 13033}, + pos: position{line: 493, col: 15, offset: 13685}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 481, col: 15, offset: 13033}, + pos: position{line: 493, col: 15, offset: 
13685}, name: "SetEmpty", }, &ruleRefExpr{ - pos: position{line: 481, col: 26, offset: 13044}, + pos: position{line: 493, col: 26, offset: 13696}, name: "Call", }, }, @@ -1501,64 +1638,64 @@ var g = &grammar{ }, { name: "Call", - pos: position{line: 483, col: 1, offset: 13050}, + pos: position{line: 495, col: 1, offset: 13702}, expr: &actionExpr{ - pos: position{line: 483, col: 9, offset: 13058}, + pos: position{line: 495, col: 9, offset: 13710}, run: (*parser).callonCall1, expr: &seqExpr{ - pos: position{line: 483, col: 9, offset: 13058}, + pos: position{line: 495, col: 9, offset: 13710}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 483, col: 9, offset: 13058}, + pos: position{line: 495, col: 9, offset: 13710}, label: "name", expr: &ruleRefExpr{ - pos: position{line: 483, col: 14, offset: 13063}, + pos: position{line: 495, col: 14, offset: 13715}, name: "Operator", }, }, &litMatcher{ - pos: position{line: 483, col: 23, offset: 13072}, + pos: position{line: 495, col: 23, offset: 13724}, val: "(", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 483, col: 27, offset: 13076}, + pos: position{line: 495, col: 27, offset: 13728}, name: "_", }, &labeledExpr{ - pos: position{line: 483, col: 29, offset: 13078}, + pos: position{line: 495, col: 29, offset: 13730}, label: "head", expr: &zeroOrOneExpr{ - pos: position{line: 483, col: 34, offset: 13083}, + pos: position{line: 495, col: 34, offset: 13735}, expr: &ruleRefExpr{ - pos: position{line: 483, col: 34, offset: 13083}, + pos: position{line: 495, col: 34, offset: 13735}, name: "Term", }, }, }, &labeledExpr{ - pos: position{line: 483, col: 40, offset: 13089}, + pos: position{line: 495, col: 40, offset: 13741}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 483, col: 45, offset: 13094}, + pos: position{line: 495, col: 45, offset: 13746}, expr: &seqExpr{ - pos: position{line: 483, col: 47, offset: 13096}, + pos: position{line: 495, col: 47, offset: 13748}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 483, col: 47, offset: 13096}, + pos: position{line: 495, col: 47, offset: 13748}, name: "_", }, &litMatcher{ - pos: position{line: 483, col: 49, offset: 13098}, + pos: position{line: 495, col: 49, offset: 13750}, val: ",", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 483, col: 53, offset: 13102}, + pos: position{line: 495, col: 53, offset: 13754}, name: "_", }, &ruleRefExpr{ - pos: position{line: 483, col: 55, offset: 13104}, + pos: position{line: 495, col: 55, offset: 13756}, name: "Term", }, }, @@ -1566,11 +1703,11 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 483, col: 63, offset: 13112}, + pos: position{line: 495, col: 63, offset: 13764}, name: "_", }, &litMatcher{ - pos: position{line: 483, col: 66, offset: 13115}, + pos: position{line: 495, col: 66, offset: 13767}, val: ")", ignoreCase: false, }, @@ -1580,22 +1717,22 @@ var g = &grammar{ }, { name: "Operator", - pos: position{line: 499, col: 1, offset: 13519}, + pos: position{line: 513, col: 1, offset: 14201}, expr: &actionExpr{ - pos: position{line: 499, col: 13, offset: 13531}, + pos: position{line: 513, col: 13, offset: 14213}, run: (*parser).callonOperator1, expr: &labeledExpr{ - pos: position{line: 499, col: 13, offset: 13531}, + pos: position{line: 513, col: 13, offset: 14213}, label: "val", expr: &choiceExpr{ - pos: position{line: 499, col: 18, offset: 13536}, + pos: position{line: 513, col: 18, offset: 14218}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 499, col: 18, offset: 13536}, + pos: 
position{line: 513, col: 18, offset: 14218}, name: "Ref", }, &ruleRefExpr{ - pos: position{line: 499, col: 24, offset: 13542}, + pos: position{line: 513, col: 24, offset: 14224}, name: "Var", }, }, @@ -1605,34 +1742,34 @@ var g = &grammar{ }, { name: "Term", - pos: position{line: 511, col: 1, offset: 13773}, + pos: position{line: 525, col: 1, offset: 14455}, expr: &actionExpr{ - pos: position{line: 511, col: 9, offset: 13781}, + pos: position{line: 525, col: 9, offset: 14463}, run: (*parser).callonTerm1, expr: &labeledExpr{ - pos: position{line: 511, col: 9, offset: 13781}, + pos: position{line: 525, col: 9, offset: 14463}, label: "val", expr: &choiceExpr{ - pos: position{line: 511, col: 15, offset: 13787}, + pos: position{line: 525, col: 15, offset: 14469}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 511, col: 15, offset: 13787}, + pos: position{line: 525, col: 15, offset: 14469}, name: "Comprehension", }, &ruleRefExpr{ - pos: position{line: 511, col: 31, offset: 13803}, + pos: position{line: 525, col: 31, offset: 14485}, name: "Composite", }, &ruleRefExpr{ - pos: position{line: 511, col: 43, offset: 13815}, + pos: position{line: 525, col: 43, offset: 14497}, name: "Scalar", }, &ruleRefExpr{ - pos: position{line: 511, col: 52, offset: 13824}, + pos: position{line: 525, col: 52, offset: 14506}, name: "Ref", }, &ruleRefExpr{ - pos: position{line: 511, col: 58, offset: 13830}, + pos: position{line: 525, col: 58, offset: 14512}, name: "Var", }, }, @@ -1642,20 +1779,20 @@ var g = &grammar{ }, { name: "Comprehension", - pos: position{line: 515, col: 1, offset: 13861}, + pos: position{line: 529, col: 1, offset: 14543}, expr: &choiceExpr{ - pos: position{line: 515, col: 18, offset: 13878}, + pos: position{line: 529, col: 18, offset: 14560}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 515, col: 18, offset: 13878}, + pos: position{line: 529, col: 18, offset: 14560}, name: "ArrayComprehension", }, &ruleRefExpr{ - pos: position{line: 515, col: 39, offset: 13899}, + pos: position{line: 529, col: 39, offset: 14581}, name: "ObjectComprehension", }, &ruleRefExpr{ - pos: position{line: 515, col: 61, offset: 13921}, + pos: position{line: 529, col: 61, offset: 14603}, name: "SetComprehension", }, }, @@ -1663,57 +1800,57 @@ var g = &grammar{ }, { name: "ArrayComprehension", - pos: position{line: 517, col: 1, offset: 13939}, + pos: position{line: 531, col: 1, offset: 14621}, expr: &actionExpr{ - pos: position{line: 517, col: 23, offset: 13961}, + pos: position{line: 531, col: 23, offset: 14643}, run: (*parser).callonArrayComprehension1, expr: &seqExpr{ - pos: position{line: 517, col: 23, offset: 13961}, + pos: position{line: 531, col: 23, offset: 14643}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 517, col: 23, offset: 13961}, + pos: position{line: 531, col: 23, offset: 14643}, val: "[", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 517, col: 27, offset: 13965}, + pos: position{line: 531, col: 27, offset: 14647}, name: "_", }, &labeledExpr{ - pos: position{line: 517, col: 29, offset: 13967}, + pos: position{line: 531, col: 29, offset: 14649}, label: "term", expr: &ruleRefExpr{ - pos: position{line: 517, col: 34, offset: 13972}, + pos: position{line: 531, col: 34, offset: 14654}, name: "Term", }, }, &ruleRefExpr{ - pos: position{line: 517, col: 39, offset: 13977}, + pos: position{line: 531, col: 39, offset: 14659}, name: "_", }, &litMatcher{ - pos: position{line: 517, col: 41, offset: 13979}, + pos: position{line: 531, col: 41, offset: 14661}, 
val: "|", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 517, col: 45, offset: 13983}, + pos: position{line: 531, col: 45, offset: 14665}, name: "_", }, &labeledExpr{ - pos: position{line: 517, col: 47, offset: 13985}, + pos: position{line: 531, col: 47, offset: 14667}, label: "body", expr: &ruleRefExpr{ - pos: position{line: 517, col: 52, offset: 13990}, + pos: position{line: 531, col: 52, offset: 14672}, name: "WhitespaceBody", }, }, &ruleRefExpr{ - pos: position{line: 517, col: 67, offset: 14005}, + pos: position{line: 531, col: 67, offset: 14687}, name: "_", }, &litMatcher{ - pos: position{line: 517, col: 69, offset: 14007}, + pos: position{line: 531, col: 69, offset: 14689}, val: "]", ignoreCase: false, }, @@ -1723,327 +1860,78 @@ var g = &grammar{ }, { name: "ObjectComprehension", - pos: position{line: 523, col: 1, offset: 14132}, + pos: position{line: 537, col: 1, offset: 14814}, expr: &actionExpr{ - pos: position{line: 523, col: 24, offset: 14155}, + pos: position{line: 537, col: 24, offset: 14837}, run: (*parser).callonObjectComprehension1, expr: &seqExpr{ - pos: position{line: 523, col: 24, offset: 14155}, - exprs: []interface{}{ - &litMatcher{ - pos: position{line: 523, col: 24, offset: 14155}, - val: "{", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 28, offset: 14159}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 523, col: 30, offset: 14161}, - label: "key", - expr: &ruleRefExpr{ - pos: position{line: 523, col: 34, offset: 14165}, - name: "Key", - }, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 38, offset: 14169}, - name: "_", - }, - &litMatcher{ - pos: position{line: 523, col: 40, offset: 14171}, - val: ":", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 44, offset: 14175}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 523, col: 46, offset: 14177}, - label: "value", - expr: &ruleRefExpr{ - pos: position{line: 523, col: 52, offset: 14183}, - name: "Term", - }, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 58, offset: 14189}, - name: "_", - }, - &litMatcher{ - pos: position{line: 523, col: 60, offset: 14191}, - val: "|", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 64, offset: 14195}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 523, col: 66, offset: 14197}, - label: "body", - expr: &ruleRefExpr{ - pos: position{line: 523, col: 71, offset: 14202}, - name: "WhitespaceBody", - }, - }, - &ruleRefExpr{ - pos: position{line: 523, col: 86, offset: 14217}, - name: "_", - }, - &litMatcher{ - pos: position{line: 523, col: 88, offset: 14219}, - val: "}", - ignoreCase: false, - }, - }, - }, - }, - }, - { - name: "SetComprehension", - pos: position{line: 529, col: 1, offset: 14359}, - expr: &actionExpr{ - pos: position{line: 529, col: 21, offset: 14379}, - run: (*parser).callonSetComprehension1, - expr: &seqExpr{ - pos: position{line: 529, col: 21, offset: 14379}, - exprs: []interface{}{ - &litMatcher{ - pos: position{line: 529, col: 21, offset: 14379}, - val: "{", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 529, col: 25, offset: 14383}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 529, col: 27, offset: 14385}, - label: "term", - expr: &ruleRefExpr{ - pos: position{line: 529, col: 32, offset: 14390}, - name: "Term", - }, - }, - &ruleRefExpr{ - pos: position{line: 529, col: 37, offset: 14395}, - name: "_", - }, - &litMatcher{ - pos: position{line: 529, col: 39, offset: 14397}, - val: "|", - ignoreCase: false, - }, - 
&ruleRefExpr{ - pos: position{line: 529, col: 43, offset: 14401}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 529, col: 45, offset: 14403}, - label: "body", - expr: &ruleRefExpr{ - pos: position{line: 529, col: 50, offset: 14408}, - name: "WhitespaceBody", - }, - }, - &ruleRefExpr{ - pos: position{line: 529, col: 65, offset: 14423}, - name: "_", - }, - &litMatcher{ - pos: position{line: 529, col: 67, offset: 14425}, - val: "}", - ignoreCase: false, - }, - }, - }, - }, - }, - { - name: "Composite", - pos: position{line: 535, col: 1, offset: 14548}, - expr: &choiceExpr{ - pos: position{line: 535, col: 14, offset: 14561}, - alternatives: []interface{}{ - &ruleRefExpr{ - pos: position{line: 535, col: 14, offset: 14561}, - name: "Object", - }, - &ruleRefExpr{ - pos: position{line: 535, col: 23, offset: 14570}, - name: "Array", - }, - &ruleRefExpr{ - pos: position{line: 535, col: 31, offset: 14578}, - name: "Set", - }, - }, - }, - }, - { - name: "Scalar", - pos: position{line: 537, col: 1, offset: 14583}, - expr: &choiceExpr{ - pos: position{line: 537, col: 11, offset: 14593}, - alternatives: []interface{}{ - &ruleRefExpr{ - pos: position{line: 537, col: 11, offset: 14593}, - name: "Number", - }, - &ruleRefExpr{ - pos: position{line: 537, col: 20, offset: 14602}, - name: "String", - }, - &ruleRefExpr{ - pos: position{line: 537, col: 29, offset: 14611}, - name: "Bool", - }, - &ruleRefExpr{ - pos: position{line: 537, col: 36, offset: 14618}, - name: "Null", - }, - }, - }, - }, - { - name: "Key", - pos: position{line: 539, col: 1, offset: 14624}, - expr: &choiceExpr{ - pos: position{line: 539, col: 8, offset: 14631}, - alternatives: []interface{}{ - &ruleRefExpr{ - pos: position{line: 539, col: 8, offset: 14631}, - name: "Scalar", - }, - &ruleRefExpr{ - pos: position{line: 539, col: 17, offset: 14640}, - name: "Ref", - }, - &ruleRefExpr{ - pos: position{line: 539, col: 23, offset: 14646}, - name: "Var", - }, - }, - }, - }, - { - name: "Object", - pos: position{line: 541, col: 1, offset: 14651}, - expr: &actionExpr{ - pos: position{line: 541, col: 11, offset: 14661}, - run: (*parser).callonObject1, - expr: &seqExpr{ - pos: position{line: 541, col: 11, offset: 14661}, - exprs: []interface{}{ - &litMatcher{ - pos: position{line: 541, col: 11, offset: 14661}, - val: "{", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 541, col: 15, offset: 14665}, - name: "_", - }, - &labeledExpr{ - pos: position{line: 541, col: 17, offset: 14667}, - label: "head", - expr: &zeroOrOneExpr{ - pos: position{line: 541, col: 22, offset: 14672}, - expr: &seqExpr{ - pos: position{line: 541, col: 23, offset: 14673}, - exprs: []interface{}{ - &ruleRefExpr{ - pos: position{line: 541, col: 23, offset: 14673}, - name: "Key", - }, - &ruleRefExpr{ - pos: position{line: 541, col: 27, offset: 14677}, - name: "_", - }, - &litMatcher{ - pos: position{line: 541, col: 29, offset: 14679}, - val: ":", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 541, col: 33, offset: 14683}, - name: "_", - }, - &ruleRefExpr{ - pos: position{line: 541, col: 35, offset: 14685}, - name: "Term", - }, - }, - }, - }, - }, - &labeledExpr{ - pos: position{line: 541, col: 42, offset: 14692}, - label: "tail", - expr: &zeroOrMoreExpr{ - pos: position{line: 541, col: 47, offset: 14697}, - expr: &seqExpr{ - pos: position{line: 541, col: 49, offset: 14699}, - exprs: []interface{}{ - &ruleRefExpr{ - pos: position{line: 541, col: 49, offset: 14699}, - name: "_", - }, - &litMatcher{ - pos: position{line: 541, col: 51, offset: 
14701}, - val: ",", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 541, col: 55, offset: 14705}, - name: "_", - }, - &ruleRefExpr{ - pos: position{line: 541, col: 57, offset: 14707}, - name: "Key", - }, - &ruleRefExpr{ - pos: position{line: 541, col: 61, offset: 14711}, - name: "_", - }, - &litMatcher{ - pos: position{line: 541, col: 63, offset: 14713}, - val: ":", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 541, col: 67, offset: 14717}, - name: "_", - }, - &ruleRefExpr{ - pos: position{line: 541, col: 69, offset: 14719}, - name: "Term", - }, - }, - }, + pos: position{line: 537, col: 24, offset: 14837}, + exprs: []interface{}{ + &litMatcher{ + pos: position{line: 537, col: 24, offset: 14837}, + val: "{", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 537, col: 28, offset: 14841}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 537, col: 30, offset: 14843}, + label: "key", + expr: &ruleRefExpr{ + pos: position{line: 537, col: 34, offset: 14847}, + name: "Key", }, }, &ruleRefExpr{ - pos: position{line: 541, col: 77, offset: 14727}, + pos: position{line: 537, col: 38, offset: 14851}, name: "_", }, - &zeroOrOneExpr{ - pos: position{line: 541, col: 79, offset: 14729}, - expr: &litMatcher{ - pos: position{line: 541, col: 79, offset: 14729}, - val: ",", - ignoreCase: false, + &litMatcher{ + pos: position{line: 537, col: 40, offset: 14853}, + val: ":", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 537, col: 44, offset: 14857}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 537, col: 46, offset: 14859}, + label: "value", + expr: &ruleRefExpr{ + pos: position{line: 537, col: 52, offset: 14865}, + name: "Term", + }, + }, + &ruleRefExpr{ + pos: position{line: 537, col: 58, offset: 14871}, + name: "_", + }, + &litMatcher{ + pos: position{line: 537, col: 60, offset: 14873}, + val: "|", + ignoreCase: false, + }, + &ruleRefExpr{ + pos: position{line: 537, col: 64, offset: 14877}, + name: "_", + }, + &labeledExpr{ + pos: position{line: 537, col: 66, offset: 14879}, + label: "body", + expr: &ruleRefExpr{ + pos: position{line: 537, col: 71, offset: 14884}, + name: "WhitespaceBody", }, }, &ruleRefExpr{ - pos: position{line: 541, col: 84, offset: 14734}, + pos: position{line: 537, col: 86, offset: 14899}, name: "_", }, &litMatcher{ - pos: position{line: 541, col: 86, offset: 14736}, + pos: position{line: 537, col: 88, offset: 14901}, val: "}", ignoreCase: false, }, @@ -2052,82 +1940,59 @@ var g = &grammar{ }, }, { - name: "Array", - pos: position{line: 545, col: 1, offset: 14799}, + name: "SetComprehension", + pos: position{line: 543, col: 1, offset: 15041}, expr: &actionExpr{ - pos: position{line: 545, col: 10, offset: 14808}, - run: (*parser).callonArray1, + pos: position{line: 543, col: 21, offset: 15061}, + run: (*parser).callonSetComprehension1, expr: &seqExpr{ - pos: position{line: 545, col: 10, offset: 14808}, + pos: position{line: 543, col: 21, offset: 15061}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 545, col: 10, offset: 14808}, - val: "[", + pos: position{line: 543, col: 21, offset: 15061}, + val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 545, col: 14, offset: 14812}, + pos: position{line: 543, col: 25, offset: 15065}, name: "_", }, &labeledExpr{ - pos: position{line: 545, col: 17, offset: 14815}, - label: "head", - expr: &zeroOrOneExpr{ - pos: position{line: 545, col: 22, offset: 14820}, - expr: &ruleRefExpr{ - pos: position{line: 545, col: 22, offset: 14820}, - name: "Term", 
- }, + pos: position{line: 543, col: 27, offset: 15067}, + label: "term", + expr: &ruleRefExpr{ + pos: position{line: 543, col: 32, offset: 15072}, + name: "Term", }, }, - &labeledExpr{ - pos: position{line: 545, col: 28, offset: 14826}, - label: "tail", - expr: &zeroOrMoreExpr{ - pos: position{line: 545, col: 33, offset: 14831}, - expr: &seqExpr{ - pos: position{line: 545, col: 34, offset: 14832}, - exprs: []interface{}{ - &ruleRefExpr{ - pos: position{line: 545, col: 34, offset: 14832}, - name: "_", - }, - &litMatcher{ - pos: position{line: 545, col: 36, offset: 14834}, - val: ",", - ignoreCase: false, - }, - &ruleRefExpr{ - pos: position{line: 545, col: 40, offset: 14838}, - name: "_", - }, - &ruleRefExpr{ - pos: position{line: 545, col: 42, offset: 14840}, - name: "Term", - }, - }, - }, - }, + &ruleRefExpr{ + pos: position{line: 543, col: 37, offset: 15077}, + name: "_", + }, + &litMatcher{ + pos: position{line: 543, col: 39, offset: 15079}, + val: "|", + ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 545, col: 49, offset: 14847}, + pos: position{line: 543, col: 43, offset: 15083}, name: "_", }, - &zeroOrOneExpr{ - pos: position{line: 545, col: 51, offset: 14849}, - expr: &litMatcher{ - pos: position{line: 545, col: 51, offset: 14849}, - val: ",", - ignoreCase: false, + &labeledExpr{ + pos: position{line: 543, col: 45, offset: 15085}, + label: "body", + expr: &ruleRefExpr{ + pos: position{line: 543, col: 50, offset: 15090}, + name: "WhitespaceBody", }, }, &ruleRefExpr{ - pos: position{line: 545, col: 56, offset: 14854}, + pos: position{line: 543, col: 65, offset: 15105}, name: "_", }, &litMatcher{ - pos: position{line: 545, col: 59, offset: 14857}, - val: "]", + pos: position{line: 543, col: 67, offset: 15107}, + val: "}", ignoreCase: false, }, }, @@ -2135,153 +2000,187 @@ var g = &grammar{ }, }, { - name: "ArgTerm", - pos: position{line: 554, col: 1, offset: 15252}, - expr: &actionExpr{ - pos: position{line: 554, col: 12, offset: 15263}, - run: (*parser).callonArgTerm1, - expr: &labeledExpr{ - pos: position{line: 554, col: 12, offset: 15263}, - label: "val", - expr: &choiceExpr{ - pos: position{line: 554, col: 17, offset: 15268}, - alternatives: []interface{}{ - &ruleRefExpr{ - pos: position{line: 554, col: 17, offset: 15268}, - name: "Scalar", - }, - &ruleRefExpr{ - pos: position{line: 554, col: 26, offset: 15277}, - name: "Var", - }, - &ruleRefExpr{ - pos: position{line: 554, col: 32, offset: 15283}, - name: "ArgObject", - }, - &ruleRefExpr{ - pos: position{line: 554, col: 44, offset: 15295}, - name: "ArgArray", - }, - }, + name: "Composite", + pos: position{line: 549, col: 1, offset: 15230}, + expr: &choiceExpr{ + pos: position{line: 549, col: 14, offset: 15243}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 549, col: 14, offset: 15243}, + name: "Object", + }, + &ruleRefExpr{ + pos: position{line: 549, col: 23, offset: 15252}, + name: "Array", + }, + &ruleRefExpr{ + pos: position{line: 549, col: 31, offset: 15260}, + name: "Set", + }, + }, + }, + }, + { + name: "Scalar", + pos: position{line: 551, col: 1, offset: 15265}, + expr: &choiceExpr{ + pos: position{line: 551, col: 11, offset: 15275}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 551, col: 11, offset: 15275}, + name: "Number", + }, + &ruleRefExpr{ + pos: position{line: 551, col: 20, offset: 15284}, + name: "String", + }, + &ruleRefExpr{ + pos: position{line: 551, col: 29, offset: 15293}, + name: "Bool", + }, + &ruleRefExpr{ + pos: position{line: 551, col: 36, offset: 
15300}, + name: "Null", + }, + }, + }, + }, + { + name: "Key", + pos: position{line: 553, col: 1, offset: 15306}, + expr: &choiceExpr{ + pos: position{line: 553, col: 8, offset: 15313}, + alternatives: []interface{}{ + &ruleRefExpr{ + pos: position{line: 553, col: 8, offset: 15313}, + name: "Scalar", + }, + &ruleRefExpr{ + pos: position{line: 553, col: 17, offset: 15322}, + name: "Ref", + }, + &ruleRefExpr{ + pos: position{line: 553, col: 23, offset: 15328}, + name: "Var", }, }, }, }, { - name: "ArgObject", - pos: position{line: 558, col: 1, offset: 15330}, + name: "Object", + pos: position{line: 555, col: 1, offset: 15333}, expr: &actionExpr{ - pos: position{line: 558, col: 14, offset: 15343}, - run: (*parser).callonArgObject1, + pos: position{line: 555, col: 11, offset: 15343}, + run: (*parser).callonObject1, expr: &seqExpr{ - pos: position{line: 558, col: 14, offset: 15343}, + pos: position{line: 555, col: 11, offset: 15343}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 558, col: 14, offset: 15343}, + pos: position{line: 555, col: 11, offset: 15343}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 558, col: 18, offset: 15347}, + pos: position{line: 555, col: 15, offset: 15347}, name: "_", }, &labeledExpr{ - pos: position{line: 558, col: 20, offset: 15349}, + pos: position{line: 555, col: 17, offset: 15349}, label: "head", expr: &zeroOrOneExpr{ - pos: position{line: 558, col: 25, offset: 15354}, + pos: position{line: 555, col: 22, offset: 15354}, expr: &seqExpr{ - pos: position{line: 558, col: 26, offset: 15355}, + pos: position{line: 555, col: 23, offset: 15355}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 558, col: 26, offset: 15355}, - name: "ArgKey", + pos: position{line: 555, col: 23, offset: 15355}, + name: "Key", }, &ruleRefExpr{ - pos: position{line: 558, col: 33, offset: 15362}, + pos: position{line: 555, col: 27, offset: 15359}, name: "_", }, &litMatcher{ - pos: position{line: 558, col: 35, offset: 15364}, + pos: position{line: 555, col: 29, offset: 15361}, val: ":", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 558, col: 39, offset: 15368}, + pos: position{line: 555, col: 33, offset: 15365}, name: "_", }, &ruleRefExpr{ - pos: position{line: 558, col: 41, offset: 15370}, - name: "ArgTerm", + pos: position{line: 555, col: 35, offset: 15367}, + name: "Term", }, }, }, }, }, &labeledExpr{ - pos: position{line: 558, col: 51, offset: 15380}, + pos: position{line: 555, col: 42, offset: 15374}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 558, col: 56, offset: 15385}, + pos: position{line: 555, col: 47, offset: 15379}, expr: &seqExpr{ - pos: position{line: 558, col: 58, offset: 15387}, + pos: position{line: 555, col: 49, offset: 15381}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 558, col: 58, offset: 15387}, + pos: position{line: 555, col: 49, offset: 15381}, name: "_", }, &litMatcher{ - pos: position{line: 558, col: 60, offset: 15389}, + pos: position{line: 555, col: 51, offset: 15383}, val: ",", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 558, col: 64, offset: 15393}, + pos: position{line: 555, col: 55, offset: 15387}, name: "_", }, &ruleRefExpr{ - pos: position{line: 558, col: 66, offset: 15395}, - name: "ArgKey", + pos: position{line: 555, col: 57, offset: 15389}, + name: "Key", }, &ruleRefExpr{ - pos: position{line: 558, col: 73, offset: 15402}, + pos: position{line: 555, col: 61, offset: 15393}, name: "_", }, &litMatcher{ - pos: position{line: 558, col: 75, offset: 15404}, + pos: 
position{line: 555, col: 63, offset: 15395}, val: ":", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 558, col: 79, offset: 15408}, + pos: position{line: 555, col: 67, offset: 15399}, name: "_", }, &ruleRefExpr{ - pos: position{line: 558, col: 81, offset: 15410}, - name: "ArgTerm", + pos: position{line: 555, col: 69, offset: 15401}, + name: "Term", }, }, }, }, }, &ruleRefExpr{ - pos: position{line: 558, col: 92, offset: 15421}, + pos: position{line: 555, col: 77, offset: 15409}, name: "_", }, &zeroOrOneExpr{ - pos: position{line: 558, col: 94, offset: 15423}, + pos: position{line: 555, col: 79, offset: 15411}, expr: &litMatcher{ - pos: position{line: 558, col: 94, offset: 15423}, + pos: position{line: 555, col: 79, offset: 15411}, val: ",", ignoreCase: false, }, }, &ruleRefExpr{ - pos: position{line: 558, col: 99, offset: 15428}, + pos: position{line: 555, col: 84, offset: 15416}, name: "_", }, &litMatcher{ - pos: position{line: 558, col: 101, offset: 15430}, + pos: position{line: 555, col: 86, offset: 15418}, val: "}", ignoreCase: false, }, @@ -2290,89 +2189,81 @@ var g = &grammar{ }, }, { - name: "ArgKey", - pos: position{line: 562, col: 1, offset: 15493}, - expr: &ruleRefExpr{ - pos: position{line: 562, col: 11, offset: 15503}, - name: "Scalar", - }, - }, - { - name: "ArgArray", - pos: position{line: 564, col: 1, offset: 15511}, + name: "Array", + pos: position{line: 559, col: 1, offset: 15481}, expr: &actionExpr{ - pos: position{line: 564, col: 13, offset: 15523}, - run: (*parser).callonArgArray1, + pos: position{line: 559, col: 10, offset: 15490}, + run: (*parser).callonArray1, expr: &seqExpr{ - pos: position{line: 564, col: 13, offset: 15523}, + pos: position{line: 559, col: 10, offset: 15490}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 564, col: 13, offset: 15523}, + pos: position{line: 559, col: 10, offset: 15490}, val: "[", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 564, col: 17, offset: 15527}, + pos: position{line: 559, col: 14, offset: 15494}, name: "_", }, &labeledExpr{ - pos: position{line: 564, col: 20, offset: 15530}, + pos: position{line: 559, col: 17, offset: 15497}, label: "head", expr: &zeroOrOneExpr{ - pos: position{line: 564, col: 25, offset: 15535}, + pos: position{line: 559, col: 22, offset: 15502}, expr: &ruleRefExpr{ - pos: position{line: 564, col: 25, offset: 15535}, - name: "ArgTerm", + pos: position{line: 559, col: 22, offset: 15502}, + name: "Term", }, }, }, &labeledExpr{ - pos: position{line: 564, col: 34, offset: 15544}, + pos: position{line: 559, col: 28, offset: 15508}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 564, col: 39, offset: 15549}, + pos: position{line: 559, col: 33, offset: 15513}, expr: &seqExpr{ - pos: position{line: 564, col: 40, offset: 15550}, + pos: position{line: 559, col: 34, offset: 15514}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 564, col: 40, offset: 15550}, + pos: position{line: 559, col: 34, offset: 15514}, name: "_", }, &litMatcher{ - pos: position{line: 564, col: 42, offset: 15552}, + pos: position{line: 559, col: 36, offset: 15516}, val: ",", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 564, col: 46, offset: 15556}, + pos: position{line: 559, col: 40, offset: 15520}, name: "_", }, &ruleRefExpr{ - pos: position{line: 564, col: 48, offset: 15558}, - name: "ArgTerm", + pos: position{line: 559, col: 42, offset: 15522}, + name: "Term", }, }, }, }, }, &ruleRefExpr{ - pos: position{line: 564, col: 58, offset: 15568}, + pos: position{line: 559, col: 
49, offset: 15529}, name: "_", }, &zeroOrOneExpr{ - pos: position{line: 564, col: 60, offset: 15570}, + pos: position{line: 559, col: 51, offset: 15531}, expr: &litMatcher{ - pos: position{line: 564, col: 60, offset: 15570}, + pos: position{line: 559, col: 51, offset: 15531}, val: ",", ignoreCase: false, }, }, &ruleRefExpr{ - pos: position{line: 564, col: 65, offset: 15575}, + pos: position{line: 559, col: 56, offset: 15536}, name: "_", }, &litMatcher{ - pos: position{line: 564, col: 68, offset: 15578}, + pos: position{line: 559, col: 59, offset: 15539}, val: "]", ignoreCase: false, }, @@ -2382,16 +2273,16 @@ var g = &grammar{ }, { name: "Set", - pos: position{line: 568, col: 1, offset: 15640}, + pos: position{line: 563, col: 1, offset: 15601}, expr: &choiceExpr{ - pos: position{line: 568, col: 8, offset: 15647}, + pos: position{line: 563, col: 8, offset: 15608}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 568, col: 8, offset: 15647}, + pos: position{line: 563, col: 8, offset: 15608}, name: "SetEmpty", }, &ruleRefExpr{ - pos: position{line: 568, col: 19, offset: 15658}, + pos: position{line: 563, col: 19, offset: 15619}, name: "SetNonEmpty", }, }, @@ -2399,24 +2290,24 @@ var g = &grammar{ }, { name: "SetEmpty", - pos: position{line: 570, col: 1, offset: 15671}, + pos: position{line: 565, col: 1, offset: 15632}, expr: &actionExpr{ - pos: position{line: 570, col: 13, offset: 15683}, + pos: position{line: 565, col: 13, offset: 15644}, run: (*parser).callonSetEmpty1, expr: &seqExpr{ - pos: position{line: 570, col: 13, offset: 15683}, + pos: position{line: 565, col: 13, offset: 15644}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 570, col: 13, offset: 15683}, + pos: position{line: 565, col: 13, offset: 15644}, val: "set(", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 570, col: 20, offset: 15690}, + pos: position{line: 565, col: 20, offset: 15651}, name: "_", }, &litMatcher{ - pos: position{line: 570, col: 22, offset: 15692}, + pos: position{line: 565, col: 22, offset: 15653}, val: ")", ignoreCase: false, }, @@ -2426,53 +2317,53 @@ var g = &grammar{ }, { name: "SetNonEmpty", - pos: position{line: 576, col: 1, offset: 15780}, + pos: position{line: 571, col: 1, offset: 15741}, expr: &actionExpr{ - pos: position{line: 576, col: 16, offset: 15795}, + pos: position{line: 571, col: 16, offset: 15756}, run: (*parser).callonSetNonEmpty1, expr: &seqExpr{ - pos: position{line: 576, col: 16, offset: 15795}, + pos: position{line: 571, col: 16, offset: 15756}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 576, col: 16, offset: 15795}, + pos: position{line: 571, col: 16, offset: 15756}, val: "{", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 576, col: 20, offset: 15799}, + pos: position{line: 571, col: 20, offset: 15760}, name: "_", }, &labeledExpr{ - pos: position{line: 576, col: 22, offset: 15801}, + pos: position{line: 571, col: 22, offset: 15762}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 576, col: 27, offset: 15806}, + pos: position{line: 571, col: 27, offset: 15767}, name: "Term", }, }, &labeledExpr{ - pos: position{line: 576, col: 32, offset: 15811}, + pos: position{line: 571, col: 32, offset: 15772}, label: "tail", expr: &zeroOrMoreExpr{ - pos: position{line: 576, col: 37, offset: 15816}, + pos: position{line: 571, col: 37, offset: 15777}, expr: &seqExpr{ - pos: position{line: 576, col: 38, offset: 15817}, + pos: position{line: 571, col: 38, offset: 15778}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 576, 
col: 38, offset: 15817}, + pos: position{line: 571, col: 38, offset: 15778}, name: "_", }, &litMatcher{ - pos: position{line: 576, col: 40, offset: 15819}, + pos: position{line: 571, col: 40, offset: 15780}, val: ",", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 576, col: 44, offset: 15823}, + pos: position{line: 571, col: 44, offset: 15784}, name: "_", }, &ruleRefExpr{ - pos: position{line: 576, col: 46, offset: 15825}, + pos: position{line: 571, col: 46, offset: 15786}, name: "Term", }, }, @@ -2480,23 +2371,23 @@ var g = &grammar{ }, }, &ruleRefExpr{ - pos: position{line: 576, col: 53, offset: 15832}, + pos: position{line: 571, col: 53, offset: 15793}, name: "_", }, &zeroOrOneExpr{ - pos: position{line: 576, col: 55, offset: 15834}, + pos: position{line: 571, col: 55, offset: 15795}, expr: &litMatcher{ - pos: position{line: 576, col: 55, offset: 15834}, + pos: position{line: 571, col: 55, offset: 15795}, val: ",", ignoreCase: false, }, }, &ruleRefExpr{ - pos: position{line: 576, col: 60, offset: 15839}, + pos: position{line: 571, col: 60, offset: 15800}, name: "_", }, &litMatcher{ - pos: position{line: 576, col: 62, offset: 15841}, + pos: position{line: 571, col: 62, offset: 15802}, val: "}", ignoreCase: false, }, @@ -2506,35 +2397,35 @@ var g = &grammar{ }, { name: "Ref", - pos: position{line: 593, col: 1, offset: 16246}, + pos: position{line: 588, col: 1, offset: 16207}, expr: &actionExpr{ - pos: position{line: 593, col: 8, offset: 16253}, + pos: position{line: 588, col: 8, offset: 16214}, run: (*parser).callonRef1, expr: &seqExpr{ - pos: position{line: 593, col: 8, offset: 16253}, + pos: position{line: 588, col: 8, offset: 16214}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 593, col: 8, offset: 16253}, + pos: position{line: 588, col: 8, offset: 16214}, label: "head", expr: &ruleRefExpr{ - pos: position{line: 593, col: 13, offset: 16258}, + pos: position{line: 588, col: 13, offset: 16219}, name: "Var", }, }, &labeledExpr{ - pos: position{line: 593, col: 17, offset: 16262}, + pos: position{line: 588, col: 17, offset: 16223}, label: "tail", expr: &oneOrMoreExpr{ - pos: position{line: 593, col: 22, offset: 16267}, + pos: position{line: 588, col: 22, offset: 16228}, expr: &choiceExpr{ - pos: position{line: 593, col: 24, offset: 16269}, + pos: position{line: 588, col: 24, offset: 16230}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 593, col: 24, offset: 16269}, + pos: position{line: 588, col: 24, offset: 16230}, name: "RefDot", }, &ruleRefExpr{ - pos: position{line: 593, col: 33, offset: 16278}, + pos: position{line: 588, col: 33, offset: 16239}, name: "RefBracket", }, }, @@ -2547,23 +2438,23 @@ var g = &grammar{ }, { name: "RefDot", - pos: position{line: 606, col: 1, offset: 16517}, + pos: position{line: 601, col: 1, offset: 16478}, expr: &actionExpr{ - pos: position{line: 606, col: 11, offset: 16527}, + pos: position{line: 601, col: 11, offset: 16488}, run: (*parser).callonRefDot1, expr: &seqExpr{ - pos: position{line: 606, col: 11, offset: 16527}, + pos: position{line: 601, col: 11, offset: 16488}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 606, col: 11, offset: 16527}, + pos: position{line: 601, col: 11, offset: 16488}, val: ".", ignoreCase: false, }, &labeledExpr{ - pos: position{line: 606, col: 15, offset: 16531}, + pos: position{line: 601, col: 15, offset: 16492}, label: "val", expr: &ruleRefExpr{ - pos: position{line: 606, col: 19, offset: 16535}, + pos: position{line: 601, col: 19, offset: 16496}, name: "Var", }, }, @@ 
-2573,45 +2464,45 @@ var g = &grammar{ }, { name: "RefBracket", - pos: position{line: 613, col: 1, offset: 16754}, + pos: position{line: 608, col: 1, offset: 16715}, expr: &actionExpr{ - pos: position{line: 613, col: 15, offset: 16768}, + pos: position{line: 608, col: 15, offset: 16729}, run: (*parser).callonRefBracket1, expr: &seqExpr{ - pos: position{line: 613, col: 15, offset: 16768}, + pos: position{line: 608, col: 15, offset: 16729}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 613, col: 15, offset: 16768}, + pos: position{line: 608, col: 15, offset: 16729}, val: "[", ignoreCase: false, }, &labeledExpr{ - pos: position{line: 613, col: 19, offset: 16772}, + pos: position{line: 608, col: 19, offset: 16733}, label: "val", expr: &choiceExpr{ - pos: position{line: 613, col: 24, offset: 16777}, + pos: position{line: 608, col: 24, offset: 16738}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 613, col: 24, offset: 16777}, + pos: position{line: 608, col: 24, offset: 16738}, name: "Composite", }, &ruleRefExpr{ - pos: position{line: 613, col: 36, offset: 16789}, + pos: position{line: 608, col: 36, offset: 16750}, name: "Ref", }, &ruleRefExpr{ - pos: position{line: 613, col: 42, offset: 16795}, + pos: position{line: 608, col: 42, offset: 16756}, name: "Scalar", }, &ruleRefExpr{ - pos: position{line: 613, col: 51, offset: 16804}, + pos: position{line: 608, col: 51, offset: 16765}, name: "Var", }, }, }, }, &litMatcher{ - pos: position{line: 613, col: 56, offset: 16809}, + pos: position{line: 608, col: 56, offset: 16770}, val: "]", ignoreCase: false, }, @@ -2621,15 +2512,15 @@ var g = &grammar{ }, { name: "Var", - pos: position{line: 617, col: 1, offset: 16838}, + pos: position{line: 612, col: 1, offset: 16799}, expr: &actionExpr{ - pos: position{line: 617, col: 8, offset: 16845}, + pos: position{line: 612, col: 8, offset: 16806}, run: (*parser).callonVar1, expr: &labeledExpr{ - pos: position{line: 617, col: 8, offset: 16845}, + pos: position{line: 612, col: 8, offset: 16806}, label: "val", expr: &ruleRefExpr{ - pos: position{line: 617, col: 12, offset: 16849}, + pos: position{line: 612, col: 12, offset: 16810}, name: "VarChecked", }, }, @@ -2637,20 +2528,20 @@ var g = &grammar{ }, { name: "VarChecked", - pos: position{line: 622, col: 1, offset: 16971}, + pos: position{line: 617, col: 1, offset: 16932}, expr: &seqExpr{ - pos: position{line: 622, col: 15, offset: 16985}, + pos: position{line: 617, col: 15, offset: 16946}, exprs: []interface{}{ &labeledExpr{ - pos: position{line: 622, col: 15, offset: 16985}, + pos: position{line: 617, col: 15, offset: 16946}, label: "val", expr: &ruleRefExpr{ - pos: position{line: 622, col: 19, offset: 16989}, + pos: position{line: 617, col: 19, offset: 16950}, name: "VarUnchecked", }, }, ¬CodeExpr{ - pos: position{line: 622, col: 32, offset: 17002}, + pos: position{line: 617, col: 32, offset: 16963}, run: (*parser).callonVarChecked4, }, }, @@ -2658,28 +2549,28 @@ var g = &grammar{ }, { name: "VarUnchecked", - pos: position{line: 626, col: 1, offset: 17067}, + pos: position{line: 621, col: 1, offset: 17028}, expr: &actionExpr{ - pos: position{line: 626, col: 17, offset: 17083}, + pos: position{line: 621, col: 17, offset: 17044}, run: (*parser).callonVarUnchecked1, expr: &seqExpr{ - pos: position{line: 626, col: 17, offset: 17083}, + pos: position{line: 621, col: 17, offset: 17044}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 626, col: 17, offset: 17083}, + pos: position{line: 621, col: 17, offset: 17044}, name: 
"AsciiLetter", }, &zeroOrMoreExpr{ - pos: position{line: 626, col: 29, offset: 17095}, + pos: position{line: 621, col: 29, offset: 17056}, expr: &choiceExpr{ - pos: position{line: 626, col: 30, offset: 17096}, + pos: position{line: 621, col: 30, offset: 17057}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 626, col: 30, offset: 17096}, + pos: position{line: 621, col: 30, offset: 17057}, name: "AsciiLetter", }, &ruleRefExpr{ - pos: position{line: 626, col: 44, offset: 17110}, + pos: position{line: 621, col: 44, offset: 17071}, name: "DecimalDigit", }, }, @@ -2691,30 +2582,30 @@ var g = &grammar{ }, { name: "Number", - pos: position{line: 633, col: 1, offset: 17253}, + pos: position{line: 628, col: 1, offset: 17214}, expr: &actionExpr{ - pos: position{line: 633, col: 11, offset: 17263}, + pos: position{line: 628, col: 11, offset: 17224}, run: (*parser).callonNumber1, expr: &seqExpr{ - pos: position{line: 633, col: 11, offset: 17263}, + pos: position{line: 628, col: 11, offset: 17224}, exprs: []interface{}{ &zeroOrOneExpr{ - pos: position{line: 633, col: 11, offset: 17263}, + pos: position{line: 628, col: 11, offset: 17224}, expr: &litMatcher{ - pos: position{line: 633, col: 11, offset: 17263}, + pos: position{line: 628, col: 11, offset: 17224}, val: "-", ignoreCase: false, }, }, &choiceExpr{ - pos: position{line: 633, col: 18, offset: 17270}, + pos: position{line: 628, col: 18, offset: 17231}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 633, col: 18, offset: 17270}, + pos: position{line: 628, col: 18, offset: 17231}, name: "Float", }, &ruleRefExpr{ - pos: position{line: 633, col: 26, offset: 17278}, + pos: position{line: 628, col: 26, offset: 17239}, name: "Integer", }, }, @@ -2725,16 +2616,16 @@ var g = &grammar{ }, { name: "Float", - pos: position{line: 646, col: 1, offset: 17669}, + pos: position{line: 641, col: 1, offset: 17630}, expr: &choiceExpr{ - pos: position{line: 646, col: 10, offset: 17678}, + pos: position{line: 641, col: 10, offset: 17639}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 646, col: 10, offset: 17678}, + pos: position{line: 641, col: 10, offset: 17639}, name: "ExponentFloat", }, &ruleRefExpr{ - pos: position{line: 646, col: 26, offset: 17694}, + pos: position{line: 641, col: 26, offset: 17655}, name: "PointFloat", }, }, @@ -2742,25 +2633,25 @@ var g = &grammar{ }, { name: "ExponentFloat", - pos: position{line: 648, col: 1, offset: 17706}, + pos: position{line: 643, col: 1, offset: 17667}, expr: &seqExpr{ - pos: position{line: 648, col: 18, offset: 17723}, + pos: position{line: 643, col: 18, offset: 17684}, exprs: []interface{}{ &choiceExpr{ - pos: position{line: 648, col: 20, offset: 17725}, + pos: position{line: 643, col: 20, offset: 17686}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 648, col: 20, offset: 17725}, + pos: position{line: 643, col: 20, offset: 17686}, name: "PointFloat", }, &ruleRefExpr{ - pos: position{line: 648, col: 33, offset: 17738}, + pos: position{line: 643, col: 33, offset: 17699}, name: "Integer", }, }, }, &ruleRefExpr{ - pos: position{line: 648, col: 43, offset: 17748}, + pos: position{line: 643, col: 43, offset: 17709}, name: "Exponent", }, }, @@ -2768,19 +2659,19 @@ var g = &grammar{ }, { name: "PointFloat", - pos: position{line: 650, col: 1, offset: 17758}, + pos: position{line: 645, col: 1, offset: 17719}, expr: &seqExpr{ - pos: position{line: 650, col: 15, offset: 17772}, + pos: position{line: 645, col: 15, offset: 17733}, exprs: []interface{}{ 
&zeroOrOneExpr{ - pos: position{line: 650, col: 15, offset: 17772}, + pos: position{line: 645, col: 15, offset: 17733}, expr: &ruleRefExpr{ - pos: position{line: 650, col: 15, offset: 17772}, + pos: position{line: 645, col: 15, offset: 17733}, name: "Integer", }, }, &ruleRefExpr{ - pos: position{line: 650, col: 24, offset: 17781}, + pos: position{line: 645, col: 24, offset: 17742}, name: "Fraction", }, }, @@ -2788,19 +2679,19 @@ var g = &grammar{ }, { name: "Fraction", - pos: position{line: 652, col: 1, offset: 17791}, + pos: position{line: 647, col: 1, offset: 17752}, expr: &seqExpr{ - pos: position{line: 652, col: 13, offset: 17803}, + pos: position{line: 647, col: 13, offset: 17764}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 652, col: 13, offset: 17803}, + pos: position{line: 647, col: 13, offset: 17764}, val: ".", ignoreCase: false, }, &oneOrMoreExpr{ - pos: position{line: 652, col: 17, offset: 17807}, + pos: position{line: 647, col: 17, offset: 17768}, expr: &ruleRefExpr{ - pos: position{line: 652, col: 17, offset: 17807}, + pos: position{line: 647, col: 17, offset: 17768}, name: "DecimalDigit", }, }, @@ -2809,19 +2700,19 @@ var g = &grammar{ }, { name: "Exponent", - pos: position{line: 654, col: 1, offset: 17822}, + pos: position{line: 649, col: 1, offset: 17783}, expr: &seqExpr{ - pos: position{line: 654, col: 13, offset: 17834}, + pos: position{line: 649, col: 13, offset: 17795}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 654, col: 13, offset: 17834}, + pos: position{line: 649, col: 13, offset: 17795}, val: "e", ignoreCase: true, }, &zeroOrOneExpr{ - pos: position{line: 654, col: 18, offset: 17839}, + pos: position{line: 649, col: 18, offset: 17800}, expr: &charClassMatcher{ - pos: position{line: 654, col: 18, offset: 17839}, + pos: position{line: 649, col: 18, offset: 17800}, val: "[+-]", chars: []rune{'+', '-'}, ignoreCase: false, @@ -2829,9 +2720,9 @@ var g = &grammar{ }, }, &oneOrMoreExpr{ - pos: position{line: 654, col: 24, offset: 17845}, + pos: position{line: 649, col: 24, offset: 17806}, expr: &ruleRefExpr{ - pos: position{line: 654, col: 24, offset: 17845}, + pos: position{line: 649, col: 24, offset: 17806}, name: "DecimalDigit", }, }, @@ -2840,26 +2731,26 @@ var g = &grammar{ }, { name: "Integer", - pos: position{line: 656, col: 1, offset: 17860}, + pos: position{line: 651, col: 1, offset: 17821}, expr: &choiceExpr{ - pos: position{line: 656, col: 12, offset: 17871}, + pos: position{line: 651, col: 12, offset: 17832}, alternatives: []interface{}{ &litMatcher{ - pos: position{line: 656, col: 12, offset: 17871}, + pos: position{line: 651, col: 12, offset: 17832}, val: "0", ignoreCase: false, }, &seqExpr{ - pos: position{line: 656, col: 20, offset: 17879}, + pos: position{line: 651, col: 20, offset: 17840}, exprs: []interface{}{ &ruleRefExpr{ - pos: position{line: 656, col: 20, offset: 17879}, + pos: position{line: 651, col: 20, offset: 17840}, name: "NonZeroDecimalDigit", }, &zeroOrMoreExpr{ - pos: position{line: 656, col: 40, offset: 17899}, + pos: position{line: 651, col: 40, offset: 17860}, expr: &ruleRefExpr{ - pos: position{line: 656, col: 40, offset: 17899}, + pos: position{line: 651, col: 40, offset: 17860}, name: "DecimalDigit", }, }, @@ -2870,16 +2761,16 @@ var g = &grammar{ }, { name: "String", - pos: position{line: 658, col: 1, offset: 17916}, + pos: position{line: 653, col: 1, offset: 17877}, expr: &choiceExpr{ - pos: position{line: 658, col: 11, offset: 17926}, + pos: position{line: 653, col: 11, offset: 17887}, alternatives: 
[]interface{}{ &ruleRefExpr{ - pos: position{line: 658, col: 11, offset: 17926}, + pos: position{line: 653, col: 11, offset: 17887}, name: "QuotedString", }, &ruleRefExpr{ - pos: position{line: 658, col: 26, offset: 17941}, + pos: position{line: 653, col: 26, offset: 17902}, name: "RawString", }, }, @@ -2887,27 +2778,27 @@ var g = &grammar{ }, { name: "QuotedString", - pos: position{line: 660, col: 1, offset: 17952}, + pos: position{line: 655, col: 1, offset: 17913}, expr: &actionExpr{ - pos: position{line: 660, col: 17, offset: 17968}, + pos: position{line: 655, col: 17, offset: 17929}, run: (*parser).callonQuotedString1, expr: &seqExpr{ - pos: position{line: 660, col: 17, offset: 17968}, + pos: position{line: 655, col: 17, offset: 17929}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 660, col: 17, offset: 17968}, + pos: position{line: 655, col: 17, offset: 17929}, val: "\"", ignoreCase: false, }, &zeroOrMoreExpr{ - pos: position{line: 660, col: 21, offset: 17972}, + pos: position{line: 655, col: 21, offset: 17933}, expr: &ruleRefExpr{ - pos: position{line: 660, col: 21, offset: 17972}, + pos: position{line: 655, col: 21, offset: 17933}, name: "Char", }, }, &litMatcher{ - pos: position{line: 660, col: 27, offset: 17978}, + pos: position{line: 655, col: 27, offset: 17939}, val: "\"", ignoreCase: false, }, @@ -2917,22 +2808,22 @@ var g = &grammar{ }, { name: "RawString", - pos: position{line: 668, col: 1, offset: 18133}, + pos: position{line: 663, col: 1, offset: 18094}, expr: &actionExpr{ - pos: position{line: 668, col: 14, offset: 18146}, + pos: position{line: 663, col: 14, offset: 18107}, run: (*parser).callonRawString1, expr: &seqExpr{ - pos: position{line: 668, col: 14, offset: 18146}, + pos: position{line: 663, col: 14, offset: 18107}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 668, col: 14, offset: 18146}, + pos: position{line: 663, col: 14, offset: 18107}, val: "`", ignoreCase: false, }, &zeroOrMoreExpr{ - pos: position{line: 668, col: 18, offset: 18150}, + pos: position{line: 663, col: 18, offset: 18111}, expr: &charClassMatcher{ - pos: position{line: 668, col: 18, offset: 18150}, + pos: position{line: 663, col: 18, offset: 18111}, val: "[^`]", chars: []rune{'`'}, ignoreCase: false, @@ -2940,7 +2831,7 @@ var g = &grammar{ }, }, &litMatcher{ - pos: position{line: 668, col: 24, offset: 18156}, + pos: position{line: 663, col: 24, offset: 18117}, val: "`", ignoreCase: false, }, @@ -2950,24 +2841,24 @@ var g = &grammar{ }, { name: "Bool", - pos: position{line: 677, col: 1, offset: 18323}, + pos: position{line: 672, col: 1, offset: 18284}, expr: &choiceExpr{ - pos: position{line: 677, col: 9, offset: 18331}, + pos: position{line: 672, col: 9, offset: 18292}, alternatives: []interface{}{ &actionExpr{ - pos: position{line: 677, col: 9, offset: 18331}, + pos: position{line: 672, col: 9, offset: 18292}, run: (*parser).callonBool2, expr: &litMatcher{ - pos: position{line: 677, col: 9, offset: 18331}, + pos: position{line: 672, col: 9, offset: 18292}, val: "true", ignoreCase: false, }, }, &actionExpr{ - pos: position{line: 681, col: 5, offset: 18431}, + pos: position{line: 676, col: 5, offset: 18392}, run: (*parser).callonBool4, expr: &litMatcher{ - pos: position{line: 681, col: 5, offset: 18431}, + pos: position{line: 676, col: 5, offset: 18392}, val: "false", ignoreCase: false, }, @@ -2977,12 +2868,12 @@ var g = &grammar{ }, { name: "Null", - pos: position{line: 687, col: 1, offset: 18532}, + pos: position{line: 682, col: 1, offset: 18493}, expr: &actionExpr{ - pos: 
position{line: 687, col: 9, offset: 18540}, + pos: position{line: 682, col: 9, offset: 18501}, run: (*parser).callonNull1, expr: &litMatcher{ - pos: position{line: 687, col: 9, offset: 18540}, + pos: position{line: 682, col: 9, offset: 18501}, val: "null", ignoreCase: false, }, @@ -2990,9 +2881,9 @@ var g = &grammar{ }, { name: "AsciiLetter", - pos: position{line: 693, col: 1, offset: 18635}, + pos: position{line: 688, col: 1, offset: 18596}, expr: &charClassMatcher{ - pos: position{line: 693, col: 16, offset: 18650}, + pos: position{line: 688, col: 16, offset: 18611}, val: "[A-Za-z_]", chars: []rune{'_'}, ranges: []rune{'A', 'Z', 'a', 'z'}, @@ -3002,35 +2893,35 @@ var g = &grammar{ }, { name: "Char", - pos: position{line: 695, col: 1, offset: 18661}, + pos: position{line: 690, col: 1, offset: 18622}, expr: &choiceExpr{ - pos: position{line: 695, col: 9, offset: 18669}, + pos: position{line: 690, col: 9, offset: 18630}, alternatives: []interface{}{ &seqExpr{ - pos: position{line: 695, col: 11, offset: 18671}, + pos: position{line: 690, col: 11, offset: 18632}, exprs: []interface{}{ ¬Expr{ - pos: position{line: 695, col: 11, offset: 18671}, + pos: position{line: 690, col: 11, offset: 18632}, expr: &ruleRefExpr{ - pos: position{line: 695, col: 12, offset: 18672}, + pos: position{line: 690, col: 12, offset: 18633}, name: "EscapedChar", }, }, &anyMatcher{ - line: 695, col: 24, offset: 18684, + line: 690, col: 24, offset: 18645, }, }, }, &seqExpr{ - pos: position{line: 695, col: 32, offset: 18692}, + pos: position{line: 690, col: 32, offset: 18653}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 695, col: 32, offset: 18692}, + pos: position{line: 690, col: 32, offset: 18653}, val: "\\", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 695, col: 37, offset: 18697}, + pos: position{line: 690, col: 37, offset: 18658}, name: "EscapeSequence", }, }, @@ -3040,9 +2931,9 @@ var g = &grammar{ }, { name: "EscapedChar", - pos: position{line: 697, col: 1, offset: 18715}, + pos: position{line: 692, col: 1, offset: 18676}, expr: &charClassMatcher{ - pos: position{line: 697, col: 16, offset: 18730}, + pos: position{line: 692, col: 16, offset: 18691}, val: "[\\x00-\\x1f\"\\\\]", chars: []rune{'"', '\\'}, ranges: []rune{'\x00', '\x1f'}, @@ -3052,16 +2943,16 @@ var g = &grammar{ }, { name: "EscapeSequence", - pos: position{line: 699, col: 1, offset: 18746}, + pos: position{line: 694, col: 1, offset: 18707}, expr: &choiceExpr{ - pos: position{line: 699, col: 19, offset: 18764}, + pos: position{line: 694, col: 19, offset: 18725}, alternatives: []interface{}{ &ruleRefExpr{ - pos: position{line: 699, col: 19, offset: 18764}, + pos: position{line: 694, col: 19, offset: 18725}, name: "SingleCharEscape", }, &ruleRefExpr{ - pos: position{line: 699, col: 38, offset: 18783}, + pos: position{line: 694, col: 38, offset: 18744}, name: "UnicodeEscape", }, }, @@ -3069,9 +2960,9 @@ var g = &grammar{ }, { name: "SingleCharEscape", - pos: position{line: 701, col: 1, offset: 18798}, + pos: position{line: 696, col: 1, offset: 18759}, expr: &charClassMatcher{ - pos: position{line: 701, col: 21, offset: 18818}, + pos: position{line: 696, col: 21, offset: 18779}, val: "[ \" \\\\ / b f n r t ]", chars: []rune{' ', '"', ' ', '\\', ' ', '/', ' ', 'b', ' ', 'f', ' ', 'n', ' ', 'r', ' ', 't', ' '}, ignoreCase: false, @@ -3080,29 +2971,29 @@ var g = &grammar{ }, { name: "UnicodeEscape", - pos: position{line: 703, col: 1, offset: 18840}, + pos: position{line: 698, col: 1, offset: 18801}, expr: &seqExpr{ - pos: 
position{line: 703, col: 18, offset: 18857}, + pos: position{line: 698, col: 18, offset: 18818}, exprs: []interface{}{ &litMatcher{ - pos: position{line: 703, col: 18, offset: 18857}, + pos: position{line: 698, col: 18, offset: 18818}, val: "u", ignoreCase: false, }, &ruleRefExpr{ - pos: position{line: 703, col: 22, offset: 18861}, + pos: position{line: 698, col: 22, offset: 18822}, name: "HexDigit", }, &ruleRefExpr{ - pos: position{line: 703, col: 31, offset: 18870}, + pos: position{line: 698, col: 31, offset: 18831}, name: "HexDigit", }, &ruleRefExpr{ - pos: position{line: 703, col: 40, offset: 18879}, + pos: position{line: 698, col: 40, offset: 18840}, name: "HexDigit", }, &ruleRefExpr{ - pos: position{line: 703, col: 49, offset: 18888}, + pos: position{line: 698, col: 49, offset: 18849}, name: "HexDigit", }, }, @@ -3110,9 +3001,9 @@ var g = &grammar{ }, { name: "DecimalDigit", - pos: position{line: 705, col: 1, offset: 18898}, + pos: position{line: 700, col: 1, offset: 18859}, expr: &charClassMatcher{ - pos: position{line: 705, col: 17, offset: 18914}, + pos: position{line: 700, col: 17, offset: 18875}, val: "[0-9]", ranges: []rune{'0', '9'}, ignoreCase: false, @@ -3121,9 +3012,9 @@ var g = &grammar{ }, { name: "NonZeroDecimalDigit", - pos: position{line: 707, col: 1, offset: 18921}, + pos: position{line: 702, col: 1, offset: 18882}, expr: &charClassMatcher{ - pos: position{line: 707, col: 24, offset: 18944}, + pos: position{line: 702, col: 24, offset: 18905}, val: "[1-9]", ranges: []rune{'1', '9'}, ignoreCase: false, @@ -3132,9 +3023,9 @@ var g = &grammar{ }, { name: "HexDigit", - pos: position{line: 709, col: 1, offset: 18951}, + pos: position{line: 704, col: 1, offset: 18912}, expr: &charClassMatcher{ - pos: position{line: 709, col: 13, offset: 18963}, + pos: position{line: 704, col: 13, offset: 18924}, val: "[0-9a-fA-F]", ranges: []rune{'0', '9', 'a', 'f', 'A', 'F'}, ignoreCase: false, @@ -3144,11 +3035,11 @@ var g = &grammar{ { name: "ws", displayName: "\"whitespace\"", - pos: position{line: 711, col: 1, offset: 18976}, + pos: position{line: 706, col: 1, offset: 18937}, expr: &oneOrMoreExpr{ - pos: position{line: 711, col: 20, offset: 18995}, + pos: position{line: 706, col: 20, offset: 18956}, expr: &charClassMatcher{ - pos: position{line: 711, col: 20, offset: 18995}, + pos: position{line: 706, col: 20, offset: 18956}, val: "[ \\t\\r\\n]", chars: []rune{' ', '\t', '\r', '\n'}, ignoreCase: false, @@ -3159,21 +3050,21 @@ var g = &grammar{ { name: "_", displayName: "\"whitespace\"", - pos: position{line: 713, col: 1, offset: 19007}, + pos: position{line: 708, col: 1, offset: 18968}, expr: &zeroOrMoreExpr{ - pos: position{line: 713, col: 19, offset: 19025}, + pos: position{line: 708, col: 19, offset: 18986}, expr: &choiceExpr{ - pos: position{line: 713, col: 21, offset: 19027}, + pos: position{line: 708, col: 21, offset: 18988}, alternatives: []interface{}{ &charClassMatcher{ - pos: position{line: 713, col: 21, offset: 19027}, + pos: position{line: 708, col: 21, offset: 18988}, val: "[ \\t\\r\\n]", chars: []rune{' ', '\t', '\r', '\n'}, ignoreCase: false, inverted: false, }, &ruleRefExpr{ - pos: position{line: 713, col: 33, offset: 19039}, + pos: position{line: 708, col: 33, offset: 19000}, name: "Comment", }, }, @@ -3182,17 +3073,17 @@ var g = &grammar{ }, { name: "Comment", - pos: position{line: 715, col: 1, offset: 19051}, + pos: position{line: 710, col: 1, offset: 19012}, expr: &actionExpr{ - pos: position{line: 715, col: 12, offset: 19062}, + pos: position{line: 710, col: 12, 
offset: 19023}, run: (*parser).callonComment1, expr: &seqExpr{ - pos: position{line: 715, col: 12, offset: 19062}, + pos: position{line: 710, col: 12, offset: 19023}, exprs: []interface{}{ &zeroOrMoreExpr{ - pos: position{line: 715, col: 12, offset: 19062}, + pos: position{line: 710, col: 12, offset: 19023}, expr: &charClassMatcher{ - pos: position{line: 715, col: 12, offset: 19062}, + pos: position{line: 710, col: 12, offset: 19023}, val: "[ \\t]", chars: []rune{' ', '\t'}, ignoreCase: false, @@ -3200,17 +3091,17 @@ var g = &grammar{ }, }, &litMatcher{ - pos: position{line: 715, col: 19, offset: 19069}, + pos: position{line: 710, col: 19, offset: 19030}, val: "#", ignoreCase: false, }, &labeledExpr{ - pos: position{line: 715, col: 23, offset: 19073}, + pos: position{line: 710, col: 23, offset: 19034}, label: "text", expr: &zeroOrMoreExpr{ - pos: position{line: 715, col: 28, offset: 19078}, + pos: position{line: 710, col: 28, offset: 19039}, expr: &charClassMatcher{ - pos: position{line: 715, col: 28, offset: 19078}, + pos: position{line: 710, col: 28, offset: 19039}, val: "[^\\r\\n]", chars: []rune{'\r', '\n'}, ignoreCase: false, @@ -3224,11 +3115,11 @@ var g = &grammar{ }, { name: "EOF", - pos: position{line: 726, col: 1, offset: 19354}, + pos: position{line: 721, col: 1, offset: 19315}, expr: ¬Expr{ - pos: position{line: 726, col: 8, offset: 19361}, + pos: position{line: 721, col: 8, offset: 19322}, expr: &anyMatcher{ - line: 726, col: 9, offset: 19362, + line: 721, col: 9, offset: 19323, }, }, }, @@ -3422,6 +3313,7 @@ func (c *current) onNormalRules1(head, b interface{}) (interface{}, error) { Location: re.loc, Head: &Head{ Name: prev.Head.Name, + Args: prev.Head.Args.Copy(), Value: re.term, Location: re.term.Location, }, @@ -3441,83 +3333,22 @@ func (p *parser) callonNormalRules1() (interface{}, error) { return p.cur.onNormalRules1(stack["head"], stack["b"]) } -func (c *current) onUserFunc1(head, b interface{}) (interface{}, error) { - - if head == nil { - return nil, nil - } - - f := &Func{ - Location: currentLocation(c), - Head: head.(*FuncHead), - Body: b.(Body), - } - - return f, nil -} - -func (p *parser) callonUserFunc1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onUserFunc1(stack["head"], stack["b"]) -} - -func (c *current) onFuncHead1(name, args, output interface{}) (interface{}, error) { +func (c *current) onRuleHead1(name, args, key, value interface{}) (interface{}, error) { - head := &FuncHead{} + head := &Head{} head.Location = currentLocation(c) head.Name = name.(*Term).Value.(Var) - head.Args = args.(Args) - - if output != nil { - valueSlice := output.([]interface{}) - head.Output = valueSlice[len(valueSlice)-1].(*Term) - } - - return head, nil -} - -func (p *parser) callonFuncHead1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onFuncHead1(stack["name"], stack["args"], stack["output"]) -} -func (c *current) onFuncArgs1(head, tail interface{}) (interface{}, error) { - args := Args{} - if head == nil { - return args, nil + if args != nil && key != nil { + return nil, fmt.Errorf("partial %v/%v %vs cannot take arguments", SetTypeName, ObjectTypeName, RuleTypeName) } - first := head.(*Term) - first.Location = currentLocation(c) - args = append(args, first) - - tailSlice := tail.([]interface{}) - for _, v := range tailSlice { - s := v.([]interface{}) - arg := s[len(s)-1].(*Term) - arg.Location = currentLocation(c) - args = append(args, arg) + if args != nil { + argSlice := 
args.([]interface{}) + head.Args = argSlice[3].(Args) } - return args, nil -} - -func (p *parser) callonFuncArgs1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onFuncArgs1(stack["head"], stack["tail"]) -} - -func (c *current) onRuleHead1(name, key, value interface{}) (interface{}, error) { - - head := &Head{} - - head.Location = currentLocation(c) - head.Name = name.(*Term).Value.(Var) - if key != nil { keySlice := key.([]interface{}) // Head definition above describes the "key" slice. We care about the "Term" element. @@ -3531,8 +3362,7 @@ func (c *current) onRuleHead1(name, key, value interface{}) (interface{}, error) } if key == nil && value == nil { - head.Value = BooleanTerm(true) - head.Value.Location = head.Location + head.Value = BooleanTerm(true).SetLocation(head.Location) } if key != nil && value != nil { @@ -3549,7 +3379,17 @@ func (c *current) onRuleHead1(name, key, value interface{}) (interface{}, error) func (p *parser) callonRuleHead1() (interface{}, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onRuleHead1(stack["name"], stack["key"], stack["value"]) + return p.cur.onRuleHead1(stack["name"], stack["args"], stack["key"], stack["value"]) +} + +func (c *current) onArgs1(head, tail interface{}) (interface{}, error) { + return makeArgs(head, tail, currentLocation(c)) +} + +func (p *parser) callonArgs1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onArgs1(stack["head"], stack["tail"]) } func (c *current) onElse1(val, b interface{}) (interface{}, error) { @@ -3635,10 +3475,15 @@ func (p *parser) callonNonWhitespaceBody1() (interface{}, error) { } func (c *current) onLiteral1(neg, val, with interface{}) (interface{}, error) { - expr := &Expr{} + var expr *Expr + switch val := val.(type) { + case *Expr: + expr = val + case *Term: + expr = &Expr{Terms: val} + } expr.Location = currentLocation(c) expr.Negated = neg != nil - expr.Terms = val if with != nil { sl := with.([]interface{}) @@ -3680,24 +3525,44 @@ func (p *parser) callonWith1() (interface{}, error) { return p.cur.onWith1(stack["target"], stack["value"]) } -func (c *current) onInfixArithExpr1(output, left, op, right interface{}) (interface{}, error) { - return []*Term{op.(*Term), left.(*Term), right.(*Term), output.(*Term)}, nil +func (c *current) onInfixCallExpr1(output, operator, args interface{}) (interface{}, error) { + return makeInfixCallExpr(operator, args, output) +} + +func (p *parser) callonInfixCallExpr1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onInfixCallExpr1(stack["output"], stack["operator"], stack["args"]) +} + +func (c *current) onInfixCallExprReverse1(operator, args, output interface{}) (interface{}, error) { + return makeInfixCallExpr(operator, args, output) +} + +func (p *parser) callonInfixCallExprReverse1() (interface{}, error) { + stack := p.vstack[len(p.vstack)-1] + _ = stack + return p.cur.onInfixCallExprReverse1(stack["operator"], stack["args"], stack["output"]) +} + +func (c *current) onInfixArithExpr1(output, left, operator, right interface{}) (interface{}, error) { + return makeInfixCallExpr(operator, Args{left.(*Term), right.(*Term)}, output) } func (p *parser) callonInfixArithExpr1() (interface{}, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onInfixArithExpr1(stack["output"], stack["left"], stack["op"], stack["right"]) + return p.cur.onInfixArithExpr1(stack["output"], stack["left"], stack["operator"], stack["right"]) 
} -func (c *current) onInfixArithExprReverse1(left, op, right, output interface{}) (interface{}, error) { - return []*Term{op.(*Term), left.(*Term), right.(*Term), output.(*Term)}, nil +func (c *current) onInfixArithExprReverse1(left, operator, right, output interface{}) (interface{}, error) { + return makeInfixCallExpr(operator, Args{left.(*Term), right.(*Term)}, output) } func (p *parser) callonInfixArithExprReverse1() (interface{}, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onInfixArithExprReverse1(stack["left"], stack["op"], stack["right"], stack["output"]) + return p.cur.onInfixArithExprReverse1(stack["left"], stack["operator"], stack["right"], stack["output"]) } func (c *current) onArithInfixOp1(val interface{}) (interface{}, error) { @@ -3718,17 +3583,24 @@ func (p *parser) callonArithInfixOp1() (interface{}, error) { return p.cur.onArithInfixOp1(stack["val"]) } -func (c *current) onInfixExpr1(left, op, right interface{}) (interface{}, error) { - return []*Term{op.(*Term), left.(*Term), right.(*Term)}, nil +func (c *current) onInfixRelationExpr1(left, operator, right interface{}) (interface{}, error) { + return &Expr{ + Terms: []*Term{ + operator.(*Term), + left.(*Term), + right.(*Term), + }, + Infix: true, + }, nil } -func (p *parser) callonInfixExpr1() (interface{}, error) { +func (p *parser) callonInfixRelationExpr1() (interface{}, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onInfixExpr1(stack["left"], stack["op"], stack["right"]) + return p.cur.onInfixRelationExpr1(stack["left"], stack["operator"], stack["right"]) } -func (c *current) onInfixOp1(val interface{}) (interface{}, error) { +func (c *current) onInfixRelationOp1(val interface{}) (interface{}, error) { op := string(c.text) for _, b := range Builtins { if string(b.Infix) == op { @@ -3740,17 +3612,18 @@ func (c *current) onInfixOp1(val interface{}) (interface{}, error) { return operator, nil } -func (p *parser) callonInfixOp1() (interface{}, error) { +func (p *parser) callonInfixRelationOp1() (interface{}, error) { stack := p.vstack[len(p.vstack)-1] _ = stack - return p.cur.onInfixOp1(stack["val"]) + return p.cur.onInfixRelationOp1(stack["val"]) } func (c *current) onCall1(name, head, tail interface{}) (interface{}, error) { buf := []*Term{name.(*Term)} if head == nil { - return buf, nil + return &Expr{Terms: buf}, nil } + buf = append(buf, head.(*Term)) // PrefixExpr above describes the "tail" structure. We only care about the "Term" elements. 
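A minimal sketch of how the reworked call productions surface to callers; this is not part of the diff and assumes MustParseExpr, IsCall, Operator, and Operand behave as exercised in the tests further down:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	// With the new InfixCallExpr production, `x = count(...)` parses into a
	// single call expression: terms[0] is the operator, the middle terms are
	// the arguments, and the final term is the output.
	expr := ast.MustParseExpr(`x = count([1, 2, 3])`)
	fmt.Println(expr.IsCall())   // true
	fmt.Println(expr.Operator()) // the count operator
	fmt.Println(expr.Operand(1)) // x, the output term
}
```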
@@ -3759,7 +3632,8 @@ func (c *current) onCall1(name, head, tail interface{}) (interface{}, error) { s := v.([]interface{}) buf = append(buf, s[len(s)-1].(*Term)) } - return buf, nil + + return &Expr{Terms: buf}, nil } func (p *parser) callonCall1() (interface{}, error) { @@ -3852,36 +3726,6 @@ func (p *parser) callonArray1() (interface{}, error) { return p.cur.onArray1(stack["head"], stack["tail"]) } -func (c *current) onArgTerm1(val interface{}) (interface{}, error) { - return val, nil -} - -func (p *parser) callonArgTerm1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onArgTerm1(stack["val"]) -} - -func (c *current) onArgObject1(head, tail interface{}) (interface{}, error) { - return makeObject(head, tail, currentLocation(c)) -} - -func (p *parser) callonArgObject1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onArgObject1(stack["head"], stack["tail"]) -} - -func (c *current) onArgArray1(head, tail interface{}) (interface{}, error) { - return makeArray(head, tail, currentLocation(c)) -} - -func (p *parser) callonArgArray1() (interface{}, error) { - stack := p.vstack[len(p.vstack)-1] - _ = stack - return p.cur.onArgArray1(stack["head"], stack["tail"]) -} - func (c *current) onSetEmpty1() (interface{}, error) { set := SetTerm() set.Location = currentLocation(c) diff --git a/ast/parser_ext.go b/ast/parser_ext.go index eb70c7be6a..9ef856fd88 100644 --- a/ast/parser_ext.go +++ b/ast/parser_ext.go @@ -139,12 +139,17 @@ func ParseRuleFromExpr(module *Module, expr *Expr) (*Rule, error) { return nil, fmt.Errorf("negated %v cannot be used for %v", TypeName(expr), RuleTypeName) } - if !expr.IsBuiltin() { + if !expr.IsCall() { return ParsePartialSetDocRuleFromTerm(module, expr.Terms.(*Term)) } if !expr.IsEquality() { - return nil, fmt.Errorf("%v cannot be used for %v", TypeName(expr), RuleTypeName) + for _, bi := range Builtins { + if expr.Operator().Equal(bi.Ref()) { + return nil, fmt.Errorf("%v name conflicts with built-in function", RuleTypeName) + } + } + return ParseRuleFromCallExpr(module, expr.Terms.([]*Term)) } lhs, rhs := expr.Operand(0), expr.Operand(1) @@ -248,6 +253,38 @@ func ParsePartialSetDocRuleFromTerm(module *Module, term *Term) (*Rule, error) { return rule, nil } +// ParseRuleFromCallExpr returns a rule if the terms can be interpreted as a +// function returning true or some value (e.g., f(x) => f(x) = true { true }, +// f(x) = y => f(x) = y { true }). +func ParseRuleFromCallExpr(module *Module, terms []*Term) (*Rule, error) { + var args Args + var value *Term + loc := terms[0].Location + + if len(terms) <= 1 { + return nil, fmt.Errorf("%ss with %v must take at least one argument", RuleTypeName, ArgsTypeName) + } else if len(terms) == 2 { + args = Args{terms[1]} + value = BooleanTerm(true).SetLocation(loc) + } else { + args = terms[1 : len(terms)-1] + value = terms[len(terms)-1] + } + + rule := &Rule{ + Location: loc, + Head: &Head{ + Location: loc, + Name: Var(terms[0].String()), + Args: args, + Value: value, + }, + Module: module, + Body: NewBody(NewExpr(BooleanTerm(true).SetLocation(loc)).SetLocation(loc)), + } + return rule, nil +} + // ParseImports returns a slice of Import objects. 
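A hedged sketch of the expansion performed by ParseRuleFromCallExpr, assuming ast.ParseModule routes expression statements through ParseRuleFromExpr as shown above; function-style expressions in a module body become ordinary rules with a `true` body:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	module, err := ast.ParseModule("example.rego", `
package test

f(1) = 2
f(1)
`)
	if err != nil {
		panic(err)
	}
	fmt.Println(module.Rules[0]) // f(1) = 2 { true }
	fmt.Println(module.Rules[1]) // f(1) = true { true }
}
```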
func ParseImports(input string) ([]*Import, error) { stmts, _, err := ParseStatements("", input) @@ -433,9 +470,7 @@ func ParseStatements(filename, input string) ([]Statement, []*Comment, error) { } } - postProcess(filename, stmts) - - return stmts, comments, err + return stmts, comments, postProcess(filename, stmts) } func convertErrList(filename string, errs errList) error { @@ -484,9 +519,6 @@ func parseModule(stmts []Statement, comments []*Comment) (*Module, error) { case *Rule: setRuleModule(stmt, mod) mod.Rules = append(mod.Rules, stmt) - case *Func: - setFuncModule(stmt, mod) - mod.Funcs = append(mod.Funcs, stmt) case Body: rule, err := ParseRuleFromBody(mod, stmt) if err != nil { @@ -510,12 +542,6 @@ func parseModule(stmts []Statement, comments []*Comment) (*Module, error) { } func postProcess(filename string, stmts []Statement) error { - for _, stmt := range stmts { - switch stmt := stmt.(type) { - case *Func: - expandVoidFunction(stmt) - } - } if err := mangleDataVars(stmts); err != nil { return err @@ -531,13 +557,6 @@ func postProcess(filename string, stmts []Statement) error { return nil } -func expandVoidFunction(fn *Func) { - if fn.Head.Output == nil { - fn.Head.Output = BooleanTerm(true) - fn.Head.Output.Location = fn.Head.Location - } -} - func mangleDataVars(stmts []Statement) error { for i := range stmts { vt := newVarToRefTransformer(DefaultRootDocument.Value.(Var), DefaultRootRef.Copy()) @@ -633,10 +652,6 @@ func setRuleModule(rule *Rule, module *Module) { } } -func setFuncModule(fn *Func, module *Module) { - fn.Module = module -} - type varToRefTransformer struct { orig Var target Ref @@ -659,12 +674,13 @@ func (vt *varToRefTransformer) Transform(x interface{}) (interface{}, error) { return x, nil } switch x := x.(type) { - case *Head, *FuncHead: - // The next AST node will be the rule/func name (which should not be + case *Head: + // The next AST node will be the rule name (which should not be // transformed). vt.skip = true case Ref: - // The next AST node will be the ref head (which should not be transformed). + // The next AST node will be the ref head (which should not be + // transformed). 
vt.skip = true case Var: if x.Equal(vt.orig) { diff --git a/ast/parser_test.go b/ast/parser_test.go index 44adf59950..726cb1e967 100644 --- a/ast/parser_test.go +++ b/ast/parser_test.go @@ -414,6 +414,18 @@ func TestInfixArithExpr(t *testing.T) { assertParseOneExpr(t, "plus (reverse)", "1 + 2 = x", Plus.Expr(IntNumberTerm(1), IntNumberTerm(2), VarTerm("x"))) } +func TestInfixCallExpr(t *testing.T) { + assertParseOneExpr(t, "call", "count([true, false]) = x", Count.Expr(ArrayTerm(BooleanTerm(true), BooleanTerm(false)), VarTerm("x"))) + assertParseOneExpr(t, "call-reverse", "x = count([true, false])", Count.Expr(ArrayTerm(BooleanTerm(true), BooleanTerm(false)), VarTerm("x"))) + assertParseOneExpr(t, "call-ref", "foo.bar(1) = x", &Expr{ + Terms: []*Term{ + RefTerm(VarTerm("foo"), StringTerm("bar")), + IntNumberTerm(1), + VarTerm("x"), + }, + }) +} + func TestMiscBuiltinExpr(t *testing.T) { xyz := RefTerm(VarTerm("xyz")) assertParseOneExpr(t, "empty", "xyz()", NewBuiltinExpr(xyz)) @@ -560,60 +572,9 @@ func TestIsValidImportPath(t *testing.T) { } -func TestUserFunctions(t *testing.T) { - assertParseFunc(t, "identity", `f(x) = y { y = x }`, &Func{ - Head: NewFuncHead(Var("f"), VarTerm("y"), VarTerm("x")), - Body: NewBody( - Equality.Expr(VarTerm("y"), VarTerm("x")), - ), - }) - - assertParseFunc(t, "set", `f() = y { y = 42 }`, &Func{ - Head: NewFuncHead(Var("f"), VarTerm("y")), - Body: NewBody( - Equality.Expr(VarTerm("y"), IntNumberTerm(42)), - ), - }) - - assertParseFunc(t, "term input", `f([x, y]) = z { split(x, y, z) }`, &Func{ - Head: NewFuncHead(Var("f"), VarTerm("z"), ArrayTerm(VarTerm("x"), VarTerm("y"))), - Body: NewBody( - Split.Expr(VarTerm("x"), VarTerm("y"), VarTerm("z")), - ), - }) - - assertParseFunc(t, "term output", `f() = [x, y] { split("foo.bar", x, y) }`, &Func{ - Head: NewFuncHead(Var("f"), ArrayTerm(VarTerm("x"), VarTerm("y"))), - Body: NewBody( - Split.Expr(StringTerm("foo.bar"), VarTerm("x"), VarTerm("y")), - ), - }) - - assertParseFunc(t, "comprehension", `f(x) = y { count([1 | x[_]], y) }`, &Func{ - Head: NewFuncHead(Var("f"), VarTerm("y"), VarTerm("x")), - Body: NewBody( - Count.Expr(MustParseTerm("[1 | x[_]]"), VarTerm("y")), - ), - }) - - assertParseFunc(t, "nested braces", `f(x) = y { z = {"foo": "bar", "baz": {"hi": 5}}; y = z.baz.hi }`, &Func{ - Head: NewFuncHead(Var("f"), VarTerm("y"), VarTerm("x")), - Body: NewBody( - Equality.Expr(VarTerm("z"), MustParseTerm(`{"foo": "bar", "baz": {"hi": 5}}`)), - Equality.Expr(VarTerm("y"), MustParseTerm("z.baz.hi")), - ), - }) - - assertParseErrorContains(t, "no output", `f() = { "foo" = "bar" }`, "rego_parse_error: no match found") - assertParseErrorContains(t, "no body", `f() = y`, "rego_parse_error: no match found") - assertParseErrorContains(t, "unmatched braces", `f(x) = y { trim(x, ".", y) `, "rego_parse_error: no match found") - assertParseErrorContains(t, "set input", `f({x}) = y { x = y }`, "rego_parse_error: no match found") - assertParseErrorEquals(t, "empty body", `f() = y {}`, "rego_parse_error: body must be non-empty") -} - func TestRule(t *testing.T) { - assertParseRule(t, "identity", `p = true { true }`, &Rule{ + assertParseRule(t, "constant", `p = true { true }`, &Rule{ Head: NewHead(Var("p"), nil, BooleanTerm(true)), Body: NewBody( &Expr{Terms: BooleanTerm(true)}, @@ -719,11 +680,51 @@ func TestRule(t *testing.T) { Body: MustParseBody(`[data.a[0]] = [{"x": x}]; count(x, 3); sum(x, y); y > 100`), }) + fxy := &Head{ + Name: Var("f"), + Args: Args{VarTerm("x")}, + Value: VarTerm("y"), + } + + assertParseRule(t, 
"identity", `f(x) = y { y = x }`, &Rule{ + Head: fxy, + Body: NewBody( + Equality.Expr(VarTerm("y"), VarTerm("x")), + ), + }) + + assertParseRule(t, "composite arg", `f([x, y]) = z { split(x, y, z) }`, &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{ArrayTerm(VarTerm("x"), VarTerm("y"))}, + Value: VarTerm("z"), + }, + Body: NewBody( + Split.Expr(VarTerm("x"), VarTerm("y"), VarTerm("z")), + ), + }) + + assertParseRule(t, "composite result", `f(1) = [x, y] { split("foo.bar", x, y) }`, &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{IntNumberTerm(1)}, + Value: ArrayTerm(VarTerm("x"), VarTerm("y")), + }, + Body: NewBody( + Split.Expr(StringTerm("foo.bar"), VarTerm("x"), VarTerm("y")), + ), + }) + + assertParseErrorEquals(t, "empty body", `f(_) = y {}`, "rego_parse_error: body must be non-empty") assertParseErrorEquals(t, "object composite key", "p[[x,y]] = z { true }", "rego_parse_error: object key must be one of string, var, ref not array") assertParseErrorEquals(t, "default ref value", "default p = [data.foo]", "rego_parse_error: default rule value cannot contain ref") assertParseErrorEquals(t, "default var value", "default p = [x]", "rego_parse_error: default rule value cannot contain var") assertParseErrorEquals(t, "empty rule body", "p {}", "rego_parse_error: body must be non-empty") + assertParseErrorContains(t, "0-arity", `f() = 1 { true }`, "rego_parse_error: no match found") + assertParseErrorContains(t, "no output", `f(_) = { "foo" = "bar" }`, "rego_parse_error: no match found") + assertParseErrorContains(t, "unmatched braces", `f(x) = y { trim(x, ".", y) `, "rego_parse_error: no match found") + // TODO(tsandall): improve error checking here. This is a common mistake // and the current error message is not very good. Need to investigate if the // parser can be improved. 
@@ -750,6 +751,14 @@ func TestRuleElseKeyword(t *testing.T) { p { "p2" } + + f(x) { + x < 100 + } else = false { + x > 200 + } else { + x != 150 + } ` parsed, err := ParseModule("", mod) @@ -794,6 +803,30 @@ func TestRuleElseKeyword(t *testing.T) { Head: head, Body: MustParseBody(`"p2"`), }, + &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{VarTerm("x")}, + Value: BooleanTerm(true), + }, + Body: MustParseBody(`x < 100`), + Else: &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{VarTerm("x")}, + Value: BooleanTerm(false), + }, + Body: MustParseBody(`x > 200`), + Else: &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{VarTerm("x")}, + Value: BooleanTerm(true), + }, + Body: MustParseBody(`x != 150`), + }, + }, + }, }, } @@ -882,7 +915,14 @@ p[x] = y { y = 2 } -q = 1`, +q = 1 + +f(x) { + x < 10 +} { + x > 1000 +} +`, ) if err != nil { @@ -893,7 +933,9 @@ q = 1`, p[x] = y { x = "a"; y = 1 } p[x] = y { x = "b"; y = 2 } -q = 1 { true }`, +q = 1 { true } +f(x) { x < 10 } +f(x) { x > 1000 }`, ) if !expected.Equal(result) { @@ -1042,6 +1084,8 @@ bar[1] bar[[{"foo":"baz"}]] input = 1 data = 2 +f(1) = 2 +f(1) ` assertParseModule(t, "rules from bodies", testModule, &Module{ @@ -1060,6 +1104,8 @@ data = 2 MustParseRule(`bar[[{"foo":"baz"}]] { true }`), MustParseRule(`input = 1 { true }`), MustParseRule(`data = 2 { true }`), + MustParseRule(`f(1) = 2 { true }`), + MustParseRule(`f(1) = true { true }`), }, }) @@ -1120,6 +1166,11 @@ data = {"bar": 2} { true }` p` + zeroArgs := ` + package a.b.c + + p()` + assertParseModuleError(t, "multiple expressions", multipleExprs) assertParseModuleError(t, "non-equality", nonEquality) assertParseModuleError(t, "non-var name", nonVarName) @@ -1128,6 +1179,7 @@ data = {"bar": 2} { true }` assertParseModuleError(t, "bad ref (too long)", badRefLen2) assertParseModuleError(t, "negated", negated) assertParseModuleError(t, "non ref term", nonRefTerm) + assertParseModuleError(t, "zero args", zeroArgs) } func TestWildcards(t *testing.T) { @@ -1372,12 +1424,3 @@ func assertParseRule(t *testing.T, msg string, input string, correct *Rule) { } }) } - -func assertParseFunc(t *testing.T, msg string, input string, correct *Func) { - assertParseOne(t, msg, input, func(parsed interface{}) { - fn := parsed.(*Func) - if !fn.Equal(correct) { - t.Errorf("Error on test %s: funcs not equal: %v (parsed), %v (correct)", msg, fn, correct) - } - }) -} diff --git a/ast/policy.go b/ast/policy.go index cddcbee7d6..3cf51fa9e8 100644 --- a/ast/policy.go +++ b/ast/policy.go @@ -8,6 +8,7 @@ import ( "fmt" "strings" + "github.com/open-policy-agent/opa/types" "github.com/open-policy-agent/opa/util" ) @@ -101,7 +102,6 @@ type ( Package *Package `json:"package"` Imports []*Import `json:"imports,omitempty"` Rules []*Rule `json:"rules,omitempty"` - Funcs []*Func `json:"funcs,omitempty"` Comments []*Comment `json:"comments,omitempty"` } @@ -146,34 +146,12 @@ type ( Head struct { Location *Location `json:"-"` Name Var `json:"name"` + Args Args `json:"args,omitempty"` Key *Term `json:"key,omitempty"` Value *Term `json:"value,omitempty"` } - // Func represents a user function as defined in the language. Funcs define - // reusable queries than can be called like built-ins, but have access to the - // data and input documents like rules. - Func struct { - Location *Location `json:"-"` - Head *FuncHead `json:"head"` - Body Body `json:"body"` - - // Module is a pointer to the module containing this func. If the func - // was NOT created while parsing/constructing a module, this should be - // left unset. 
The pointer is not included in any standard operations - // on the func (e.g., printing, comparison, visiting, etc.) - Module *Module `json:"-"` - } - - // FuncHead represents the head of a user function. - FuncHead struct { - Location *Location `json:"-"` - Name Var `json:"name"` - Args Args `json:"args,omitempty"` - Output *Term `json:"output,omitempty"` - } - - // Args represents zero or more arguments to a user function. + // Args represents zero or more arguments to a rule. Args []*Term // Body represents one or more expressions contained inside a rule or user @@ -187,6 +165,7 @@ type ( Negated bool `json:"negated,omitempty"` Terms interface{} `json:"terms"` With []*With `json:"with,omitempty"` + Infix bool `json:"infix,omitempty"` } // With represents a modifier on an expression. @@ -489,73 +468,6 @@ func (rule *Rule) elseString() string { return strings.Join(buf, " ") } -// Compare returns an integer indicating whether f is less than, equal to, -// or greater than other. -func (f *Func) Compare(other *Func) int { - if f == nil { - if other == nil { - return 0 - } - return -1 - } else if other == nil { - return 1 - } - - if cmp := f.Head.Compare(other.Head); cmp != 0 { - return cmp - } - return f.Body.Compare(other.Body) -} - -// Copy returns a deep copy of f. -func (f *Func) Copy() *Func { - cpy := *f - cpy.Head = f.Head.Copy() - cpy.Body = f.Body.Copy() - return &cpy -} - -// Equal returns true if f is equal to other. -func (f *Func) Equal(other *Func) bool { - return f.Compare(other) == 0 -} - -// Loc returns the location of the Func in the definition. -func (f *Func) Loc() *Location { - return f.Head.Location -} - -// Path returns a ref referring to the this Func. If f is not contained in a -// module, this function panics. -func (f *Func) Path() Ref { - if f.Module == nil { - panic("assertion failed") - } - - pkg := f.Module.Package.Path - head, tail := pkg[1], pkg[2:] - h := VarTerm(string(head.Value.(String))) - - global := append(Ref{h}, tail...) - global = append(global, StringTerm(f.Head.Name.String())) - return global -} - -func (f *Func) String() string { - return f.Head.String() + " { " + f.Body.String() + " }" -} - -// NewFuncHead returns a new FuncHead objects. If args are provided, they denote -// the inputs to the function. -func NewFuncHead(name Var, out *Term, args ...*Term) *FuncHead { - head := &FuncHead{ - Name: name, - } - head.Args = args - head.Output = out - return head -} - // NewHead returns a new Head object. If args are provided, the first will be // used for the key and the second will be used for the value. func NewHead(name Var, args ...*Term) *Head { @@ -609,6 +521,9 @@ func (head *Head) Compare(other *Head) int { } else if other == nil { return 1 } + if cmp := Compare(head.Args, other.Args); cmp != 0 { + return cmp + } if cmp := Compare(head.Name, other.Name); cmp != 0 { return cmp } @@ -621,6 +536,7 @@ func (head *Head) Compare(other *Head) int { // Copy returns a deep copy of head. 
func (head *Head) Copy() *Head { cpy := *head + cpy.Args = head.Args.Copy() cpy.Key = head.Key.Copy() cpy.Value = head.Value.Copy() return &cpy @@ -633,7 +549,9 @@ func (head *Head) Equal(other *Head) bool { func (head *Head) String() string { var buf []string - if head.Key != nil { + if len(head.Args) != 0 { + buf = append(buf, head.Name.String()+head.Args.String()) + } else if head.Key != nil { buf = append(buf, head.Name.String()+"["+head.Key.String()+"]") } else { buf = append(buf, head.Name.String()) @@ -648,6 +566,7 @@ func (head *Head) String() string { // Vars returns a set of vars found in the head. func (head *Head) Vars() VarSet { vis := &VarVisitor{vars: VarSet{}} + // FIXME(tsandall): include args? if head.Key != nil { Walk(vis, head.Key) } @@ -657,70 +576,6 @@ func (head *Head) Vars() VarSet { return vis.vars } -// Compare returns an integer indicating whether h is less than, equal to, -// or greater than other. -func (h *FuncHead) Compare(other *FuncHead) int { - if h == nil { - if other == nil { - return 0 - } - return -1 - } else if other == nil { - return 1 - } - - if cmp := Compare(h.Name, other.Name); cmp != 0 { - return cmp - } - if cmp := Compare(h.Args, other.Args); cmp != 0 { - return cmp - } - return Compare(h.Output, other.Output) -} - -// Copy returns a deep copy of h. -func (h *FuncHead) Copy() *FuncHead { - cpy := *h - cpy.Args = h.Args.Copy() - cpy.Output = h.Output.Copy() - return &cpy -} - -// Equal returns true if h is equal to other. -func (h *FuncHead) Equal(other *FuncHead) bool { - return h.Compare(other) == 0 -} - -// Loc returns the location of the FuncHead in the definition. -func (h *FuncHead) Loc() *Location { - return h.Location -} - -// Vars returns a set of vars found in the FuncHead. -func (h *FuncHead) Vars() VarSet { - vars := h.ArgVars() - vars.Update(h.OutVars()) - return vars -} - -// ArgVars returns a set of vars found in the FuncHead's arguments. -func (h *FuncHead) ArgVars() VarSet { - vis := &VarVisitor{vars: VarSet{}} - Walk(vis, h.Args) - return vis.vars -} - -// OutVars returns a set of vars found in the FuncHead's output. -func (h *FuncHead) OutVars() VarSet { - vis := &VarVisitor{vars: VarSet{}} - Walk(vis, h.Output) - return vis.vars -} - -func (h *FuncHead) String() string { - return h.Name.String() + h.Args.String() + " = " + h.Output.String() -} - // Copy returns a deep copy of a. func (a Args) Copy() Args { cpy := Args{} @@ -738,6 +593,13 @@ func (a Args) String() string { return "(" + strings.Join(buf, ", ") + ")" } +// Vars returns a set of vars that appear in a. +func (a Args) Vars() VarSet { + vis := &VarVisitor{vars: VarSet{}} + Walk(vis, a) + return vis.vars +} + // NewBody returns a new Body containing the given expressions. The indices of // the immediate expressions will be reset. func NewBody(exprs ...*Expr) Body { @@ -997,15 +859,15 @@ func (expr *Expr) IsEquality() bool { return terms[0].Value.Compare(Equality.Ref()) == 0 } -// IsBuiltin returns true if this expression refers to a function. -func (expr *Expr) IsBuiltin() bool { +// IsCall returns true if this expression calls a function. +func (expr *Expr) IsCall() bool { _, ok := expr.Terms.([]*Term) return ok } -// Name returns the name of the user function or built-in this expression -// refers to. If this expression is not a function call, returns nil. -func (expr *Expr) Name() Ref { +// Operator returns the name of the function or built-in this expression refers +// to. If this expression is not a function call, returns nil. 
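A hedged sketch of the extended Head, assuming the exported fields and methods shown above; with Args populated, String renders the function form and Copy and Equal account for the arguments:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	head := &ast.Head{
		Name:  ast.Var("f"),
		Args:  ast.Args{ast.VarTerm("x"), ast.VarTerm("y")},
		Value: ast.VarTerm("z"),
	}
	fmt.Println(head)                    // f(x, y) = z
	fmt.Println(head.Equal(head.Copy())) // true
}
```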
+func (expr *Expr) Operator() Ref { terms, ok := expr.Terms.([]*Term) if !ok || len(terms) == 0 { return nil @@ -1074,14 +936,7 @@ func (expr *Expr) OutputVars(safe VarSet) VarSet { } return expr.outputVarsBuiltins(b, safe) } - - // Mark output variables as safe for user - // functions. - last := terms[len(terms)-1] - WalkVars(last, func(v Var) bool { - safe.Add(v) - return false - }) + return expr.outputVarsFunc(safe, terms) } } return VarSet{} @@ -1103,26 +958,22 @@ func (expr *Expr) String() string { name := t[0].String() bi := BuiltinMap[name] var s string - if bi != nil && len(bi.Infix) > 0 { - switch len(bi.Args) { - case 2: - s = fmt.Sprintf("%v %v %v", t[1], string(bi.Infix), t[2]) - case 3: - // Special case for "x = y z" built-ins. - if len(bi.TargetPos) == 1 && bi.TargetPos[0] == 2 { - s = fmt.Sprintf("%v = %v %v %v", t[3], t[1], string(bi.Infix), t[2]) - } + // Handle infix operators (e.g., =, !=, >=, +, /, etc.) + if bi != nil && bi.Infix != "" { + if types.Compare(bi.Decl.Result(), types.T) == 0 { + s = fmt.Sprintf("%v %v %v", t[1], bi.Infix, t[2]) + } else { + s = fmt.Sprintf("%v = %v %v %v", t[3], t[1], bi.Infix, t[2]) } } + // Handle infix call expressions. + if len(s) == 0 && expr.Infix { + s = fmt.Sprintf("%v = %v%v", t[len(t)-1], t[0], Args(t[1:len(t)-1])) + } + // Handle anything else. if len(s) == 0 { - var args []string - for _, v := range t[1:] { - args = append(args, v.String()) - } - name := string(t[0].String()) - s = fmt.Sprintf("%s(%s)", name, strings.Join(args, ", ")) + s = fmt.Sprintf("%v%v", t[0], Args(t[1:])) } - buf = append(buf, s) case *Term: @@ -1203,6 +1054,41 @@ func (expr *Expr) outputVarsEquality(safe VarSet) VarSet { return o.Diff(safe) } +func (expr *Expr) outputVarsFunc(safe VarSet, terms []*Term) VarSet { + + // Functions called with 0 or 1 args cannot produce output vars. + if len(expr.Operands()) < 2 { + return VarSet{} + } + + o := expr.outputVarsRefs(safe) + + // Find unsafe input vars. + args := Args(terms[:len(terms)-1]) + vis := NewVarVisitor().WithParams(VarVisitorParams{ + SkipClosures: true, + SkipObjectKeys: true, + SkipRefHead: true, + }) + Walk(vis, args) + unsafe := vis.Vars().Diff(o).Diff(safe) + if len(unsafe) > 0 { + return VarSet{} + } + + // Find safe output vars. 
+ vis = NewVarVisitor().WithParams(VarVisitorParams{ + SkipRefHead: true, + SkipSets: true, + SkipObjectKeys: true, + SkipClosures: true, + }) + Walk(vis, terms[len(terms)-1]) + o.Update(vis.vars) + + return o +} + func (expr *Expr) outputVarsRefs(safe VarSet) VarSet { o := VarSet{} WalkRefs(expr, func(r Ref) bool { diff --git a/ast/policy_test.go b/ast/policy_test.go index 2c4c16afdd..0473913b88 100644 --- a/ast/policy_test.go +++ b/ast/policy_test.go @@ -220,26 +220,26 @@ func TestExprOutputVars(t *testing.T) { RegisterBuiltin(&Builtin{ Name: "test_out_array", - Args: []types.Type{ + Decl: types.NewFunction( types.NewArray(nil, types.N), - }, + ), TargetPos: []int{0}, }) RegisterBuiltin(&Builtin{ Name: "test_out_set", - Args: []types.Type{ + Decl: types.NewFunction( types.NewArray(nil, types.N), - }, + ), TargetPos: []int{0}, }) RegisterBuiltin(&Builtin{ Name: "foo", - Args: []types.Type{ + Decl: types.NewFunction( types.A, types.A, - }, + ), TargetPos: []int{1}, }) @@ -321,13 +321,25 @@ func TestExprString(t *testing.T) { StringTerm("foo"), VarTerm("x"), ) + expr7.Infix = true + expr8 := &Expr{ + Terms: []*Term{ + RefTerm(VarTerm("data"), StringTerm("test"), StringTerm("f")), + IntNumberTerm(1), + VarTerm("x"), + }, + } + expr8.Infix = true + expr9 := Contains.Expr(StringTerm("foo.bar"), StringTerm(".")) assertExprString(t, expr1, "q.r[x]") assertExprString(t, expr2, "not q.r[x]") assertExprString(t, expr3, "\"a\" = 17.1") assertExprString(t, expr4, "{foo: [1, a.b]} != false") assertExprString(t, expr5, "true with foo as bar with baz as qux") assertExprString(t, expr6, "3 = 1 + 2") - assertExprString(t, expr7, "count(\"foo\", x)") + assertExprString(t, expr7, "x = count(\"foo\")") + assertExprString(t, expr8, "x = data.test.f(1)") + assertExprString(t, expr9, `contains("foo.bar", ".")`) } func TestExprBadJSON(t *testing.T) { @@ -389,23 +401,13 @@ func TestRuleHeadEquals(t *testing.T) { assertHeadsEqual(t, &Head{Name: Var("p")}, &Head{Name: Var("p")}) assertHeadsEqual(t, &Head{Key: VarTerm("x")}, &Head{Key: VarTerm("x")}) assertHeadsEqual(t, &Head{Value: VarTerm("x")}, &Head{Value: VarTerm("x")}) + assertHeadsEqual(t, &Head{Args: []*Term{VarTerm("x"), VarTerm("y")}}, &Head{Args: []*Term{VarTerm("x"), VarTerm("y")}}) // Different name/key/value assertHeadsNotEqual(t, &Head{Name: Var("p")}, &Head{Name: Var("q")}) assertHeadsNotEqual(t, &Head{Key: VarTerm("x")}, &Head{Key: VarTerm("y")}) assertHeadsNotEqual(t, &Head{Value: VarTerm("x")}, &Head{Value: VarTerm("y")}) -} - -func TestFuncHeadEquals(t *testing.T) { - assertFuncHeadsEqual(t, &FuncHead{}, &FuncHead{}) - - assertFuncHeadsEqual(t, &FuncHead{Name: Var("f")}, &FuncHead{Name: Var("f")}) - assertFuncHeadsEqual(t, &FuncHead{Args: []*Term{VarTerm("x"), VarTerm("y")}}, &FuncHead{Args: []*Term{VarTerm("x"), VarTerm("y")}}) - assertFuncHeadsEqual(t, &FuncHead{Output: ArrayTerm(VarTerm("x"), VarTerm("y"))}, &FuncHead{Output: ArrayTerm(VarTerm("x"), VarTerm("y"))}) - - assertFuncHeadsNotEqual(t, &FuncHead{Name: Var("f")}, &FuncHead{Name: Var("b")}) - assertFuncHeadsNotEqual(t, &FuncHead{Args: []*Term{VarTerm("x"), VarTerm("z")}}, &FuncHead{Args: []*Term{VarTerm("x"), VarTerm("y")}}) - assertFuncHeadsNotEqual(t, &FuncHead{Output: ArrayTerm(VarTerm("x"), VarTerm("z"))}, &FuncHead{Output: ArrayTerm(VarTerm("z"), VarTerm("y"))}) + assertHeadsNotEqual(t, &Head{Args: []*Term{VarTerm("x"), VarTerm("z")}}, &Head{Args: []*Term{VarTerm("x"), VarTerm("y")}}) } func TestRuleBodyEquals(t *testing.T) { @@ -458,37 +460,19 @@ func TestRuleString(t *testing.T) 
{ Head: NewHead("p", nil, BooleanTerm(true)), } + rule4 := &Rule{ + Head: &Head{ + Name: Var("f"), + Args: Args{VarTerm("x"), VarTerm("y")}, + Value: VarTerm("z"), + }, + Body: NewBody(Plus.Expr(VarTerm("x"), VarTerm("y"), VarTerm("z"))), + } + assertRuleString(t, rule1, `p { "foo" = "bar" }`) assertRuleString(t, rule2, `p[x] = y { "foo" = x; not a.b[x]; "b" = y }`) assertRuleString(t, rule3, `default p = true`) -} - -func TestFuncString(t *testing.T) { - fn1 := &Func{ - Head: NewFuncHead(Var("foo"), VarTerm("y")), - Body: NewBody( - Equality.Expr(VarTerm("y"), StringTerm("bar")), - ), - } - - fn2 := &Func{ - Head: NewFuncHead(Var("foo"), VarTerm("y"), VarTerm("x")), - Body: NewBody( - Equality.Expr(VarTerm("y"), VarTerm("x")), - ), - } - - fn3 := &Func{ - Head: NewFuncHead(Var("foo"), ArrayTerm(VarTerm("y"), VarTerm("z")), VarTerm("x")), - Body: NewBody( - Equality.Expr(VarTerm("y"), IntNumberTerm(5)), - MustParseExpr(`z = x + y`), - ), - } - - assertFuncString(t, fn1, `foo() = y { y = "bar" }`) - assertFuncString(t, fn2, `foo(x) = y { y = x }`) - assertFuncString(t, fn3, `foo(x) = [y, z] { y = 5; z = x + y }`) + assertRuleString(t, rule4, "f(x, y) = z { z = x + y }") } func TestModuleString(t *testing.T) { @@ -614,28 +598,9 @@ func assertHeadsNotEqual(t *testing.T, a, b *Head) { } } -func assertFuncHeadsEqual(t *testing.T, a, b *FuncHead) { - if !a.Equal(b) { - t.Errorf("FuncHeads are not equal (expected equal): a=%v b=%v", a, b) - } -} - -func assertFuncHeadsNotEqual(t *testing.T, a, b *FuncHead) { - if a.Equal(b) { - t.Errorf("FuncHeads are equal (expected not equal): a=%v b=%v", a, b) - } -} - func assertRuleString(t *testing.T, rule *Rule, expected string) { result := rule.String() if result != expected { t.Errorf("Expected %v but got %v", expected, result) } } - -func assertFuncString(t *testing.T, fn *Func, expected string) { - result := fn.String() - if result != expected { - t.Errorf("Expected %v but got %v", expected, result) - } -} diff --git a/ast/rego.peg b/ast/rego.peg index 657e776754..769d7d2d8a 100644 --- a/ast/rego.peg +++ b/ast/rego.peg @@ -91,6 +91,35 @@ func makeArray(head interface{}, tail interface{}, loc *Location) (*Term, error) return arr, nil } +func makeArgs(head interface{}, tail interface{}, loc *Location) (Args, error) { + args := Args{} + if head == nil { + return nil, nil + } + args = append(args, head.(*Term)) + tailSlice := tail.([]interface{}) + for _, v := range tailSlice { + s := v.([]interface{}) + args = append(args, s[len(s) - 1].(*Term)) + } + return args, nil +} + +func makeInfixCallExpr(operator interface{}, args interface{}, output interface{}) (*Expr, error) { + expr := &Expr{} + a := args.(Args) + terms := make([]*Term, len(a)+2) + terms[0] = operator.(*Term) + dst := terms[1:] + for i := 0; i < len(a); i++ { + dst[i] = a[i] + } + terms[len(terms)-1] = output.(*Term) + expr.Terms = terms + expr.Infix = true + return expr, nil +} + } Program <- _ vals:(head:Stmt tail:(ws Stmt)*)? _ EOF { @@ -111,7 +140,7 @@ Program <- _ vals:(head:Stmt tail:(ws Stmt)*)? 
_ EOF { return program{buf, c.globalStore[commentsKey]}, nil } -Stmt <- val:(Package / Import / Rules / UserFunc / Body / Comment) { +Stmt <- val:(Package / Import / Rules / Body / Comment) { return val, nil } @@ -252,6 +281,7 @@ NormalRules <- head:RuleHead _ b:(NonEmptyBraceEnclosedBody ( _ RuleExt)* ) { Location: re.loc, Head: &Head{ Name: prev.Head.Name, + Args: prev.Head.Args.Copy(), Value: re.term, Location: re.term.Location, }, @@ -265,66 +295,23 @@ NormalRules <- head:RuleHead _ b:(NonEmptyBraceEnclosedBody ( _ RuleExt)* ) { return rules, nil } -UserFunc <- head:FuncHead _ b:NonEmptyBraceEnclosedBody { - - if head == nil { - return nil, nil - } +RuleHead <- name:Var args:( _ "(" _ Args _ ")" _ )? key:( _ "[" _ Term _ "]" _ )? value:( _ "=" _ Term )? { - f := &Func{ - Location: currentLocation(c), - Head: head.(*FuncHead), - Body: b.(Body), - } - - return f, nil -} - -FuncHead <- name:Var args:FuncArgs output:( _ "=" _ Term )? { - - head := &FuncHead{} + head := &Head{} head.Location = currentLocation(c) head.Name = name.(*Term).Value.(Var) - head.Args = args.(Args) - - if output != nil { - valueSlice := output.([]interface{}) - head.Output = valueSlice[len(valueSlice)-1].(*Term) - } - - return head, nil -} -FuncArgs <- _ "(" _ head:ArgTerm? tail:(_ ',' _ ArgTerm)* _ ")" _ { - args := Args{} - if head == nil { - return args, nil - } + if args != nil && key != nil { + return nil, fmt.Errorf("partial %v/%v %vs cannot take arguments", SetTypeName, ObjectTypeName, RuleTypeName) + } - first := head.(*Term) - first.Location = currentLocation(c) - args = append(args, first) + if args != nil { + argSlice := args.([]interface{}) + head.Args = argSlice[3].(Args) + } - tailSlice := tail.([]interface{}) - for _, v := range tailSlice { - s := v.([]interface{}) - arg := s[len(s) - 1].(*Term) - arg.Location = currentLocation(c) - args = append(args, arg) - } - - return args, nil -} - -RuleHead <- name:Var key:( _ "[" _ Term _ "]" _ )? value:( _ "=" _ Term )? { - - head := &Head{} - - head.Location = currentLocation(c) - head.Name = name.(*Term).Value.(Var) - - if key != nil { + if key != nil { keySlice := key.([]interface{}) // Head definition above describes the "key" slice. We care about the "Term" element. head.Key = keySlice[3].(*Term) @@ -337,8 +324,7 @@ RuleHead <- name:Var key:( _ "[" _ Term _ "]" _ )? value:( _ "=" _ Term )? { } if key == nil && value == nil { - head.Value = BooleanTerm(true) - head.Value.Location = head.Location + head.Value = BooleanTerm(true).SetLocation(head.Location) } if key != nil && value != nil { @@ -352,6 +338,10 @@ RuleHead <- name:Var key:( _ "[" _ Term _ "]" _ )? value:( _ "=" _ Term )? { return head, nil } +Args <- _ head:Term tail:(_ ',' _ Term)* _ ','? _ { + return makeArgs(head, tail, currentLocation(c)) +} + Else <- "else" val:( _ "=" _ Term )? b:( _ NonEmptyBraceEnclosedBody ) { bs := b.([]interface{}) body := bs[1].(Body) @@ -407,10 +397,15 @@ NonWhitespaceBody <- head:Literal tail:( _ ";" _ Literal)* { } Literal <- neg:( "not" ws )? val:Expr with:( ws With (ws With)* )? 
{ - expr := &Expr{} + var expr *Expr + switch val := val.(type) { + case *Expr: + expr = val + case *Term: + expr = &Expr{Terms: val} + } expr.Location = currentLocation(c) expr.Negated = neg != nil - expr.Terms = val if with != nil { sl := with.([]interface{}) @@ -440,14 +435,24 @@ With <- "with" ws target:Term ws "as" ws value:Term { return with, nil } -Expr <- (InfixArithExpr / InfixArithExprReverse) / InfixExpr / PrefixExpr / Term +Expr <- InfixExpr / PrefixExpr / Term + +InfixExpr <- (InfixCallExpr / InfixCallExprReverse) / (InfixArithExpr / InfixArithExprReverse) / InfixRelationExpr + +InfixCallExpr <- output:Term _ "=" _ operator:Operator _ "(" _ args:Args _ ")" { + return makeInfixCallExpr(operator, args, output) +} + +InfixCallExprReverse <- operator:Operator _ "(" _ args:Args _ ")" _ "=" _ output:Term { + return makeInfixCallExpr(operator, args, output) +} -InfixArithExpr <- output:Term _ "=" _ left:Term _ op:ArithInfixOp _ right:Term { - return []*Term{op.(*Term), left.(*Term), right.(*Term), output.(*Term)}, nil +InfixArithExpr <- output:Term _ "=" _ left:Term _ operator:ArithInfixOp _ right:Term { + return makeInfixCallExpr(operator, Args{left.(*Term), right.(*Term)}, output) } -InfixArithExprReverse <- left:Term _ op:ArithInfixOp _ right:Term _ "=" _ output:Term { - return []*Term{op.(*Term), left.(*Term), right.(*Term), output.(*Term)}, nil +InfixArithExprReverse <- left:Term _ operator:ArithInfixOp _ right:Term _ "=" _ output:Term { + return makeInfixCallExpr(operator, Args{left.(*Term), right.(*Term)}, output) } ArithInfixOp <- val:("+" / "-" / "*" / "/" / "&" / "|" / "-") { @@ -462,11 +467,18 @@ ArithInfixOp <- val:("+" / "-" / "*" / "/" / "&" / "|" / "-") { return operator, nil } -InfixExpr <- left:Term _ op:InfixOp _ right:Term { - return []*Term{op.(*Term), left.(*Term), right.(*Term)}, nil +InfixRelationExpr <- left:Term _ operator:InfixRelationOp _ right:Term { + return &Expr{ + Terms: []*Term{ + operator.(*Term), + left.(*Term), + right.(*Term), + }, + Infix: true, + }, nil } -InfixOp <- val:("=" / "!=" / "<=" / ">=" / "<" / ">") { +InfixRelationOp <- val:("=" / "!=" / "<=" / ">=" / "<" / ">") { op := string(c.text) for _, b := range Builtins { if string(b.Infix) == op { @@ -483,8 +495,9 @@ PrefixExpr <- SetEmpty / Call Call <- name:Operator "(" _ head:Term? tail:( _ "," _ Term )* _ ")" { buf := []*Term{name.(*Term)} if head == nil { - return buf, nil + return &Expr{Terms: buf}, nil } + buf = append(buf, head.(*Term)) // PrefixExpr above describes the "tail" structure. We only care about the "Term" elements. @@ -493,7 +506,8 @@ Call <- name:Operator "(" _ head:Term? tail:( _ "," _ Term )* _ ")" { s := v.([]interface{}) buf = append(buf, s[len(s) - 1].(*Term)) } - return buf, nil + + return &Expr{Terms: buf}, nil } Operator <- val:(Ref / Var) { @@ -546,25 +560,6 @@ Array <- '[' _ head:Term? tail:(_ ',' _ Term)* _ ','? _ ']' { return makeArray(head, tail, currentLocation(c)) } -// Function arguments can only be declared as scalars, vars, objects or arrays -// (recursively restricted). -// Anything else really doesn't make sense. Note that the values passed -// to the functions can be any term, whenever a declared input variable is -// encountered, it should just wrap whatever type is left on the runtime end. -ArgTerm <- val:(Scalar / Var / ArgObject / ArgArray) { - return val, nil -} - -ArgObject <- '{' _ head:(ArgKey _ ':' _ ArgTerm)? tail:( _ ',' _ ArgKey _ ':' _ ArgTerm )* _ ','? 
_ '}' { - return makeObject(head, tail, currentLocation(c)) -} - -ArgKey <- Scalar - -ArgArray <- '[' _ head:ArgTerm? tail:(_ ',' _ ArgTerm)* _ ','? _ ']' { - return makeArray(head, tail, currentLocation(c)) -} - Set <- SetEmpty / SetNonEmpty SetEmpty <- "set(" _ ")" { diff --git a/ast/strings.go b/ast/strings.go index c97d008580..d115dd12e2 100644 --- a/ast/strings.go +++ b/ast/strings.go @@ -31,8 +31,6 @@ const ( BodyTypeName = "body" HeadTypeName = "head" RuleTypeName = "rule" - FuncTypeName = "func" - FuncHeadTypeName = "funchead" ArgsTypeName = "args" ImportTypeName = "import" PackageTypeName = "package" diff --git a/ast/transform.go b/ast/transform.go index 2779ca263b..7c473e1bc6 100644 --- a/ast/transform.go +++ b/ast/transform.go @@ -108,6 +108,9 @@ func Transform(t Transformer, x interface{}) (interface{}, error) { if y.Name, err = transformVar(t, y.Name); err != nil { return nil, err } + if y.Args, err = transformArgs(t, y.Args); err != nil { + return nil, err + } if y.Key != nil { if y.Key, err = transformTerm(t, y.Key); err != nil { return nil, err @@ -119,27 +122,6 @@ func Transform(t Transformer, x interface{}) (interface{}, error) { } } return y, nil - case *Func: - if y.Head, err = transformFuncHead(t, y.Head); err != nil { - return nil, err - } - if y.Body, err = transformBody(t, y.Body); err != nil { - return nil, err - } - return y, nil - case *FuncHead: - if y.Name, err = transformVar(t, y.Name); err != nil { - return nil, err - } - for i := range y.Args { - if y.Args[i], err = transformTerm(t, y.Args[i]); err != nil { - return nil, err - } - } - if y.Output, err = transformTerm(t, y.Output); err != nil { - return nil, err - } - return y, nil case Args: for i := range y { if y[i], err = transformTerm(t, y[i]); err != nil { @@ -305,17 +287,16 @@ func transformHead(t Transformer, head *Head) (*Head, error) { } return h, nil } - -func transformFuncHead(t Transformer, head *FuncHead) (*FuncHead, error) { - y, err := Transform(t, head) +func transformArgs(t Transformer, args Args) (Args, error) { + y, err := Transform(t, args) if err != nil { return nil, err } - h, ok := y.(*FuncHead) + a, ok := y.(Args) if !ok { - return nil, fmt.Errorf("illegal transform: %T != %T", head, y) + return nil, fmt.Errorf("illegal transform: %T != %T", args, y) } - return h, nil + return a, nil } func transformBody(t Transformer, body Body) (Body, error) { diff --git a/ast/visit.go b/ast/visit.go index 035c4d91ce..89961a96b3 100644 --- a/ast/visit.go +++ b/ast/visit.go @@ -28,9 +28,6 @@ func Walk(v Visitor, x interface{}) { for _, r := range x.Rules { Walk(w, r) } - for _, f := range x.Funcs { - Walk(w, f) - } for _, c := range x.Comments { Walk(w, c) } @@ -47,6 +44,7 @@ func Walk(v Visitor, x interface{}) { } case *Head: Walk(w, x.Name) + Walk(w, x.Args) if x.Key != nil { Walk(w, x.Key) } @@ -57,16 +55,9 @@ func Walk(v Visitor, x interface{}) { for _, e := range x { Walk(w, e) } - case *Func: - Walk(w, x.Head) - Walk(w, x.Body) - case *FuncHead: - Walk(w, x.Name) - Walk(w, x.Args) - Walk(w, x.Output.Value) case Args: for _, t := range x { - Walk(w, t.Value) + Walk(w, t) } case *Expr: switch ts := x.Terms.(type) { @@ -164,18 +155,6 @@ func WalkRules(x interface{}, f func(*Rule) bool) { Walk(vis, x) } -// WalkFuncs calls the function f on all user functions under x. If the function f -// returns true, AST nodes under the last node will not be visited. 
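A small sketch, assuming NewVarVisitor and Walk are used as elsewhere in this change, showing that function argument variables are now reachable through the generic walk rather than a Func-specific visitor:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	head := ast.MustParseRule(`f(x) = y { y = x }`).Head
	vis := ast.NewVarVisitor()
	ast.Walk(vis, head.Args) // Head.Args is now handled as its own case in Walk
	fmt.Println(vis.Vars())  // a VarSet containing x
}
```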
-func WalkFuncs(x interface{}, f func(*Func) bool) { - vis := &GenericVisitor{func(x interface{}) bool { - if fn, ok := x.(*Func); ok { - return f(fn) - } - return false - }} - Walk(vis, x) -} - // WalkVars calls the function f on all vars under x. If the function f // returns true, AST nodes under the last node will not be visited. func WalkVars(x interface{}, f func(Var) bool) { @@ -321,18 +300,6 @@ func (vis *VarVisitor) Visit(v interface{}) Visitor { } } } - if vis.params.SkipFuncVars { - if f, ok := v.(*Func); ok { - Walk(vis, f.Body) - return nil - } - if _, ok := v.(*FuncHead); ok { - return nil - } - if _, ok := v.(Args); ok { - return nil - } - } if v, ok := v.(Var); ok { vis.vars.Add(v) } diff --git a/ast/visit_test.go b/ast/visit_test.go index 6200a804dd..6ca3bd3dcd 100644 --- a/ast/visit_test.go +++ b/ast/visit_test.go @@ -63,6 +63,7 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } rule head t + args term x term @@ -228,6 +229,7 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } rule head p + args # not counted term true @@ -238,6 +240,7 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } rule head p + args # not counted term true @@ -248,6 +251,7 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } rule head p + args # not counted term true @@ -258,12 +262,13 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } func head fn - term - array - term - x - term - y + args + term + array + term + x + term + y term z body @@ -288,8 +293,8 @@ fn([x, y]) = z { json.unmarshal(x, z); z > y } term y */ - if len(vis.elems) != 240 { - t.Errorf("Expected exactly 240 elements in AST but got %d: %v", len(vis.elems), vis.elems) + if len(vis.elems) != 246 { + t.Errorf("Expected exactly 246 elements in AST but got %d: %v", len(vis.elems), vis.elems) } } diff --git a/docs/book/how-do-i-write-policies.md b/docs/book/how-do-i-write-policies.md index 2e05727652..a0b3ed35b3 100644 --- a/docs/book/how-do-i-write-policies.md +++ b/docs/book/how-do-i-write-policies.md @@ -868,9 +868,9 @@ undefined In some cases, having an undefined result for a document is not desirable. In those cases, policies can use the [Default Keyword](#default) to provide a fallback value. -## User Functions +### Functions -Rego supports user defined functions that can be called with the same semantics as [Built-in Functions](#built-ins). User functions do not need to be imported, and can be accessed by their name preceded by their package name. They have access to both the [the data Document](/how-does-opa-work.md#the-data-document) and [the input Document](/how-does-opa-work.md#the-input-document). +Rego supports user-defined functions that can be called with the same semantics as [Built-in Functions](#built-ins). They have access to both the [the data Document](/how-does-opa-work.md#the-data-document) and [the input Document](/how-does-opa-work.md#the-input-document). For example, the following function will return the result of trimming the spaces from a string and then splitting it by periods. @@ -888,11 +888,8 @@ For example, the following function will return the result of trimming the space +---------------+ ``` -In general, functions may have an arbitrary number of inputs, but exactly one output. All function inputs must be either Scalars, [Variables](#variables), Arrays or Objects. The contents of Arrays and Objects passed to functions also must obey this restriction, applied recursively. +Functions may have an arbitrary number of inputs, but exactly one output. Function arguments may be any kind of term. 
For example, suppose we have the following function: -It is worth noting that these restrictions only apply to the declared inputs of functions. When calling a function, any term that matches the declared terms may be passed. If the passed term has deeper components than the declared term, the deep parts will be folded into the appropriate variables in the declaration. - -As an example, suppose we have the user defined function below: ``` foo([x, {"bar": y}]) = z { z = {x: y} @@ -927,11 +924,13 @@ The outputs of user functions have some additional limitations, namely that they | } | > p([1, 2, 3], out) -p([1, 2, 3], out): eval_conflict_error: function "repl.p" produces conflicting outputs +p([1, 2, 3], out): eval_conflict_error: completely defined rules must produce exactly one value ``` It is possible in Rego to define a function more than once, to achieve a conditional selection of which function to execute: +Functions can be defined incrementally. + ```ruby > p(1, x) = y { | y = x @@ -956,6 +955,7 @@ It is possible in Rego to define a function more than once, to achieve a conditi ``` A given function call will execute all functions that match the signature given. If a call matches multiple functions, they must produce the same output, or else a conflict error will occur: + ```ruby > p(1, x) = y { | y = x @@ -966,7 +966,7 @@ A given function call will execute all functions that match the signature given. | } | > p(1, 2, y) -p(1, 2, y): eval_conflict_error: function "repl.p" produces conflicting outputs +p(1, 2, y): eval_conflict_error: completely defined rules must produce exactly one value ``` On the other hand, if a call matches no functions, then the result is undefined, and the query containing it will become unsatisfied. @@ -985,8 +985,6 @@ On the other hand, if a call matches no functions, then the result is undefined, false ``` -For a formal definition of the function syntax (as well as formal definitions for Array, Object and Scalar), see the [Language Reference](/language-reference.md#grammar) document. - ## Negation To generate the content of a [Virtual Document](/how-does-opa-work.md#virtual-documents), OPA attempts to bind variables in the body of the rule such that all expressions in the rule evaluate to True. diff --git a/docs/book/language-reference.md b/docs/book/language-reference.md index ed8cb11fee..3d71388900 100644 --- a/docs/book/language-reference.md +++ b/docs/book/language-reference.md @@ -136,11 +136,9 @@ package = "package" ref import = "import" package [ "as" var ] policy = { rule } rule = [ "default" ] rule-head { rule-body } -rule-head = var [ "[" term "]" ] [ = term ] +rule-head = var [ "(" rule-args ")" ] [ "[" term "]" ] [ = term ] +rule-args = term { "," term } rule-body = [ else [ = term ] ] "{" query "}" -function = func-head func-body -func-head = var "(" [ arg-term { , arg-term } ] ")" = term -func-body = "{" instructions "}" query = literal { ";" | [\r\n] literal } literal = ( expr | "not" expr ) { with-modifier } with-modifier = "with" term "as" term @@ -149,7 +147,6 @@ expr = term | expr-built-in | expr-infix expr-built-in = var [ "." 
var ] "(" [ term { , term } ] ")" expr-infix = [ term "=" ] term infix-operator term term = ref | var | scalar | array | object | set | array-compr -arg-term = scalar | var | arg-object | arg-array array-compr = "[" term "|" rule-body "]" set-compr = "{" term "|" rule-body "}" object-compr = "{" object-item "|" rule-body "}" @@ -166,11 +163,8 @@ scalar = string | NUMBER | TRUE | FALSE | NULL string = STRING | raw-string raw-string = "`" { CHAR-"`" } "`" array = "[" term { "," term } "]" -arg-array = "[" arg-term { "," arg-term } "]" object = "{" object-item { "," object-item } "}" object-item = ( scalar | ref | var ) ":" term -arg-object = "{" arg-object-item { "," arg-object-item } "}" -arg-object-item = ( scalar | ref ) ":" arg-term set = empty-set | non-empty-set non-empty-set = "{" term { "," term } "}" empty-set = "set(" ")" diff --git a/format/format.go b/format/format.go index 16aab7b26e..83f75809a3 100644 --- a/format/format.go +++ b/format/format.go @@ -11,6 +11,7 @@ import ( "sort" "github.com/open-policy-agent/opa/ast" + "github.com/open-policy-agent/opa/types" ) // Bytes formats Rego source code. The bytes provided do not have to be an entire @@ -75,7 +76,7 @@ func Ast(x interface{}) (formatted []byte, err error) { default: assertHasLocation(x) } - case *ast.Package, *ast.Import, *ast.Rule, *ast.Func, *ast.Head, ast.Body, *ast.Expr, *ast.With, *ast.Comment: + case *ast.Package, *ast.Import, *ast.Rule, *ast.Head, ast.Body, *ast.Expr, *ast.With, *ast.Comment: assertHasLocation(x) } return false @@ -93,10 +94,6 @@ func Ast(x interface{}) (formatted []byte, err error) { w.writeRule(x, nil) case *ast.Head: w.writeHead(x, nil) - case *ast.Func: - w.writeFunc(x, nil) - case *ast.FuncHead: - w.writeFuncHead(x, nil) case ast.Body: w.writeBody(x, nil) case *ast.Expr: @@ -134,7 +131,7 @@ func (w *writer) writeModule(module *ast.Module) { case *ast.Comment: comments = append(comments, x) return true - case *ast.Import, *ast.Rule, *ast.Func: + case *ast.Import, *ast.Rule: others = append(others, x) return true case *ast.Package: @@ -160,14 +157,11 @@ func (w *writer) writeModule(module *ast.Module) { comments = w.writePackage(pkg, comments) var imports []*ast.Import var rules []*ast.Rule - var funcs []*ast.Func for len(others) > 0 { imports, others = gatherImports(others) comments = w.writeImports(imports, comments) rules, others = gatherRules(others) comments = w.writeRules(rules, comments) - funcs, others = gatherFuncs(others) - comments = w.writeFuncs(funcs, comments) } for _, c := range comments { @@ -230,7 +224,16 @@ func (w *writer) writeRule(rule *ast.Rule, comments []*ast.Comment) []*ast.Comme w.up() comments = w.writeBody(rule.Body, comments) - comments = w.insertComments(comments, closingLoc('[', ']', '{', '}', rule.Location)) + + var close *ast.Location + + if len(rule.Head.Args) > 0 { + close = closingLoc('(', ')', '{', '}', rule.Location) + } else { + close = closingLoc('[', ']', '{', '}', rule.Location) + } + + comments = w.insertComments(comments, close) w.down() w.startLine() @@ -246,70 +249,27 @@ func (w *writer) writeRule(rule *ast.Rule, comments []*ast.Comment) []*ast.Comme func (w *writer) writeHead(head *ast.Head, comments []*ast.Comment) []*ast.Comment { w.write(head.Name.String()) + if len(head.Args) > 0 { + w.write("(") + var args []interface{} + for _, arg := range head.Args { + args = append(args, arg) + } + comments = w.writeIterable(args, head.Location, comments, w.listWriter()) + w.write(")") + } if head.Key != nil { w.write("[") comments = w.writeTerm(head.Key, 
comments) w.write("]") } - if head.Value != nil { + if head.Value != nil && ast.Compare(head.Value, ast.BooleanTerm(true)) != 0 { w.write(" = ") comments = w.writeTerm(head.Value, comments) } return comments } -func (w *writer) writeFuncs(funcs []*ast.Func, comments []*ast.Comment) []*ast.Comment { - for _, fn := range funcs { - comments = w.insertComments(comments, fn.Location) - comments = w.writeFunc(fn, comments) - w.blankLine() - } - return comments -} - -func (w *writer) writeFunc(fn *ast.Func, comments []*ast.Comment) []*ast.Comment { - if fn == nil { - return comments - } - - w.startLine() - comments = w.writeFuncHead(fn.Head, comments) - - w.write(" {") - w.endLine() - w.up() - - comments = w.writeBody(fn.Body, comments) - comments = w.insertComments(comments, closingLoc('(', ')', '{', '}', fn.Location)) - - w.down() - w.startLine() - w.write("}") - return comments -} - -func (w *writer) writeFuncHead(head *ast.FuncHead, comments []*ast.Comment) []*ast.Comment { - w.write(head.Name.String()) - - var args []interface{} - for _, arg := range head.Args { - args = append(args, arg) - } - - w.write("(") - comments = w.writeIterable(args, head.Location, comments, w.listWriter()) - w.write(")") - - // If a function's output is the value true, it can be written in shorthand - // as a void function. Formatting such functions should use the shorthand. - if head.Output.Equal(ast.BooleanTerm(true)) { - return comments - } - - w.write(" = ") - return w.writeTerm(head.Output, comments) -} - func (w *writer) insertComments(comments []*ast.Comment, loc *ast.Location) []*ast.Comment { before, at, comments := partitionComments(comments, loc) w.writeComments(before) @@ -344,7 +304,7 @@ func (w *writer) writeExpr(expr *ast.Expr, comments []*ast.Comment) []*ast.Comme switch t := expr.Terms.(type) { case []*ast.Term: - comments = w.writeFunctionCall(t, comments) + comments = w.writeFunctionCall(expr, comments) case *ast.Term: comments = w.writeTerm(t, comments) } @@ -367,28 +327,46 @@ func (w *writer) writeExpr(expr *ast.Expr, comments []*ast.Comment) []*ast.Comme return comments } -func (w *writer) writeFunctionCall(t []*ast.Term, comments []*ast.Comment) []*ast.Comment { - name := t[0].Value.String() - bi := ast.BuiltinMap[name] - if bi != nil && len(bi.Infix) > 0 { - switch len(bi.Args) { - case 3: - comments = w.writeTerm(t[3], comments) +func (w *writer) writeFunctionCall(expr *ast.Expr, comments []*ast.Comment) []*ast.Comment { + + terms := expr.Terms.([]*ast.Term) + + if expr.Infix { + name := terms[0].Value.String() + if bi, ok := ast.BuiltinMap[name]; ok { + // Handle relational operators (=, !=, >, etc.) + if types.Compare(bi.Decl.Result(), types.T) == 0 { + comments = w.writeTerm(terms[1], comments) + w.write(" " + string(bi.Infix) + " ") + return w.writeTerm(terms[2], comments) + } + // Handle arithmetic operators (+, *, &, etc.) 
+			comments = w.writeTerm(terms[3], comments)
 			w.write(" = ")
-		fallthrough
-		case 2:
-			comments = w.writeTerm(t[1], comments)
+			comments = w.writeTerm(terms[1], comments)
 			w.write(" " + string(bi.Infix) + " ")
-			return w.writeTerm(t[2], comments)
+			return w.writeTerm(terms[2], comments)
+		}
+		comments = w.writeTerm(terms[len(terms)-1], comments)
+		w.write(" = " + string(terms[0].String()) + "(")
+		for i := 1; ; i++ {
+			comments = w.writeTerm(terms[i], comments)
+			if i < len(terms)-2 {
+				w.write(", ")
+			} else {
+				w.write(")")
+				break
+			}
 		}
+		return comments
 	}
-	w.write(string(t[0].String()) + "(")
-	for _, v := range t[1 : len(t)-1] {
+	w.write(string(terms[0].String()) + "(")
+	for _, v := range terms[1 : len(terms)-1] {
 		comments = w.writeTerm(v, comments)
 		w.write(", ")
 	}
-	comments = w.writeTerm(t[len(t)-1], comments)
+	comments = w.writeTerm(terms[len(terms)-1], comments)
 	w.write(")")
 	return comments
 }
@@ -704,7 +682,7 @@ loop:
 		switch x := others[i].(type) {
 		case *ast.Import:
 			imports = append(imports, x)
-		case *ast.Rule, *ast.Func:
+		case *ast.Rule:
 			break loop
 		}
 	}
@@ -718,27 +696,13 @@ loop:
 		switch x := others[i].(type) {
 		case *ast.Rule:
 			rules = append(rules, x)
-		case *ast.Import, *ast.Func:
+		case *ast.Import:
 			break loop
 		}
 	}
 	return rules, others[i:]
 }
 
-func gatherFuncs(others []interface{}) (funcs []*ast.Func, rest []interface{}) {
-	i := 0
-loop:
-	for ; i < len(others); i++ {
-		switch x := others[i].(type) {
-		case *ast.Func:
-			funcs = append(funcs, x)
-		case *ast.Rule, *ast.Import:
-			break loop
-		}
-	}
-	return funcs, others[i:]
-}
-
 func locLess(a, b interface{}) bool {
 	return locCmp(a, b) < 0
 }
@@ -777,9 +741,7 @@ func getLoc(x interface{}) *ast.Location {
 func closingLoc(skipOpen, skipClose, open, close byte, loc *ast.Location) *ast.Location {
 	i, offset := 0, 0
 
-	// Functions and Rules can have composites as their inputs. To avoid
-	// counting the braces around objects and sets, the [] in rule heads and
-	// the () in function heads can be skipped past before scanning.
+	// Skip past parens/brackets/braces in rule heads.
 	if skipOpen > 0 {
 		i, offset = skipPast(skipOpen, skipClose, loc)
 	}
diff --git a/format/format_test.go b/format/format_test.go
index 5cae8734f5..c74e714ac1 100644
--- a/format/format_test.go
+++ b/format/format_test.go
@@ -70,8 +70,8 @@ func TestFormatSource(t *testing.T) {
 				t.Fatalf("Failed to format file: %v", err)
 			}
 
-			if !bytes.Equal(expected, formatted) {
-				t.Fatalf("Formatted bytes did not match expected:\n%s", string(formatted))
+			if ln, at := differsAt(formatted, expected); ln != 0 {
+				t.Fatalf("Expected formatted bytes to equal expected bytes but differed near line %d / byte %d:\n%s", ln, at, formatted)
 			}
 
 			if _, err := ast.ParseModule(rego+".tmp", string(formatted)); err != nil {
@@ -83,10 +83,30 @@ func TestFormatSource(t *testing.T) {
 				t.Fatalf("Failed to double format file")
 			}
 
-			if !bytes.Equal(expected, formatted) {
-				t.Fatal("Formatted bytes did not match expected")
+			if ln, at := differsAt(formatted, expected); ln != 0 {
+				t.Fatalf("Expected roundtripped bytes to equal expected bytes but differed near line %d / byte %d:\n%s", ln, at, formatted)
 			}
 		})
 	}
 }
+
+func differsAt(a, b []byte) (int, int) {
+	if bytes.Equal(a, b) {
+		return 0, 0
+	}
+	minLen := len(a)
+	if minLen > len(b) {
+		minLen = len(b)
+	}
+	ln := 1
+	for i := 0; i < minLen; i++ {
+		if a[i] == '\n' {
+			ln++
+		}
+		if a[i] != b[i] {
+			return ln, i
+		}
+	}
+	return ln, minLen
+}
diff --git a/format/testfiles/test.rego b/format/testfiles/test.rego
index d3ee1d3a93..f212ebd8de 100644
--- a/format/testfiles/test.rego
+++ b/format/testfiles/test.rego
@@ -14,6 +14,10 @@ import data.f.g
 default foo = false
 foo[x] {
 	not x = g
+	f(x) = 1
+	g(
+		x, "foo"
+	) = z
 }
 
 globals = {"foo": "bar",
@@ -66,7 +70,7 @@ string
 is
 on
 multiple lines`
-foo([x, y,
+fn2([x, y,
 z], {"foo": a}) = b {
 	split(x, y, c)
 	trim(a, z, d) # function comment 1
diff --git a/format/testfiles/test.rego.formatted b/format/testfiles/test.rego.formatted
index 850cdfd93f..286e0c5408 100644
--- a/format/testfiles/test.rego.formatted
+++ b/format/testfiles/test.rego.formatted
@@ -15,6 +15,8 @@ default foo = false
 
 foo[x] {
 	not x = g
+	1 = f(x)
+	z = g(x, "foo")
 }
 
 globals = {
@@ -72,10 +74,13 @@ string
 is
 on
 multiple lines`
-foo([
-	x, y,
-	z,
-], {"foo": a}) = b {
+fn2(
+	[
+		x, y,
+		z,
+	],
+	{"foo": a},
+) = b {
 	split(x, y, c)
 	trim(a, z, d) # function comment 1
 	split(c[0], d, b)
diff --git a/rego/rego_test.go b/rego/rego_test.go
index c862e1625b..1975725c39 100644
--- a/rego/rego_test.go
+++ b/rego/rego_test.go
@@ -55,9 +55,9 @@ func TestRegoCancellation(t *testing.T) {
 
 	ast.RegisterBuiltin(&ast.Builtin{
 		Name: "test.sleep",
-		Args: []types.Type{
+		Decl: types.NewFunction(
 			types.S,
-		},
+		),
 	})
 
 	topdown.RegisterFunctionalBuiltinVoid1("test.sleep", func(a ast.Value) error {
diff --git a/repl/repl.go b/repl/repl.go
index 9d54a1875d..12b2bc55c5 100644
--- a/repl/repl.go
+++ b/repl/repl.go
@@ -447,16 +447,17 @@ func (r *REPL) cmdUnset(ctx context.Context, args []string) error {
 
 	term, err := ast.ParseTerm(args[0])
 	if err != nil {
-		return newBadArgsErr("argument must identify a rule or function")
+		return newBadArgsErr("argument must identify a rule")
 	}
 
 	v, ok := term.Value.(ast.Var)
 	if !ok {
-		if !ast.RootDocumentRefs.Contains(term) {
-			return r.unsetFunc(ctx, term.Value)
+		ref, ok := term.Value.(ast.Ref)
+		if !ok || !ast.RootDocumentNames.Contains(ref[0]) {
+			return newBadArgsErr("arguments must identify a rule")
 		}
-		v = term.Value.(ast.Ref)[0].Value.(ast.Var)
+		v = ref[0].Value.(ast.Var)
 	}
 
 	return r.unsetRule(ctx, v)
@@ -482,29 +483,6 @@ func (r *REPL) unsetRule(ctx context.Context, v ast.Var) error {
 	return r.recompile(ctx, cpy)
 }
 
-func (r *REPL) unsetFunc(ctx context.Context, v ast.Value) error {
-	ref, ok := v.(ast.Ref)
-	if !ok {
-		return newBadArgsErr("arguments must identify a rule or function")
-	}
-
-	mod := r.modules[r.currentModuleID]
-	funcs := []*ast.Func{}
-	for _, f := range mod.Funcs {
-		if f.Path().String() != ref.String() {
-			funcs = append(funcs, f)
-		}
-	}
-	if len(funcs) == len(mod.Funcs) {
-		fmt.Fprintln(r.output, "warning: no matching functions in current module")
-		return nil
-	}
-
-	cpy := mod.Copy()
-	cpy.Funcs = funcs
-	return r.recompile(ctx, cpy)
-}
-
 func (r *REPL) timerStart(msg string) {
 	if r.metrics != nil {
 		r.metrics.Timer(msg).Start()
@@ -601,37 +579,6 @@ func (r *REPL) compileRule(ctx context.Context, rule *ast.Rule) error {
 	return nil
 }
 
-func (r *REPL) compileFunc(ctx context.Context, fn *ast.Func) error {
-	r.timerStart(metrics.RegoQueryCompile)
-	defer r.timerStop(metrics.RegoQueryCompile)
-
-	mod := r.modules[r.currentModuleID]
-	prev := mod.Funcs
-	mod.Funcs = append(mod.Funcs, fn)
-	ast.WalkFuncs(fn, func(f *ast.Func) bool {
-		f.Module = mod
-		return false
-	})
-
-	policies, err := r.loadModules(ctx, r.txn)
-	if err != nil {
-		return err
-	}
-
-	for id, mod := range r.modules {
-		policies[id] = mod
-	}
-
-	compiler := ast.NewCompiler().SetErrorLimit(r.errLimit)
-
-	if compiler.Compile(policies); compiler.Failed() {
-		mod.Funcs = prev
-		return compiler.Errors
-	}
-
-	return nil
-}
-
 func (r *REPL) evalBufferOne(ctx context.Context) error {
 
 	line := strings.Join(r.buffer, "\n")
@@ -770,8 +717,6 @@ func (r *REPL) evalStatement(ctx context.Context, stmt interface{}) error {
 		return err
 	case *ast.Rule:
 		return r.compileRule(ctx, s)
-	case *ast.Func:
-		return r.compileFunc(ctx, s)
 	case *ast.Import:
 		return r.evalImport(s)
 	case *ast.Package:
diff --git a/repl/repl_test.go b/repl/repl_test.go
index 8e692a518f..0bd0a976de 100644
--- a/repl/repl_test.go
+++ b/repl/repl_test.go
@@ -43,9 +43,9 @@ bar([x, y]) = z {
 
 	mod2 := []byte(`package a.b.d
 
-baz() = y {
-	a.b.c.foo("barfoobar.bar", x)
-	a.b.c.bar(x, y)
+baz(_) = y {
+	data.a.b.c.foo("barfoobar.bar", x)
+	data.a.b.c.bar(x, y)
 }`)
 
 	if err := store.UpsertPolicy(ctx, txn, "mod1", mod1); err != nil {
@@ -63,11 +63,11 @@ baz() = y {
 	var buf bytes.Buffer
 	repl := newRepl(store, &buf)
 	repl.OneShot(ctx, "json")
-	repl.OneShot(ctx, "a.b.d.baz(x)")
+	repl.OneShot(ctx, "data.a.b.d.baz(null, x)")
 	exp := util.MustUnmarshalJSON([]byte(`[{"x": "foo"}]`))
 	result := util.MustUnmarshalJSON(buf.Bytes())
 	if !reflect.DeepEqual(exp, result) {
-		t.Fatalf("expected a.b.d.baz(x) to be %v, got %v", exp, result)
+		t.Fatalf("expected data.a.b.d.baz(x) to be %v, got %v", exp, result)
 	}
 
 	err := repl.OneShot(ctx, "p(x) = y { y = x+4 }")
@@ -76,29 +76,29 @@ baz() = y {
 	}
 
 	buf.Reset()
-	repl.OneShot(ctx, "repl.p(5, y)")
+	repl.OneShot(ctx, "data.repl.p(5, y)")
 	exp = util.MustUnmarshalJSON([]byte(`[{"y": 9}]`))
 	result = util.MustUnmarshalJSON(buf.Bytes())
 	if !reflect.DeepEqual(exp, result) {
-		t.Fatalf("expected repl.p(x) to be %v, got %v", exp, result)
+		t.Fatalf("expected data.repl.p(x) to be %v, got %v", exp, result)
 	}
 
 	repl.OneShot(ctx, "f(1, x) = y { y = x }")
 	repl.OneShot(ctx, "f(2, x) = y { y = x*2 }")
 
 	buf.Reset()
-	repl.OneShot(ctx, "repl.f(1, 2, y)")
+	repl.OneShot(ctx, "data.repl.f(1, 2, y)")
 	exp = util.MustUnmarshalJSON([]byte(`[{"y": 2}]`))
 	result = util.MustUnmarshalJSON(buf.Bytes())
 	if !reflect.DeepEqual(exp, result) {
-		t.Fatalf("expected repl.f(1, 2, y) to be %v, got %v", exp, result)
t.Fatalf("expected data.repl.f(1, 2, y) to be %v, got %v", exp, result) } buf.Reset() - repl.OneShot(ctx, "repl.f(2, 2, y)") + repl.OneShot(ctx, "data.repl.f(2, 2, y)") exp = util.MustUnmarshalJSON([]byte(`[{"y": 4}]`)) result = util.MustUnmarshalJSON(buf.Bytes()) if !reflect.DeepEqual(exp, result) { - t.Fatalf("expected repl.f(2, 2, y) to be %v, got %v", exp, result) + t.Fatalf("expected data.repl.f(2, 2, y) to be %v, got %v", exp, result) } } @@ -383,20 +383,20 @@ func TestUnset(t *testing.T) { buffer.Reset() repl.OneShot(ctx, "p(x) = y { y = x }") - repl.OneShot(ctx, "unset repl.p") + repl.OneShot(ctx, "unset p") - err = repl.OneShot(ctx, "repl.p(5, y)") - if err == nil || err.Error() != `1 error occurred: 1:1: rego_type_error: undefined built-in function repl.p` { + err = repl.OneShot(ctx, "data.repl.p(5, y)") + if err == nil || err.Error() != `1 error occurred: 1:1: rego_type_error: undefined function data.repl.p` { t.Fatalf("Expected eval error (undefined built-in) but got err: '%v'", err) } buffer.Reset() repl.OneShot(ctx, "p(1, x) = y { y = x }") repl.OneShot(ctx, "p(2, x) = y { y = x+1 }") - repl.OneShot(ctx, "unset repl.p") + repl.OneShot(ctx, "unset p") - err = repl.OneShot(ctx, "repl.p(1, 2, y)") - if err == nil || err.Error() != `1 error occurred: 1:1: rego_type_error: undefined built-in function repl.p` { + err = repl.OneShot(ctx, "data.repl.p(1, 2, y)") + if err == nil || err.Error() != `1 error occurred: 1:1: rego_type_error: undefined function data.repl.p` { t.Fatalf("Expected eval error (undefined built-in) but got err: '%v'", err) } @@ -407,8 +407,8 @@ func TestUnset(t *testing.T) { } buffer.Reset() - repl.OneShot(ctx, `unset repl.q`) - if buffer.String() != "warning: no matching functions in current module\n" { + repl.OneShot(ctx, `unset q`) + if buffer.String() != "warning: no matching rules in current module\n" { t.Fatalf("Expected unset error for missing function but got: %v", buffer.String()) } diff --git a/tester/runner_test.go b/tester/runner_test.go index 70e8e06010..0aeb8a7e64 100644 --- a/tester/runner_test.go +++ b/tester/runner_test.go @@ -72,9 +72,9 @@ func TestRunnerCancel(t *testing.T) { ast.RegisterBuiltin(&ast.Builtin{ Name: "test.sleep", - Args: []types.Type{ + Decl: types.NewFunction( types.S, - }, + ), }) topdown.RegisterFunctionalBuiltinVoid1("test.sleep", func(a ast.Value) error { diff --git a/topdown/builtins.go b/topdown/builtins.go index 5819e9dacd..c64b643012 100644 --- a/topdown/builtins.go +++ b/topdown/builtins.go @@ -131,17 +131,6 @@ func RegisterFunctionalBuiltin1Out3(name string, fun FunctionalBuiltin1Out3) { builtinFunctions[name] = functionalWrapper1Out3(name, fun) } -func (t *Topdown) registerUserFunctions() { - if t.Compiler == nil { - return - } - - fns := t.Compiler.GetAllFuncs() - for name, fn := range fns { - t.userBuiltins[name] = userFunctionWrapper(name, fn) - } -} - // BuiltinEmpty is used to signal that the built-in function evaluated, but the // result is undefined so evaluation should not continue. 
 type BuiltinEmpty struct{}
@@ -244,61 +233,6 @@ func functionalWrapper1Out3(name string, fn FunctionalBuiltin1Out3) BuiltinFunc
 	}
 }
 
-func userFunctionWrapper(name string, fns []*ast.Func) BuiltinFunc {
-	return func(t *Topdown, expr *ast.Expr, iter Iterator) error {
-		operands := expr.Terms.([]*ast.Term)[1:]
-		resolved, err := resolveN(t, name, operands, len(operands)-1)
-		if err != nil {
-			return err
-		}
-
-		var rTerms ast.Array
-		for _, r := range resolved {
-			rTerms = append(rTerms, ast.NewTerm(r))
-		}
-
-		var redo bool
-		var result *ast.Term
-		for _, fn := range fns {
-			child := t.Child(fn.Body)
-			if !redo {
-				child.traceEnter(fn)
-				redo = true
-			} else {
-				child.traceRedo(fn)
-			}
-
-			arr := ast.Array(fn.Head.Args)
-			undo, err := evalEqUnify(child, rTerms, arr, nil, func(child *Topdown) error {
-				return eval(child, func(child *Topdown) error {
-					next := PlugTerm(fn.Head.Output, child.Binding)
-					if result != nil && !result.Equal(next) {
-						return &Error{
-							Code:     ConflictErr,
-							Message:  fmt.Sprintf("function %s produces conflicting outputs", name),
-							Location: expr.Location,
-						}
-					}
-					result = next
-					child.traceExit(fn)
-					return nil
-				})
-			})
-			defer child.Unbind(undo)
-
-			if err != nil {
-				return err
-			}
-		}
-
-		if result == nil {
-			return nil
-		}
-
-		return unifyAndContinue(t, iter, result.Value, operands[len(operands)-1].Value)
-	}
-}
-
 func handleFunctionalBuiltinErr(name string, loc *ast.Location, err error) error {
 	switch err := err.(type) {
 	case BuiltinEmpty:
diff --git a/topdown/example_test.go b/topdown/example_test.go
index 50494f0be0..7666eaa06d 100644
--- a/topdown/example_test.go
+++ b/topdown/example_test.go
@@ -155,10 +155,10 @@ func ExampleRegisterFunctionalBuiltin1() {
 	// when it encounters your built-in.
 	builtin := &ast.Builtin{
 		Name: "mybuiltins.upper",
-		Args: []types.Type{
+		Decl: types.NewFunction(
 			types.S,
 			types.S,
-		},
+		),
 		TargetPos: []int{1},
 	}
 
diff --git a/topdown/topdown.go b/topdown/topdown.go
index 8cddb891bf..0a32adb1ba 100644
--- a/topdown/topdown.go
+++ b/topdown/topdown.go
@@ -30,14 +30,13 @@ type Topdown struct {
 	Tracer   Tracer
 	Context  context.Context
 
-	txn          storage.Transaction
-	locals       *ast.ValueMap
-	refs         *valueMapStack
-	cache        *contextcache
-	qid          uint64
-	redos        *redoStack
-	builtins     builtins.Cache
-	userBuiltins map[string]BuiltinFunc
+	txn      storage.Transaction
+	locals   *ast.ValueMap
+	refs     *valueMapStack
+	cache    *contextcache
+	qid      uint64
+	redos    *redoStack
+	builtins builtins.Cache
 }
 
 // ResetQueryIDs resets the query ID generator. This is only for test purposes.
@@ -80,19 +79,17 @@ type redoStackElement struct {
 
 // New returns a new Topdown object without any bindings.
 func New(ctx context.Context, query ast.Body, compiler *ast.Compiler, store storage.Store, txn storage.Transaction) *Topdown {
 	t := &Topdown{
-		Context:      ctx,
-		Query:        query,
-		Compiler:     compiler,
-		Store:        store,
-		refs:         newValueMapStack(),
-		txn:          txn,
-		cache:        newContextCache(),
-		qid:          qidFactory.Next(),
-		redos:        &redoStack{},
-		builtins:     builtins.Cache{},
-		userBuiltins: map[string]BuiltinFunc{},
-	}
-	t.registerUserFunctions()
+		Context:  ctx,
+		Query:    query,
+		Compiler: compiler,
+		Store:    store,
+		refs:     newValueMapStack(),
+		txn:      txn,
+		cache:    newContextCache(),
+		qid:      qidFactory.Next(),
+		redos:    &redoStack{},
+		builtins: builtins.Cache{},
+	}
 	return t
 }
 
@@ -905,13 +902,14 @@ func evalExpr(t *Topdown, iter Iterator) error {
 	expr := PlugExpr(t.Current(), t.Binding)
 	switch tt := expr.Terms.(type) {
 	case []*ast.Term:
+		ref := tt[0].Value.(ast.Ref)
+		if ast.DefaultRootDocument.Equal(ref[0]) {
+			return evalRefRuleApply(t, ref, tt[1:], iter)
+		}
 		name := tt[0].String()
 		builtin, ok := builtinFunctions[name]
 		if !ok {
-			builtin, ok = t.userBuiltins[name]
-			if !ok {
-				return unsupportedBuiltinErr(expr.Location)
-			}
+			return unsupportedBuiltinErr(expr.Location)
 		}
 		return builtin(t, expr, iter)
 	case *ast.Term:
@@ -1296,12 +1294,104 @@ func evalRefRule(t *Topdown, ref ast.Ref, path ast.Ref, iter Iterator) error {
 
 	return nil
 }
 
+func evalRefRuleApply(t *Topdown, path ast.Ref, args []*ast.Term, iter Iterator) error {
+
+	index := t.Compiler.RuleIndex(path)
+	ir, err := index.Lookup(valueResolver{t})
+	if err != nil || ir.Empty() {
+		return err
+	}
+
+	// If function is being applied and return value is being ignored, append a
+	// wildcard variable to the expression so that it will unify below.
+	if len(args) == len(ir.Rules[0].Head.Args) {
+		args = append(args, ast.VarTerm(ast.WildcardPrefix+"apply"))
+	}
+
+	resolved, err := resolveN(t, path.String(), args, len(args)-1)
+	if err != nil {
+		return err
+	}
+
+	resolvedArgs := make(ast.Array, len(resolved))
+	for i := range resolved {
+		resolvedArgs[i] = ast.NewTerm(resolved[i])
+	}
+
+	var redo bool
+	var result *ast.Term
+
+	for _, rule := range ir.Rules {
+		next, err := evalRefRuleApplyOne(t, rule, resolvedArgs, redo, result)
+		if err != nil {
+			return err
+		}
+		redo = true
+		if next != nil {
+			result = next
+		} else {
+			chain := ir.Else[rule]
+			for i := range chain {
+				next, err := evalRefRuleApplyOne(t, chain[i], resolvedArgs, redo, result)
+				if err != nil {
+					return err
+				}
+				if next != nil {
+					result = next
+					break
+				}
+			}
+		}
+	}
+
+	if result == nil {
+		return nil
+	}
+
+	return unifyAndContinue(t, iter, result.Value, args[len(args)-1].Value)
+}
+
+func evalRefRuleApplyOne(t *Topdown, rule *ast.Rule, args ast.Array, redo bool, last *ast.Term) (*ast.Term, error) {
+	child := t.Child(rule.Body)
+	if !redo {
+		child.traceEnter(rule)
+	} else {
+		child.traceRedo(rule)
+	}
+	var result *ast.Term
+	ruleArgs := ast.Array(rule.Head.Args)
+	undo, err := evalEqUnify(child, args, ruleArgs, nil, func(child *Topdown) error {
+		return eval(child, func(child *Topdown) error {
+			result = PlugTerm(rule.Head.Value, child.Binding)
+			if last != nil && ast.Compare(last, result) != 0 {
+				return completeDocConflictErr(t.currentLocation(rule))
+			}
+			if last == nil && result != nil {
+				last = result
+			}
+			child.traceExit(rule)
+			return nil
+		})
+	})
+	if err != nil {
+		return nil, err
+	}
+	child.Unbind(undo)
+	return result, nil
+}
+
 func evalRefRuleCompleteDoc(t *Topdown, ref ast.Ref, suffix ast.Ref, ir *ast.IndexResult, iter Iterator) error {
 
 	if ir.Empty() {
 		return nil
 	}
 
+	if len(ir.Rules) > 0 && len(ir.Rules[0].Head.Args) > 0 {
+		// Skip functions. Functions are not evaluated if args are unavailable.
+		// Functions are evaluated when the overall expression is evaluated.
+		return nil
+	}
+
 	// Determine cache key for rule set. Since the rule set must generate at
 	// most one value, we can cache the result on any rule.
 	var cacheKey *ast.Rule
@@ -1319,6 +1409,7 @@ func evalRefRuleCompleteDoc(t *Topdown, ref ast.Ref, suffix ast.Ref, ir *ast.Ind
 	var redo bool
 
 	for _, rule := range ir.Rules {
+
 		next, err := evalRefRuleCompleteDocSingle(t, rule, redo, result)
 		if err != nil {
 			return err
diff --git a/topdown/topdown_test.go b/topdown/topdown_test.go
index 1c1ffa92d8..c5b16664e3 100644
--- a/topdown/topdown_test.go
+++ b/topdown/topdown_test.go
@@ -1493,10 +1493,9 @@ func TestTopDownTime(t *testing.T) {
 
 	ast.RegisterBuiltin(&ast.Builtin{
 		Name: "test_sleep",
-		Args: []types.Type{
+		Decl: types.NewFunction(
 			types.S,
-		},
-		TargetPos: []int{1},
+		),
 	})
 
 	RegisterFunctionalBuiltinVoid1("test_sleep", func(a ast.Value) error {
@@ -1782,7 +1781,7 @@ func TestTopDownPartialDocConstants(t *testing.T) {
 	}
 }
 
-func TestTopDownUserFunc(t *testing.T) {
+func TestTopDownFunctions(t *testing.T) {
 	modules := []string{`package ex
 
	foo(x) = y {
@@ -1809,7 +1808,7 @@ func TestTopDownUserFunc(t *testing.T) {
	cross(x) = [a, b] {
		split(x, "i", y)
		foo(y[1], b)
-		test.foo(y[2], a)
+		data.test.foo(y[2], a)
	}

	arrays([x, y]) = [a, b] {
@@ -1823,7 +1822,7 @@ func TestTopDownUserFunc(t *testing.T) {

	objects({"foo": x, "bar": y}) = z {
		foo(x, a)
-		test.foo(y, b)
+		data.test.foo(y, b)
		z = [a, b]
	}

@@ -1879,7 +1878,14 @@ func TestTopDownUserFunc(t *testing.T) {

	multi4 = y {
		multi("foo", 2, y)
-	}`,
+	}
+
+	always_true_fn(x)
+
+	always_true {
+		always_true_fn(1)
+	}
+	`,
	`
	package test

@@ -1910,7 +1916,17 @@ func TestTopDownUserFunc(t *testing.T) {

	samepkg = y {
		foo("how do you do?", y)
-	}`}
+	}`,
+	`
+	package test.l1.l3
+
+	g(x) = x`,
+	`
+	package test.l1.l2
+
+	p = true
+	f(x) = x`,
+	}

 	compiler := compileModules(modules)
 	store := inmem.NewFromObject(loadSmallTestData())
@@ -1924,6 +1940,7 @@ func TestTopDownUserFunc(t *testing.T) {
 	assertTopDownWithPath(t, compiler, store, "array params", []string{"ex", "arraysrule"}, "", `[["h", "h"], ["foo"]]`)
 	assertTopDownWithPath(t, compiler, store, "object params", []string{"ex", "objectsrule"}, "", `[["h", "h"], "i"]`)
 	assertTopDownWithPath(t, compiler, store, "ref func output", []string{"ex", "refoutput"}, "", `"h"`)
+	assertTopDownWithPath(t, compiler, store, "always_true", []string{"ex.always_true"}, ``, `true`)
 	assertTopDownWithPath(t, compiler, store, "same package call", []string{"test", "samepkg"}, "", `"w do you do?"`)
 	assertTopDownWithPath(t, compiler, store, "void good", []string{"ex", "voidGood"}, "", `true`)
 	assertTopDownWithPath(t, compiler, store, "void bad", []string{"ex", "voidBad"}, "", "")
@@ -1932,9 +1949,10 @@ func TestTopDownUserFunc(t *testing.T) {
 	assertTopDownWithPath(t, compiler, store, "multi3", []string{"ex", "multi3"}, "", `20`)
 	assertTopDownWithPath(t, compiler, store, "multi4", []string{"ex", "multi4"}, "", `"bar"`)
 	assertTopDownWithPath(t, compiler, store, "multi cross package", []string{"test", "multi_cross_pkg"}, "", `["bar", 3]`)
+	assertTopDownWithPath(t, compiler, store, "skip-functions", []string{"test.l1"}, ``, `{"l2": {"p": true}, "l3": {}}`)
 }
 
-func TestUserFunctionErrors(t *testing.T) {
+func TestTopDownFunctionErrors(t *testing.T) {
 	compiler := compileModules([]string{
 		`
 		package test1
@@ -1985,9 +2003,9 @@ func TestUserFunctionErrors(t *testing.T) {
 	txn := storage.NewTransactionOrDie(ctx, store)
 	defer store.Abort(ctx, txn)
 
-	assertTopDownWithPath(t, compiler, store, "function output conflict single", []string{"test1", "r"}, "", errors.New(`eval_conflict_error: function test1.p produces conflicting outputs`))
+	assertTopDownWithPath(t, compiler, store, "function output conflict single", []string{"test1", "r"}, "", completeDocConflictErr(nil))
 	assertTopDownWithPath(t, compiler, store, "function input no match", []string{"test2", "r"}, "", "")
-	assertTopDownWithPath(t, compiler, store, "function output conflict multiple", []string{"test3", "r"}, "", errors.New(`eval_conflict_error: function test3.p produces conflicting outputs`))
+	assertTopDownWithPath(t, compiler, store, "function output conflict multiple", []string{"test3", "r"}, "", completeDocConflictErr(nil))
 }
 
 func TestTopDownWithKeyword(t *testing.T) {
@@ -2043,6 +2061,7 @@ func TestTopDownElseKeyword(t *testing.T) {
 		{"indexed", "ex.indexed", "2"},
 		{"conflict-1", "ex.conflict_1", completeDocConflictErr(nil)},
 		{"conflict-2", "ex.conflict_2", completeDocConflictErr(nil)},
+		{"functions", "ex.fn_result", `["large", "small", "medium"]`},
 	}
 
 	for _, tc := range tests {
@@ -2106,6 +2125,16 @@ func TestTopDownElseKeyword(t *testing.T) {
 
 		conflict_2 { false } else = false { true }
 		conflict_2 { false } else = true { true }
+
+		fn_result = [x,y,z] { fn(101, true, x); fn(100, true, y); fn(100, false, z) }
+
+		fn(x, y) = "large" {
+			x > 100
+		} else = "small" {
+			y = true
+		} else = "medium" {
+			true
+		}
 		`,
 	})
 
@@ -2265,9 +2294,9 @@ func TestTopDownUnsupportedBuiltin(t *testing.T) {
 
 func TestTopDownQueryCancellation(t *testing.T) {
 
 	ast.RegisterBuiltin(&ast.Builtin{
 		Name: "test.sleep",
-		Args: []types.Type{
+		Decl: types.NewFunction(
 			types.S,
-		},
+		),
 	})
 
 	RegisterFunctionalBuiltinVoid1("test.sleep", func(a ast.Value) error {