internal/core/eval: implement core evaluator
Does not yet implement imports and builtins.
- adds implementations for adt types
- adds eval package with higher-level evaluation
- some tweaks to compile
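A rough sketch of how the new adt pieces are meant to be driven (illustrative
only; r, u, and expr stand in for a Runtime, a Unifier implementation such as
the evaluator added in the new eval package, and a compiled expression):

    v := &adt.Vertex{}
    v.AddConjunct(adt.MakeConjunct(nil, expr))
    ctx := adt.NewContext(r, u, v)
    v.Finalize(ctx) // unify all conjuncts to completion
    if b := v.Err(ctx, adt.Finalized); b != nil {
        // b is an *adt.Bottom describing the failure
    }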
Change-Id: Ie91bd0bde8a03ed9957f306166042f56aebe19ce
Reviewed-on: https://cue-review.googlesource.com/c/cue/+/6280
Reviewed-by: Marcel van Lohuizen <mpvl@golang.org>
diff --git a/internal/core/adt/adt.go b/internal/core/adt/adt.go
index 2a91ff9..8ae9338 100644
--- a/internal/core/adt/adt.go
+++ b/internal/core/adt/adt.go
@@ -56,27 +56,28 @@
// An Evaluator provides a method to convert to a value.
type Evaluator interface {
Node
- // TODO: Eval(c Context, env *Environment) Value
+ evaluate(ctx *OpContext) Value
}
// A Resolver represents a reference somewhere else within a tree that resolves
// a value.
type Resolver interface {
Node
- // TODO: Resolve(c Context, env *Environment) Arc
+ resolve(ctx *OpContext) *Vertex
}
+type YieldFunc func(env *Environment, s *StructLit)
+
// A Yielder represents 0 or more labeled values of structs or lists.
type Yielder interface {
Node
- yielderNode()
- // TODO: Yield()
+ yield(ctx *OpContext, fn YieldFunc)
}
// A Validator validates a Value. All Validators are Values.
type Validator interface {
- // TODO: Validate(c Context, v Value) *Bottom
Value
+ validate(c *OpContext, v Value) *Bottom
}
// Value
@@ -164,8 +165,7 @@
// Decl and Yielder
-func (*LetClause) declNode() {}
-func (*LetClause) yielderNode() {}
+func (*LetClause) declNode() {}
// Decl and Elem
@@ -236,16 +236,12 @@
// Decl, Elem, and Yielder
-func (*ForClause) declNode() {}
-func (*ForClause) elemNode() {}
-func (*ForClause) yielderNode() {}
-func (*IfClause) declNode() {}
-func (*IfClause) elemNode() {}
-func (*IfClause) yielderNode() {}
+func (*ForClause) declNode() {}
+func (*ForClause) elemNode() {}
+func (*IfClause) declNode() {}
+func (*IfClause) elemNode() {}
-// Yielder
-
-func (*ValueClause) yielderNode() {}
+// Yielder only: ValueClause
// Node
diff --git a/internal/core/adt/binop.go b/internal/core/adt/binop.go
new file mode 100644
index 0000000..2fb3205
--- /dev/null
+++ b/internal/core/adt/binop.go
@@ -0,0 +1,364 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package adt
+
+import (
+ "bytes"
+ "math/big"
+ "strings"
+
+ "cuelang.org/go/cue/errors"
+ "github.com/cockroachdb/apd/v2"
+)
+
+var apdCtx apd.Context
+
+func init() {
+ apdCtx = apd.BaseContext
+ apdCtx.Precision = 24
+}
+
+// BinOp handles all operations except AndOp and OrOp. This includes processing
+// unary comparators such as '<4' and '=~"foo"'.
+//
+// BinOp returns a *Bottom with code IncompleteError if left or right is not
+// concrete.
+func BinOp(c *OpContext, op Op, left, right Value) Value {
+ leftKind := left.Kind()
+ rightKind := right.Kind()
+
+ const msg = "non-concrete value '%v' to operation '%s'"
+ if left.Concreteness() > Concrete {
+ return &Bottom{
+ Code: IncompleteError,
+ Err: errors.Newf(c.pos(), msg, c.Str(left), op),
+ }
+ }
+ if right.Concreteness() > Concrete {
+ return &Bottom{
+ Code: IncompleteError,
+ Err: errors.Newf(c.pos(), msg, c.Str(right), op),
+ }
+ }
+
+ if a, ok := left.(*Bottom); ok {
+ return CombineErrors(nil, a, right)
+ }
+ if b, ok := right.(*Bottom); ok {
+ return b
+ }
+
+ switch op {
+ case EqualOp:
+ switch {
+ case leftKind == NullKind && rightKind == NullKind:
+ return c.newBool(true)
+
+ case leftKind == NullKind || rightKind == NullKind:
+ return c.newBool(false)
+
+ case leftKind == BoolKind:
+ return c.newBool(c.BoolValue(left) == c.BoolValue(right))
+
+ case leftKind == StringKind:
+ // normalize?
+ return cmpTonode(c, op, strings.Compare(c.StringValue(left), c.StringValue(right)))
+
+ case leftKind == BytesKind:
+ return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op)))
+
+ case leftKind&NumKind != 0 && rightKind&NumKind != 0:
+ // n := c.newNum()
+ return cmpTonode(c, op, c.num(left, op).X.Cmp(&c.num(right, op).X))
+
+ case leftKind == ListKind && rightKind == ListKind:
+ x := c.Elems(left)
+ y := c.Elems(right)
+ if len(x) != len(y) {
+ return c.newBool(false)
+ }
+ for i, e := range x {
+ a, _ := c.Concrete(nil, e, op)
+ b, _ := c.Concrete(nil, y[i], op)
+ if !test(c, EqualOp, a, b) {
+ return c.newBool(false)
+ }
+ }
+ return c.newBool(true)
+ }
+
+ case NotEqualOp:
+ switch {
+ case leftKind == NullKind && rightKind == NullKind:
+ return c.newBool(false)
+
+ case leftKind == NullKind || rightKind == NullKind:
+ return c.newBool(true)
+
+ case leftKind == BoolKind:
+ return c.newBool(c.boolValue(left, op) != c.boolValue(right, op))
+
+ case leftKind == StringKind:
+ // normalize?
+ return cmpTonode(c, op, strings.Compare(c.StringValue(left), c.StringValue(right)))
+
+ case leftKind == BytesKind:
+ return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op)))
+
+ case leftKind&NumKind != 0 && rightKind&NumKind != 0:
+ // n := c.newNum()
+ return cmpTonode(c, op, c.num(left, op).X.Cmp(&c.num(right, op).X))
+
+ case leftKind == ListKind && rightKind == ListKind:
+ x := c.Elems(left)
+ y := c.Elems(right)
+ if len(x) != len(y) {
+ return c.newBool(false)
+ }
+ for i, e := range x {
+ a, _ := c.Concrete(nil, e, op)
+ b, _ := c.Concrete(nil, y[i], op)
+ if !test(c, EqualOp, a, b) {
+ return c.newBool(true)
+ }
+ }
+ return c.newBool(false)
+ }
+
+ case LessThanOp, LessEqualOp, GreaterEqualOp, GreaterThanOp:
+ switch {
+ case leftKind == StringKind && rightKind == StringKind:
+ // normalize?
+ return cmpTonode(c, op, strings.Compare(c.stringValue(left, op), c.stringValue(right, op)))
+
+ case leftKind == BytesKind && rightKind == BytesKind:
+ return cmpTonode(c, op, bytes.Compare(c.bytesValue(left, op), c.bytesValue(right, op)))
+
+ case leftKind&NumKind != 0 && rightKind&NumKind != 0:
+ // n := c.newNum(left, right)
+ return cmpTonode(c, op, c.num(left, op).X.Cmp(&c.num(right, op).X))
+ }
+
+ case BoolAndOp:
+ return c.newBool(c.boolValue(left, op) && c.boolValue(right, op))
+
+ case BoolOrOp:
+ return c.newBool(c.boolValue(left, op) || c.boolValue(right, op))
+
+ case MatchOp:
+ // if y.re == nil {
+ // // This really should not happen, but leave in for safety.
+ // b, err := Regexp.MatchString(str, x.str)
+ // if err != nil {
+ // return c.Errf(Src, "error parsing Regexp: %v", err)
+ // }
+ // return boolTonode(Src, b)
+ // }
+ return c.newBool(c.regexp(right).MatchString(c.stringValue(left, op)))
+
+ case NotMatchOp:
+ return c.newBool(!c.regexp(right).MatchString(c.stringValue(left, op)))
+
+ case AddOp:
+ switch {
+ case leftKind&NumKind != 0 && rightKind&NumKind != 0:
+ return numOp(c, apdCtx.Add, left, right, AddOp)
+
+ case leftKind == StringKind && rightKind == StringKind:
+ return c.NewString(c.StringValue(left) + c.StringValue(right))
+
+ case leftKind == BytesKind && rightKind == BytesKind:
+ ba := c.bytesValue(left, op)
+ bb := c.bytesValue(right, op)
+ b := make([]byte, len(ba)+len(bb))
+ copy(b, ba)
+ copy(b[len(ba):], bb)
+ return c.newBytes(b)
+
+ case leftKind == ListKind && rightKind == ListKind:
+ a := c.Elems(left)
+ b := c.Elems(right)
+ if err := c.Err(); err != nil {
+ return err
+ }
+ n := c.newList(c.src, nil)
+ if err := n.appendListArcs(a); err != nil {
+ return err
+ }
+ if err := n.appendListArcs(b); err != nil {
+ return err
+ }
+ // n.isList = true
+ // n.IsClosed = true
+ return n
+ }
+
+ case SubtractOp:
+ return numOp(c, apdCtx.Sub, left, right, op)
+
+ case MultiplyOp:
+ switch {
+ // float
+ case leftKind&NumKind != 0 && rightKind&NumKind != 0:
+ return numOp(c, apdCtx.Mul, left, right, op)
+
+ case leftKind == StringKind && rightKind == IntKind:
+ const as = "string multiplication"
+ return c.NewString(strings.Repeat(c.stringValue(left, as), int(c.uint64(right, as))))
+
+ case leftKind == IntKind && rightKind == StringKind:
+ const as = "string multiplication"
+ return c.NewString(strings.Repeat(c.stringValue(right, as), int(c.uint64(left, as))))
+
+ case leftKind == BytesKind && rightKind == IntKind:
+ const as = "bytes multiplication"
+ return c.newBytes(bytes.Repeat(c.bytesValue(left, as), int(c.uint64(right, as))))
+
+ case leftKind == IntKind && rightKind == BytesKind:
+ const as = "bytes multiplication"
+ return c.newBytes(bytes.Repeat(c.bytesValue(right, as), int(c.uint64(left, as))))
+
+ case leftKind == ListKind && rightKind == IntKind:
+ left, right = right, left
+ fallthrough
+
+ case leftKind == IntKind && rightKind == ListKind:
+ a := c.Elems(right)
+ n := c.newList(c.src, nil)
+ // n.IsClosed = true
+ index := int64(0)
+ for i := c.uint64(left, "list multiplier"); i > 0; i-- {
+ for _, a := range a {
+ f, _ := MakeLabel(a.Source(), index, IntLabel)
+ n.Arcs = append(n.Arcs, &Vertex{
+ Parent: n,
+ Label: f,
+ Conjuncts: a.Conjuncts,
+ })
+ index++
+ }
+ }
+ return n
+ }
+
+ case FloatQuotientOp:
+ if leftKind&NumKind != 0 && rightKind&NumKind != 0 {
+ v := numOp(c, apdCtx.Quo, left, right, op)
+ if n, ok := v.(*Num); ok {
+ n.K = FloatKind
+ }
+ return v
+ }
+
+ case IntDivideOp:
+ if leftKind&IntKind != 0 && rightKind&IntKind != 0 {
+ y := c.num(right, op)
+ if y.X.IsZero() {
+ return c.NewErrf("division by zero")
+ }
+ return intOp(c, (*big.Int).Div, c.num(left, op), y)
+ }
+
+ case IntModuloOp:
+ if leftKind&IntKind != 0 && rightKind&IntKind != 0 {
+ y := c.num(right, op)
+ if y.X.IsZero() {
+ return c.NewErrf("division by zero")
+ }
+ return intOp(c, (*big.Int).Mod, c.num(left, op), y)
+ }
+
+ case IntQuotientOp:
+ if leftKind&IntKind != 0 && rightKind&IntKind != 0 {
+ y := c.num(right, op)
+ if y.X.IsZero() {
+ return c.NewErrf("division by zero")
+ }
+ return intOp(c, (*big.Int).Quo, c.num(left, op), y)
+ }
+
+ case IntRemainderOp:
+ if leftKind&IntKind != 0 && rightKind&IntKind != 0 {
+ y := c.num(right, op)
+ if y.X.IsZero() {
+ return c.NewErrf("division by zero")
+ }
+ return intOp(c, (*big.Int).Rem, c.num(left, op), y)
+ }
+ }
+
+ return c.NewErrf("invalid operands %s and %s to '%s' (type %s and %s)",
+ c.Str(left), c.Str(right), op, left.Kind(), right.Kind())
+}
+
+func cmpTonode(c *OpContext, op Op, r int) Value {
+ result := false
+ switch op {
+ case LessThanOp:
+ result = r == -1
+ case LessEqualOp:
+ result = r != 1
+ case EqualOp, AndOp:
+ result = r == 0
+ case NotEqualOp:
+ result = r != 0
+ case GreaterEqualOp:
+ result = r != -1
+ case GreaterThanOp:
+ result = r == 1
+ }
+ return c.newBool(result)
+}
+
+type numFunc func(z, x, y *apd.Decimal) (apd.Condition, error)
+
+func numOp(c *OpContext, fn numFunc, a, b Value, op Op) Value {
+ var d apd.Decimal
+ x := c.num(a, op)
+ y := c.num(b, op)
+ cond, err := fn(&d, &x.X, &y.X)
+ if err != nil {
+ return c.NewErrf("failed arithmetic: %v", err)
+ }
+ if cond.DivisionByZero() {
+ return c.NewErrf("division by zero")
+ }
+ k := x.Kind() & y.Kind()
+ if k == 0 {
+ k = FloatKind
+ }
+ return c.newNum(&d, k)
+}
+
+type intFunc func(z, x, y *big.Int) *big.Int
+
+func intOp(c *OpContext, fn intFunc, a, b *Num) Value {
+ var d apd.Decimal
+
+ var x, y apd.Decimal
+ _, _ = apdCtx.RoundToIntegralValue(&x, &a.X)
+ if x.Negative {
+ x.Coeff.Neg(&x.Coeff)
+ }
+ _, _ = apdCtx.RoundToIntegralValue(&y, &b.X)
+ if y.Negative {
+ y.Coeff.Neg(&y.Coeff)
+ }
+ fn(&d.Coeff, &x.Coeff, &y.Coeff)
+ if d.Coeff.Sign() < 0 {
+ d.Coeff.Neg(&d.Coeff)
+ d.Negative = true
+ }
+ return c.newNum(&d, IntKind)
+}
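To illustrate the BinOp semantics above, a hypothetical snippet (c is assumed
to be an initialized *adt.OpContext; this is not part of the diff):

    func binOpSketch(c *adt.OpContext) {
        a, b := c.NewInt64(7), c.NewInt64(2)
        _ = adt.BinOp(c, adt.LessThanOp, b, a)              // *adt.Bool with B == true
        _ = adt.BinOp(c, adt.IntQuotientOp, a, b)           // *adt.Num 3 (truncates toward zero)
        _ = adt.BinOp(c, adt.IntDivideOp, a, c.NewInt64(0)) // *adt.Bottom: division by zero
    }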
diff --git a/internal/core/adt/composite.go b/internal/core/adt/composite.go
index bd5f99b..3246c5d 100644
--- a/internal/core/adt/composite.go
+++ b/internal/core/adt/composite.go
@@ -15,6 +15,8 @@
package adt
import (
+ "fmt"
+
"cuelang.org/go/cue/ast"
"cuelang.org/go/cue/errors"
"cuelang.org/go/cue/token"
@@ -26,6 +28,30 @@
type Environment struct {
Up *Environment
Vertex *Vertex
+
+ // DynamicLabel is only set when instantiating a field from a pattern
+ // constraint. It is used to resolve label references.
+ DynamicLabel Feature
+
+ // CloseID is a unique number that tracks a group of conjuncts that need to
+ // belong to a single originating definition.
+ CloseID uint32
+
+ cache map[Expr]Value
+}
+
+// evalCached is used to look up let expressions. Caching let expressions
+// prevents a possible combinatorial explosion.
+func (e *Environment) evalCached(c *OpContext, x Expr) Value {
+ v, ok := e.cache[x]
+ if !ok {
+ if e.cache == nil {
+ e.cache = map[Expr]Value{}
+ }
+ v = c.eval(x)
+ e.cache[x] = v
+ }
+ return v
}
// A Vertex is a node in the value tree. It may be a leaf or internal node.
@@ -37,27 +63,171 @@
// It maintains source information such as a list of conjuncts that contributed
// to the value.
type Vertex struct {
- Parent *Vertex // Do we need this?
+ // Parent links to a parent Vertex. This parent should only be used to
+ // access the parent's Label field to find the relative location within a
+ // tree.
+ Parent *Vertex
// Label is the feature leading to this vertex.
Label Feature
+ // status indicates the evaluation progress of this vertex.
+ status VertexStatus
+
// Value is the value associated with this vertex. For lists and structs
// this is a sentinel value indicating its kind.
Value Value
+ // ChildErrors is the collection of all errors of children.
+ ChildErrors *Bottom
+
// The parent of nodes can be followed to determine the path within the
// configuration of this node.
// Value Value
Arcs []*Vertex // arcs are sorted in display order.
// Conjuncts lists the structs that ultimately formed this Composite value.
- // This includes all selected disjuncts. This information is used to compute
- // the topological sort of arcs.
+ // This includes all selected disjuncts.
+ //
+ // This value may be nil, in which case the Arcs are considered to define
+ // the final value of this Vertex.
Conjuncts []Conjunct
// Structs is a slice of struct literals that contributed to this value.
+ // This information is used to compute the topological sort of arcs.
Structs []*StructLit
+
+ // Closed contains information about how to interpret field labels for the
+ // various conjuncts with respect to which fields are allowed in this
+ // Vertex. All fields are allowed if it is nil.
+ // The evaluator will first check existing fields before using this. So for
+ // simple cases, an Acceptor can always return false to close the Vertex.
+ Closed Acceptor
+}
+
+// VertexStatus indicates the evaluation progress of a Vertex.
+type VertexStatus int8
+
+const (
+ // Unprocessed indicates a Vertex has not been processed before.
+ // Value must be nil.
+ Unprocessed VertexStatus = iota
+
+ // Evaluating means that the current Vertex is being evaluated. If this is
+ // encountered it indicates a reference cycle. Value must be nil.
+ Evaluating
+
+ // Partial indicates that the result was only partially evaluated. It will
+ // need to be fully evaluated to get a complete result.
+ //
+ // TODO: this currently requires a renewed computation. Cache the
+ // nodeContext to allow reusing the computations done so far.
+ Partial
+
+ // EvaluatingArcs indicates that the arcs of the Vertex are currently being
+ // evaluated. If this is encountered it indicates a structural cycle.
+ // Value does not have to be nil.
+ EvaluatingArcs
+
+ // Finalized means that this node is fully evaluated and that the results
+ // are safe to use without further consideration.
+ Finalized
+)
+
+func (v *Vertex) Status() VertexStatus {
+ return v.status
+}
+
+func (v *Vertex) UpdateStatus(s VertexStatus) {
+ if v.status > s+1 {
+ panic(fmt.Sprintf("attempt to regress status from %d to %d", v.Status(), s))
+ }
+ if s == Finalized && v.Value == nil {
+ // panic("not finalized")
+ }
+ v.status = s
+}
+
+func (v *Vertex) IsErr() bool {
+ // if v.Status() > Evaluating {
+ if _, ok := v.Value.(*Bottom); ok {
+ return true
+ }
+ // }
+ return false
+}
+
+func (v *Vertex) Err(c *OpContext, state VertexStatus) *Bottom {
+ c.Unify(c, v, state)
+ if b, ok := v.Value.(*Bottom); ok {
+ return b
+ }
+ return nil
+}
+
+// func (v *Vertex) Evaluate()
+
+func (v *Vertex) Finalize(c *OpContext) {
+ if c == nil {
+ fmt.Println("WOT?")
+ }
+ c.Unify(c, v, Finalized)
+}
+
+func (v *Vertex) AddErr(ctx *OpContext, b *Bottom) {
+ v.Value = CombineErrors(nil, v.Value, b)
+ v.UpdateStatus(Finalized)
+}
+
+func (v *Vertex) SetValue(ctx *OpContext, state VertexStatus, value Value) *Bottom {
+ v.Value = value
+ v.UpdateStatus(state)
+ return nil
+}
+
+// ToVertex wraps v in a new Vertex, if necessary.
+func ToVertex(v Value) *Vertex {
+ switch x := v.(type) {
+ case *Vertex:
+ return x
+ default:
+ n := &Vertex{
+ status: Finalized,
+ Value: x,
+ }
+ n.AddConjunct(MakeConjunct(nil, v))
+ return n
+ }
+}
+
+// Unwrap returns the possibly non-concrete scalar value of v or nil if v is
+// a list, struct or of undefined type.
+func Unwrap(v Value) Value {
+ x, ok := v.(*Vertex)
+ if !ok {
+ return v
+ }
+ switch x.Value.(type) {
+ case *StructMarker, *ListMarker:
+ return v
+ default:
+ return x.Value
+ }
+}
+
+// Acceptor is a single interface that reports whether feature f is a valid
+// field label for this vertex.
+//
+// TODO: combine this with the StructMarker functionality?
+type Acceptor interface {
+ // Accept reports whether a given field is accepted as output.
+ // Pass an InvalidLabel to determine whether this is always open.
+ Accept(ctx *OpContext, f Feature) bool
+
+ // MatchAndInsert finds the conjuncts for optional fields, pattern
+ // constraints, and additional constraints that match f and inserts them in
+ // arc. Use an f of 0 to match only the additional constraints.
+ MatchAndInsert(c *OpContext, arc *Vertex)
}
func (v *Vertex) Kind() Kind {
@@ -69,13 +239,57 @@
return v.Value.Kind()
}
+func (v *Vertex) IsClosed(ctx *OpContext) bool {
+ switch x := v.Value.(type) {
+ case *ListMarker:
+ // TODO: use one mechanism.
+ if x.IsOpen {
+ return false
+ }
+ if v.Closed == nil {
+ return true
+ }
+ return !v.Closed.Accept(ctx, InvalidLabel)
+
+ case *StructMarker:
+ if x.NeedClose {
+ return true
+ }
+ if v.Closed == nil {
+ return false
+ }
+ return !v.Closed.Accept(ctx, InvalidLabel)
+ }
+ return false
+}
+
+func (v *Vertex) Accept(ctx *OpContext, f Feature) bool {
+ if !v.IsClosed(ctx) || v.Lookup(f) != nil {
+ return true
+ }
+ if v.Closed != nil {
+ return v.Closed.Accept(ctx, f)
+ }
+ return false
+}
+
+func (v *Vertex) MatchAndInsert(ctx *OpContext, arc *Vertex) {
+ if v.Closed == nil {
+ return
+ }
+ if !v.Accept(ctx, arc.Label) {
+ return
+ }
+ v.Closed.MatchAndInsert(ctx, arc)
+}
+
func (v *Vertex) IsList() bool {
_, ok := v.Value.(*ListMarker)
return ok
}
-// lookup returns the Arc with label f if it exists or nil otherwise.
-func (v *Vertex) lookup(f Feature) *Vertex {
+// Lookup returns the Arc with label f if it exists or nil otherwise.
+func (v *Vertex) Lookup(f Feature) *Vertex {
for _, a := range v.Arcs {
if a.Label == f {
return a
@@ -84,10 +298,22 @@
return nil
}
+// Elems returns the regular elements of a list.
+func (v *Vertex) Elems() []*Vertex {
+ // TODO: add bookkeeping for where list arcs start and end.
+ a := make([]*Vertex, 0, len(v.Arcs))
+ for _, x := range v.Arcs {
+ if x.Label.IsInt() {
+ a = append(a, x)
+ }
+ }
+ return a
+}
+
// GetArc returns a Vertex for the outgoing arc with label f. It creates and
// adds one if it doesn't yet exist.
func (v *Vertex) GetArc(f Feature) (arc *Vertex, isNew bool) {
- arc = v.lookup(f)
+ arc = v.Lookup(f)
if arc == nil {
arc = &Vertex{Parent: v, Label: f}
v.Arcs = append(v.Arcs, arc)
@@ -100,19 +326,32 @@
// AddConjunct adds the given Conjuncts to v if it doesn't already exist.
func (v *Vertex) AddConjunct(c Conjunct) *Bottom {
+ if v.Value != nil {
+ // This is likely a bug in the evaluator and should not happen.
+ return &Bottom{Err: errors.Newf(token.NoPos, "cannot add conjunct")}
+ }
for _, x := range v.Conjuncts {
if x == c {
return nil
}
}
- if v.Value != nil {
- // This is likely a bug in the evaluator and should not happen.
- return &Bottom{Err: errors.Newf(token.NoPos, "cannot add conjunct")}
- }
+
v.Conjuncts = append(v.Conjuncts, c)
return nil
}
+func (v *Vertex) AddStructs(a ...*StructLit) {
+outer:
+ for _, s := range a {
+ for _, t := range v.Structs {
+ if t == s {
+ continue outer
+ }
+ }
+ v.Structs = append(v.Structs, s)
+ }
+}
+
// Path computes the sequence of Features leading from the root of the
// instance to this Vertex.
func (v *Vertex) Path() []Feature {
@@ -127,6 +366,23 @@
return append(a, v.Label)
}
+func (v *Vertex) appendListArcs(arcs []*Vertex) (err *Bottom) {
+ for _, a := range arcs {
+ // TODO(list): BUG this only works if lists do not have definition
+ // fields.
+ label, err := MakeLabel(a.Source(), int64(len(v.Arcs)), IntLabel)
+ if err != nil {
+ return &Bottom{Src: a.Source(), Err: err}
+ }
+ v.Arcs = append(v.Arcs, &Vertex{
+ Parent: v,
+ Label: label,
+ Conjuncts: a.Conjuncts,
+ })
+ }
+ return nil
+}
+
// A Conjunct is an Environment-Expr pair. The Environment is the starting
// point for reference lookup for any reference contained in X.
type Conjunct struct {
@@ -139,10 +395,14 @@
// MakeConjunct creates a conjunct from the given environment and node.
// It panics if x cannot be used as an expression.
func MakeConjunct(env *Environment, x Node) Conjunct {
+ if env == nil {
+ // TODO: better is to pass one.
+ env = &Environment{}
+ }
switch x.(type) {
case Expr, interface{ expr() Expr }:
default:
- panic("invalid Node type")
+ panic(fmt.Sprintf("invalid Node type %T", x))
}
return Conjunct{env, x}
}
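For orientation, a small hypothetical sketch of the Vertex/Conjunct API added
above (f is assumed to be a valid Feature, x a compiled expression, and c an
initialized *adt.OpContext):

    func arcSketch(c *adt.OpContext, parent *adt.Vertex, f adt.Feature, x adt.Expr) {
        arc, isNew := parent.GetArc(f) // looks up the arc, creating it if needed
        if isNew {
            _ = arc.AddConjunct(adt.MakeConjunct(nil, x)) // rejected once a Value is set
        }
        arc.Finalize(c)
        if arc.IsErr() {
            // arc.Value is an *adt.Bottom
        }
    }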
diff --git a/internal/core/adt/context.go b/internal/core/adt/context.go
new file mode 100644
index 0000000..8f9e07e
--- /dev/null
+++ b/internal/core/adt/context.go
@@ -0,0 +1,832 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package adt
+
+import (
+ "fmt"
+ "regexp"
+
+ "github.com/cockroachdb/apd/v2"
+ "golang.org/x/text/runes"
+
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/format"
+ "cuelang.org/go/cue/token"
+)
+
+// A Unifier implements a strategy for CUE's unification operation. It must
+// handle the following aspects of CUE evaluation:
+//
+// - Structural and reference cycles
+// - Non-monotonic validation
+// - Fixed-point computation of comprehensions
+//
+type Unifier interface {
+ // Unify fully unifies all values of a Vertex to completion and stores
+ // the result in the Vertex. If Unify was called on v before, it returns
+ // the cached result.
+ Unify(c *OpContext, v *Vertex, state VertexStatus) // error or bool?
+
+ // Evaluate returns the evaluated value associated with v. It may return a
+ // partial result. That is, if v was not yet unified, it may return a
+ // concrete value that must be the result assuming the configuration has no
+ // errors.
+ //
+ // These semantics allow CUE to break reference cycles in a straightforward
+ // manner.
+ //
+ // Vertex v must still be evaluated at some point to catch the underlying
+ // error.
+ //
+ Evaluate(c *OpContext, v *Vertex) Value
+}
+
+// Runtime defines an interface for low-level representation conversion and
+// lookup.
+type Runtime interface {
+ // StringIndexer allows for converting string labels to and from a
+ // canonical numeric representation.
+ StringIndexer
+}
+
+type Config struct {
+ Runtime
+ Unifier
+
+ Format func(Node) string
+}
+
+type config = Config
+
+// New creates an operation context.
+func New(v *Vertex, cfg *Config) *OpContext {
+ if cfg.Runtime == nil {
+ panic("nil Runtime")
+ }
+ if cfg.Unifier == nil {
+ panic("nil Unifier")
+ }
+ ctx := &OpContext{
+ config: *cfg,
+ }
+ if v != nil {
+ ctx.e = &Environment{Up: nil, Vertex: v}
+ }
+ return ctx
+}
+
+// An OpContext associates a Runtime and Unifier to allow evaluating the types
+// defined in this package. It tracks errors and provides convenience methods for
+// evaluating values.
+type OpContext struct {
+ config
+
+ e *Environment
+ src ast.Node
+ errs *Bottom
+
+ // TODO: remove use of tentative. Should be possible if incomplete
+ // handling is done better.
+ tentative int // set during comprehension evaluation
+}
+
+// IsTentative reports whether the evaluator is in a tentative state, in which
+// case evaluation of an arc should not finalize to non-concrete values.
+func (c *OpContext) IsTentative() bool {
+ return c.tentative > 0
+}
+
+func (c *OpContext) Pos() token.Pos {
+ if c.src == nil {
+ return token.NoPos
+ }
+ return c.src.Pos()
+}
+
+func (c *OpContext) Source() ast.Node {
+ return c.src
+}
+
+// NewContext creates an operation context.
+func NewContext(r Runtime, u Unifier, v *Vertex) *OpContext {
+ return New(v, &Config{Runtime: r, Unifier: u})
+}
+
+func (c *OpContext) pos() token.Pos {
+ if c.src == nil {
+ return token.NoPos
+ }
+ return c.src.Pos()
+}
+
+func (c *OpContext) spawn(node *Vertex) *OpContext {
+ sub := *c
+ node.Parent = c.e.Vertex
+ sub.e = &Environment{Up: c.e, Vertex: node}
+ if c.e != nil {
+ sub.e.CloseID = c.e.CloseID
+ }
+ return &sub
+}
+
+func (c *OpContext) Env(upCount int32) *Environment {
+ e := c.e
+ for ; upCount > 0; upCount-- {
+ e = e.Up
+ }
+ return e
+}
+
+func (c *OpContext) relNode(upCount int32) *Vertex {
+ e := c.e
+ for ; upCount > 0; upCount-- {
+ e = e.Up
+ }
+ return e.Vertex
+}
+
+func (c *OpContext) relLabel(upCount int32) Feature {
+ // locate current label.
+ e := c.e
+ for ; upCount > 0; upCount-- {
+ e = e.Up
+ }
+ return e.DynamicLabel
+}
+
+func (c *OpContext) concreteIsPossible(x Expr) bool {
+ if v, ok := x.(Value); ok {
+ if v.Concreteness() > Concrete {
+ c.AddErrf("value can never become concrete")
+ return false
+ }
+ }
+ return true
+}
+
+// HasErr reports whether any error was reported, including whether value
+// was incomplete.
+func (c *OpContext) HasErr() bool {
+ return c.errs != nil
+}
+
+func (c *OpContext) Err() *Bottom {
+ b := c.errs
+ c.errs = nil
+ return b
+}
+
+func (c *OpContext) addErrf(code ErrorCode, pos token.Pos, msg string, args ...interface{}) {
+ for i, a := range args {
+ switch x := a.(type) {
+ case Node:
+ args[i] = c.Str(x)
+ case ast.Node:
+ b, _ := format.Node(x)
+ args[i] = string(b)
+ case Feature:
+ args[i] = x.SelectorString(c.Runtime)
+ }
+ }
+
+ err := errors.Newf(pos, msg, args...)
+ c.addErr(code, err)
+}
+
+func (c *OpContext) addErr(code ErrorCode, err errors.Error) {
+ c.errs = CombineErrors(c.src, c.errs, &Bottom{Code: code, Err: err})
+}
+
+// AddBottom records an error in OpContext.
+func (c *OpContext) AddBottom(b *Bottom) {
+ c.errs = CombineErrors(c.src, c.errs, b)
+}
+
+// AddErr records an error in OpContext. It returns errors collected so far.
+func (c *OpContext) AddErr(err errors.Error) *Bottom {
+ if err != nil {
+ c.errs = CombineErrors(c.src, c.errs, &Bottom{Err: err})
+ }
+ return c.errs
+}
+
+// NewErrf creates a *Bottom value and returns it. The returned error uses the
+// current source as the point of origin of the error.
+func (c *OpContext) NewErrf(format string, args ...interface{}) *Bottom {
+ err := errors.Newf(c.pos(), format, args...)
+ return &Bottom{Src: c.src, Err: err, Code: EvalError}
+}
+
+// AddErrf records an error in OpContext. It returns errors collected so far.
+func (c *OpContext) AddErrf(format string, args ...interface{}) *Bottom {
+ return c.AddErr(errors.Newf(c.pos(), format, args...))
+}
+
+func (c *OpContext) validate(v Value) *Bottom {
+ switch x := v.(type) {
+ case *Bottom:
+ return x
+ case *Vertex:
+ v := c.Unifier.Evaluate(c, x)
+ if b, ok := v.(*Bottom); ok {
+ return b
+ }
+ }
+ return nil
+}
+
+type frame struct {
+ env *Environment
+ err *Bottom
+ src ast.Node
+}
+
+func (c *OpContext) PushState(env *Environment, src ast.Node) (saved frame) {
+ saved.env = c.e
+ saved.err = c.errs
+ saved.src = c.src
+
+ c.errs = nil
+ if src != nil {
+ c.src = src
+ }
+ if env != nil {
+ c.e = env
+ }
+
+ return saved
+}
+
+func (c *OpContext) PopState(s frame) *Bottom {
+ err := c.errs
+ c.e = s.env
+ c.errs = s.err
+ c.src = s.src
+ return err
+}
+
+// Resolve finds a node in the tree.
+//
+// Should only be used to insert Conjuncts. TODO: perhaps only return Conjuncts
+// and error.
+func (c *OpContext) Resolve(env *Environment, r Resolver) (*Vertex, *Bottom) {
+ s := c.PushState(env, r.Source())
+
+ arc := r.resolve(c)
+ // TODO: check for cycle errors?
+
+ err := c.PopState(s)
+ if err != nil {
+ return nil, err
+ }
+
+ return arc, err
+}
+
+// Validate validates value using the given validator.
+func (c *OpContext) Validate(check Validator, value Value) *Bottom {
+ return check.validate(c, value)
+}
+
+// Yield evaluates a Yielder and calls f for each result.
+func (c *OpContext) Yield(env *Environment, y Yielder, f YieldFunc) *Bottom {
+ s := c.PushState(env, y.Source())
+
+ c.tentative++
+
+ y.yield(c, f)
+
+ c.tentative--
+
+ return c.PopState(s)
+
+}
+
+// Concrete returns the concrete value of x after evaluating it.
+// msg is used to mention the context in which an error occurred, if any.
+func (c *OpContext) Concrete(env *Environment, x Expr, msg interface{}) (result Value, complete bool) {
+
+ v, complete := c.Evaluate(env, x)
+
+ v, ok := c.getDefault(v)
+ if !ok {
+ return v, false
+ }
+
+ if !IsConcrete(v) {
+ complete = false
+ b := c.NewErrf("non-concrete value %v in operand to %s", c.Str(v), msg)
+ b.Code = IncompleteError
+ v = b
+ }
+
+ if !complete {
+ return v, complete
+ }
+
+ return v, true
+}
+
+func (c *OpContext) getDefault(v Value) (result Value, ok bool) {
+ var d *Disjunction
+ switch x := v.(type) {
+ default:
+ return v, true
+
+ case *Vertex:
+ switch t := x.Value.(type) {
+ case *Disjunction:
+ d = t
+
+ case *StructMarker, *ListMarker:
+ return v, true
+
+ default:
+ return t, true
+ }
+
+ case *Disjunction:
+ d = x
+ }
+
+ if d.NumDefaults != 1 {
+ c.addErrf(IncompleteError, c.pos(),
+ "unresolved disjunction %s (type %s)", c.Str(d), d.Kind())
+ return nil, false
+ }
+ return c.getDefault(d.Values[0])
+}
+
+// Evaluate evaluates an expression within the given environment and indicates
+// whether the result is complete. It will always return a non-nil result.
+func (c *OpContext) Evaluate(env *Environment, x Expr) (result Value, complete bool) {
+ s := c.PushState(env, x.Source())
+
+ val := c.eval(x)
+
+ complete = true
+
+ if err, _ := val.(*Bottom); err != nil && err.IsIncomplete() {
+ complete = false
+ }
+ if val == nil {
+ complete = false
+ // TODO: ensure this cannot happen.
+ val = &Bottom{
+ Code: IncompleteError,
+ Err: errors.Newf(token.NoPos, "UNANTICIPATED ERROR"),
+ }
+
+ }
+
+ _ = c.PopState(s)
+
+ if !complete || val == nil {
+ return val, false
+ }
+
+ return val, true
+}
+
+// value evaluates expression x within the current environment. The result may
+// be nil if the result is incomplete. value leaves errors untouched so that
+// they can be collected by the caller.
+func (c *OpContext) value(x Expr) (result Value) {
+ v := c.evalState(x, Partial)
+
+ v, _ = c.getDefault(v)
+ return v
+}
+
+func (c *OpContext) eval(v Expr) (result Value) {
+ return c.evalState(v, Partial)
+}
+
+func (c *OpContext) evalState(v Expr, state VertexStatus) (result Value) {
+ savedSrc := c.src
+ c.src = v.Source()
+ err := c.errs
+ c.errs = nil
+
+ defer func() {
+ c.errs = CombineErrors(c.src, c.errs, err)
+ c.errs = CombineErrors(c.src, c.errs, result)
+ if c.errs != nil {
+ result = c.errs
+ }
+ c.src = savedSrc
+ }()
+
+ switch x := v.(type) {
+ case Value:
+ return x
+
+ case Evaluator:
+ v := x.evaluate(c)
+ return v
+
+ case Resolver:
+ arc := x.resolve(c)
+ if c.HasErr() {
+ return nil
+ }
+ if isIncomplete(arc) {
+ if arc != nil {
+ return arc.Value
+ }
+ return nil
+ }
+
+ v := c.Unifier.Evaluate(c, arc)
+ return v
+
+ default:
+ // return nil
+ c.AddErrf("unexpected Expr type %T", v)
+ }
+ return nil
+}
+
+func (c *OpContext) lookup(x *Vertex, pos token.Pos, l Feature) *Vertex {
+ if l == InvalidLabel || x == nil {
+ // TODO: is it possible to have an invalid label here? Maybe through the
+ // API?
+ return &Vertex{}
+ }
+
+ var kind Kind
+ if x.Value != nil {
+ kind = x.Value.Kind()
+ }
+
+ switch kind {
+ case StructKind:
+ if l.Typ() == IntLabel {
+ c.addErrf(0, pos, "invalid struct selector %s (type int)", l)
+ }
+
+ case ListKind:
+ switch {
+ case l.Typ() == IntLabel:
+ switch {
+ case l.Index() < 0:
+ c.addErrf(0, pos, "invalid list index %s (index must be non-negative)", l)
+ return nil
+ case l.Index() > len(x.Arcs):
+ c.addErrf(0, pos, "invalid list index %s (out of bounds)", l)
+ return nil
+ }
+
+ case l.IsDef():
+
+ default:
+ c.addErrf(0, pos, "invalid list index %s (type string)", l)
+ return nil
+ }
+
+ default:
+ // TODO: ?
+ // if !l.IsDef() {
+ // c.addErrf(0, nil, "invalid selector %s (must be definition for non-structs)", l)
+ // }
+ }
+
+ a := x.Lookup(l)
+ if a == nil {
+ code := IncompleteError
+ if !x.Accept(c, l) {
+ code = 0
+ }
+ // TODO: if the struct was a literal struct, we can also treat it as
+ // closed and make this a permanent error.
+ c.addErrf(code, pos, "undefined field %s", l.SelectorString(c.Runtime))
+ }
+ return a
+}
+
+func (c *OpContext) Label(x Value) Feature {
+ return labelFromValue(c, x)
+}
+
+func (c *OpContext) typeError(v Value, k Kind) {
+ if isError(v) {
+ return
+ }
+ if !IsConcrete(v) && v.Kind()&k != 0 {
+ c.addErrf(IncompleteError, pos(v),
+ "incomplete %s value '%s'", k, c.Str(v))
+ } else {
+ c.AddErrf("cannot use %s (type %s) as type %s", c.Str(v), v.Kind(), k)
+ }
+}
+
+func (c *OpContext) typeErrorAs(v Value, k Kind, as interface{}) {
+ if as == nil {
+ c.typeError(v, k)
+ return
+ }
+ if isError(v) {
+ return
+ }
+ if !IsConcrete(v) && v.Kind()&k != 0 {
+ c.addErrf(IncompleteError, pos(v),
+ "incomplete %s value '%s' in as", k, c.Str(v), as)
+ } else {
+ c.AddErrf("cannot use %s (type %s) as type %s in %v",
+ c.Str(v), v.Kind(), k, as)
+ }
+}
+
+var emptyNode = &Vertex{}
+
+func pos(x Node) token.Pos {
+ if x.Source() == nil {
+ return token.NoPos
+ }
+ return x.Source().Pos()
+}
+
+func (c *OpContext) node(x Expr, state VertexStatus) *Vertex {
+ v := c.evalState(x, state)
+
+ node, ok := v.(*Vertex)
+ if !ok {
+ if isError(v) {
+ if v == nil {
+ c.addErrf(IncompleteError, pos(x), "incomplete value %s", c.Str(x))
+ }
+ return emptyNode
+ }
+ if v.Kind()&StructKind != 0 {
+ c.addErrf(IncompleteError, pos(x),
+ "incomplete feed source value %s (type %s)",
+ x.Source(), v.Kind())
+ } else {
+ c.addErrf(0, pos(x),
+ "invalid operand %s (found %s, want list or struct)",
+ x.Source(), v.Kind())
+
+ }
+ return emptyNode
+ }
+ return node.Default()
+}
+
+// Elems returns the elements of a list.
+func (c *OpContext) Elems(v Value) []*Vertex {
+ list := c.list(v)
+ return list.Elems()
+}
+
+func (c *OpContext) list(v Value) *Vertex {
+ x, ok := v.(*Vertex)
+ if !ok || !x.IsList() {
+ c.typeError(v, ListKind)
+ return emptyNode
+ }
+ return x
+}
+
+func (c *OpContext) scalar(v Value) Value {
+ v = Unwrap(v)
+ switch v.(type) {
+ case *Null, *Bool, *Num, *String, *Bytes:
+ default:
+ c.typeError(v, ScalarKinds)
+ }
+ return v
+}
+
+var zero = &Num{K: NumKind}
+
+func (c *OpContext) num(v Value, as interface{}) *Num {
+ v = Unwrap(v)
+ if isError(v) {
+ return zero
+ }
+ x, ok := v.(*Num)
+ if !ok {
+ c.typeErrorAs(v, NumKind, as)
+ return zero
+ }
+ return x
+}
+
+func (c *OpContext) Int64(v Value) int64 {
+ v = Unwrap(v)
+ if isError(v) {
+ return 0
+ }
+ x, ok := v.(*Num)
+ if !ok {
+ c.typeError(v, IntKind)
+ return 0
+ }
+ i, err := x.X.Int64()
+ if err != nil {
+ c.AddErrf("number is not an int64: %v", err)
+ return 0
+ }
+ return i
+}
+
+func (c *OpContext) uint64(v Value, as string) uint64 {
+ v = Unwrap(v)
+ if isError(v) {
+ return 0
+ }
+ x, ok := v.(*Num)
+ if !ok {
+ c.typeErrorAs(v, IntKind, as)
+ return 0
+ }
+ if x.X.Negative {
+ // TODO: improve message
+ c.AddErrf("cannot convert negative number to uint64")
+ return 0
+ }
+ if !x.X.Coeff.IsUint64() {
+ // TODO: improve message
+ c.AddErrf("cannot convert number %s to uint64", x.X)
+ return 0
+ }
+ return x.X.Coeff.Uint64()
+}
+
+func (c *OpContext) BoolValue(v Value) bool {
+ return c.boolValue(v, nil)
+}
+
+func (c *OpContext) boolValue(v Value, as interface{}) bool {
+ v = Unwrap(v)
+ if isError(v) {
+ return false
+ }
+ x, ok := v.(*Bool)
+ if !ok {
+ c.typeErrorAs(v, BoolKind, as)
+ return false
+ }
+ return x.B
+}
+
+func (c *OpContext) StringValue(v Value) string {
+ return c.stringValue(v, nil)
+}
+
+func (c *OpContext) stringValue(v Value, as interface{}) string {
+ v = Unwrap(v)
+ if isError(v) {
+ return ""
+ }
+ switch x := v.(type) {
+ case *String:
+ return x.Str
+
+ case *Bytes:
+ return string(runes.ReplaceIllFormed().Bytes(x.B))
+
+ case *Num:
+ return x.X.String()
+
+ default:
+ if as == nil {
+ c.typeError(v, StringKind)
+ } else {
+ c.typeErrorAs(v, StringKind, as)
+ }
+ }
+ return ""
+}
+
+func (c *OpContext) bytesValue(v Value, as interface{}) []byte {
+ v = Unwrap(v)
+ if isError(v) {
+ return nil
+ }
+ x, ok := v.(*Bytes)
+ if !ok {
+ c.typeErrorAs(v, BytesKind, as)
+ return nil
+ }
+ return x.B
+}
+
+var matchNone = regexp.MustCompile("^$")
+
+func (c *OpContext) regexp(v Value) *regexp.Regexp {
+ v = Unwrap(v)
+ if isError(v) {
+ return matchNone
+ }
+ switch x := v.(type) {
+ case *String:
+ if x.RE != nil {
+ return x.RE
+ }
+ // TODO: synchronization
+ p, err := regexp.Compile(x.Str)
+ if err != nil {
+ // FatalError? How to cache error
+ c.AddErrf("invalid regexp: %s", err)
+ x.RE = matchNone
+ } else {
+ x.RE = p
+ }
+ return x.RE
+
+ case *Bytes:
+ if x.RE != nil {
+ return x.RE
+ }
+ // TODO: synchronization
+ p, err := regexp.Compile(string(x.B))
+ if err != nil {
+ c.AddErrf("invalid regexp: %s", err)
+ x.RE = matchNone
+ } else {
+ x.RE = p
+ }
+ return x.RE
+
+ default:
+ c.typeError(v, StringKind|BytesKind)
+ return matchNone
+ }
+}
+
+func (c *OpContext) newNum(d *apd.Decimal, k Kind, sources ...Node) Value {
+ if c.HasErr() {
+ return c.Err()
+ }
+ return &Num{Src: c.src, X: *d, K: k}
+}
+
+func (c *OpContext) NewInt64(n int64, sources ...Node) Value {
+ if c.HasErr() {
+ return c.Err()
+ }
+ d := apd.New(n, 0)
+ return &Num{Src: c.src, X: *d, K: IntKind}
+}
+
+func (c *OpContext) NewString(s string) Value {
+ if c.HasErr() {
+ return c.Err()
+ }
+ return &String{Src: c.src, Str: s}
+}
+
+func (c *OpContext) newBytes(b []byte) Value {
+ if c.HasErr() {
+ return c.Err()
+ }
+ return &Bytes{Src: c.src, B: b}
+}
+
+func (c *OpContext) newBool(b bool) Value {
+ if c.HasErr() {
+ return c.Err()
+ }
+ return &Bool{Src: c.src, B: b}
+}
+
+func (c *OpContext) newList(src ast.Node, parent *Vertex) *Vertex {
+ return &Vertex{Parent: parent, Value: &ListMarker{}}
+}
+
+// Str reports a debug string of x.
+func (c *OpContext) Str(x Node) string {
+ if c.Format == nil {
+ return fmt.Sprintf("%T", x)
+ }
+ return c.Format(x)
+}
+
+// NewList returns a new list for the given values.
+func (c *OpContext) NewList(values ...Value) *Vertex {
+ // TODO: consider making this a literal list instead.
+ list := &ListLit{}
+ v := &Vertex{
+ Conjuncts: []Conjunct{{Env: nil, x: list}},
+ }
+
+ for _, x := range values {
+ list.Elems = append(list.Elems, x)
+ }
+ c.Unify(c, v, Finalized)
+ return v
+}
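A hypothetical sketch of the evaluation entry points defined above (env and
expr are assumed to come from the compiler):

    func evalSketch(c *adt.OpContext, env *adt.Environment, expr adt.Expr) {
        v, complete := c.Evaluate(env, expr) // always returns a non-nil Value
        if !complete {
            // v is an *adt.Bottom with an incomplete (or cycle) error code
        }
        if w, ok := c.Concrete(env, expr, "sketch"); ok {
            _ = w // concrete value after resolving defaults
        }
    }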
diff --git a/internal/core/adt/default.go b/internal/core/adt/default.go
new file mode 100644
index 0000000..ae7ca8e
--- /dev/null
+++ b/internal/core/adt/default.go
@@ -0,0 +1,113 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package adt
+
+// Default returns the default value or itself if there is no default.
+func Default(v Value) Value {
+ switch x := v.(type) {
+ case *Vertex:
+ return x.Default()
+ case *Disjunction:
+ return x.Default()
+ default:
+ return v
+ }
+}
+
+func (d *Disjunction) Default() Value {
+ switch d.NumDefaults {
+ case 0:
+ return d
+ case 1:
+ return d.Values[0]
+ default:
+ return &Disjunction{
+ Src: d.Src,
+ Values: d.Values[:d.NumDefaults],
+ NumDefaults: 0,
+ }
+ }
+}
+
+// Default returns the default value or itself if there is no default.
+func (v *Vertex) Default() *Vertex {
+ d, ok := v.Value.(*Disjunction)
+ if !ok {
+ return v
+ }
+
+ var w *Vertex
+
+ switch d.NumDefaults {
+ case 0:
+ return v
+ case 1:
+ w = d.Values[0]
+ default:
+ x := *v
+ x.Value = &Disjunction{
+ Src: d.Src,
+ Values: d.Values[:d.NumDefaults],
+ NumDefaults: 0,
+ }
+ w = &x
+ }
+
+ w.Conjuncts = nil
+ for _, c := range v.Conjuncts {
+ // TODO: preserve field information.
+ expr, _ := stripNonDefaults(c.Expr())
+ w.AddConjunct(MakeConjunct(c.Env, expr))
+ }
+ return w
+}
+
+// TODO: this should go: record preexpanded disjunctions in Vertex.
+func stripNonDefaults(expr Expr) (r Expr, stripped bool) {
+ switch x := expr.(type) {
+ case *DisjunctionExpr:
+ if !x.HasDefaults {
+ return x, false
+ }
+ d := *x
+ d.Values = []Disjunct{}
+ for _, v := range x.Values {
+ if v.Default {
+ d.Values = append(d.Values, v)
+ }
+ }
+ if len(d.Values) == 1 {
+ return d.Values[0].Val, true
+ }
+ return &d, true
+
+ case *BinaryExpr:
+ if x.Op != AndOp {
+ return x, false
+ }
+ a, sa := stripNonDefaults(x.X)
+ b, sb := stripNonDefaults(x.Y)
+ if sa || sb {
+ bin := *x
+ bin.X = a
+ bin.Y = b
+ return &bin, true
+ }
+ return x, false
+
+ default:
+ return x, false
+ }
+}
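Illustration of the default semantics above (hypothetical; c is assumed to be
an initialized *adt.OpContext):

    func defaultSketch(c *adt.OpContext) {
        one := adt.ToVertex(c.NewInt64(1))
        two := adt.ToVertex(c.NewInt64(2))
        // Disjunction for *1 | 2: the first value is marked as the default.
        d := &adt.Disjunction{Values: []*adt.Vertex{one, two}, NumDefaults: 1}
        _ = adt.Default(d)   // returns one
        _ = adt.Default(two) // a value without defaults is returned unchanged
    }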
diff --git a/internal/core/adt/errors.go b/internal/core/adt/errors.go
new file mode 100644
index 0000000..58ddac5
--- /dev/null
+++ b/internal/core/adt/errors.go
@@ -0,0 +1,193 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package adt
+
+// This file contains error encodings.
+//
+//
+// *Bottom:
+// - an adt.Value
+// - always belongs to a single vertex.
+// - does NOT implement error
+// - marks error code used for control flow
+//
+// errors.Error
+// - CUE default error
+// - implements error
+// - tracks error locations
+// - has error message details
+// - supports multiple errors
+//
+
+import (
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/errors"
+)
+
+// ErrorCode indicates the type of error. The type of error may influence
+// control flow. No other aspects of an error may influence control flow.
+type ErrorCode int
+
+const (
+ // An EvalError is a fatal evaluation error.
+ EvalError ErrorCode = iota
+
+ // A UserError is a fatal error originating from the user.
+ UserError
+
+ // NotExistError is used to indicate a value does not exist.
+ // Mostly used for legacy reasons.
+ NotExistError
+
+ // IncompleteError means an evaluation could not complete because of
+ // insufficient information that may still be added later.
+ IncompleteError
+
+ // A CycleError indicates a reference cycle error. It is considered to be
+ // an incomplete error, as reference cycles may be broken by providing
+ // a concrete value.
+ CycleError
+)
+
+func (c ErrorCode) String() string {
+ switch c {
+ case EvalError:
+ return "eval"
+ case UserError:
+ return "user"
+ case IncompleteError:
+ return "incomplete"
+ case CycleError:
+ return "cycle"
+ }
+ return "unknown"
+}
+
+// Bottom represents an error or bottom symbol.
+//
+// Although a Bottom node holds control data, it should not be created until the
+// control information has already resulted in an error.
+type Bottom struct {
+ Src ast.Node
+ Err errors.Error
+
+ Code ErrorCode
+ HasRecursive bool
+ ChildError bool // Err is the error of the child
+ // Value holds the computed value so far in case of an error.
+ Value Value
+}
+
+func (x *Bottom) Source() ast.Node { return x.Src }
+func (x *Bottom) Kind() Kind { return BottomKind }
+func (x *Bottom) Specialize(k Kind) Value { return x } // XXX remove
+
+func (b *Bottom) IsIncomplete() bool {
+ if b == nil {
+ return false
+ }
+ return b.Code == IncompleteError || b.Code == CycleError
+}
+
+// isLiteralBottom reports whether x is an error originating from a user.
+func isLiteralBottom(x Expr) bool {
+ b, ok := x.(*Bottom)
+ return ok && b.Code == UserError
+}
+
+// isError reports whether v is an error or nil.
+func isError(v Value) bool {
+ if v == nil {
+ return true
+ }
+ _, ok := v.(*Bottom)
+ return ok
+}
+
+// isIncomplete reports whether v is associated with an incomplete error.
+func isIncomplete(v *Vertex) bool {
+ if v == nil {
+ return true
+ }
+ if b, ok := v.Value.(*Bottom); ok {
+ return b.IsIncomplete()
+ }
+ return false
+}
+
+// AddChildError updates v to record an error that occurred in one of
+// its descendant arcs. The resulting error will record the worst error code of
+// the current error or recursive error.
+//
+// If v is not already an error, the value is recorded in the error for
+// reference.
+//
+func (v *Vertex) AddChildError(recursive *Bottom) {
+ v.ChildErrors = CombineErrors(nil, v.ChildErrors, recursive)
+ if recursive.IsIncomplete() {
+ return
+ }
+ x := v.Value
+ err, _ := x.(*Bottom)
+ if err == nil {
+ v.Value = &Bottom{
+ Code: recursive.Code,
+ Value: x,
+ HasRecursive: true,
+ ChildError: true,
+ Err: recursive.Err,
+ }
+ return
+ }
+
+ err.HasRecursive = true
+ if err.Code > recursive.Code {
+ err.Code = recursive.Code
+ }
+
+ v.Value = err
+}
+
+// CombineErrors combines two errors that originate at the same Vertex.
+func CombineErrors(src ast.Node, x, y Value) *Bottom {
+ a, _ := x.(*Bottom)
+ b, _ := y.(*Bottom)
+
+ switch {
+ case a != nil && b != nil:
+ case a != nil:
+ return a
+ case b != nil:
+ return b
+ default:
+ return nil
+ }
+
+ if a.Code != b.Code {
+ if a.Code > b.Code {
+ a, b = b, a
+ }
+
+ if b.Code >= IncompleteError {
+ return a
+ }
+ }
+
+ return &Bottom{
+ Src: src,
+ Err: errors.Append(a.Err, b.Err),
+ Code: a.Code,
+ }
+}
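A short sketch of how the error combination rules above play out (hypothetical
values; assumes imports of cuelang.org/go/cue/errors and cuelang.org/go/cue/token):

    func errorSketch() {
        fatal := &adt.Bottom{Code: adt.EvalError, Err: errors.Newf(token.NoPos, "fatal")}
        incomplete := &adt.Bottom{Code: adt.IncompleteError, Err: errors.Newf(token.NoPos, "not yet")}

        _ = adt.CombineErrors(nil, fatal, incomplete) // returns fatal: incomplete errors yield to fatal ones
        _ = adt.CombineErrors(nil, nil, incomplete)   // returns incomplete
        _ = incomplete.IsIncomplete()                 // true (also true for CycleError)
    }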
diff --git a/internal/core/adt/expr.go b/internal/core/adt/expr.go
index 0a607ec..2346519 100644
--- a/internal/core/adt/expr.go
+++ b/internal/core/adt/expr.go
@@ -15,6 +15,8 @@
package adt
import (
+ "bytes"
+ "fmt"
"regexp"
"github.com/cockroachdb/apd/v2"
@@ -35,6 +37,13 @@
func (x *StructLit) Source() ast.Node { return x.Src }
+func (x *StructLit) evaluate(c *OpContext) Value {
+ e := c.Env(0)
+ v := &Vertex{Conjuncts: []Conjunct{{e, x}}}
+ c.Unifier.Unify(c, v, Finalized) // TODO: also partial okay?
+ return v
+}
+
// FIELDS
//
// Fields can also be used as expressions whereby the value field is the
@@ -58,7 +67,12 @@
Value Expr
}
-func (x *Field) Source() ast.Node { return x.Src }
+func (x *Field) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
// An OptionalField represents an optional regular field.
//
@@ -70,7 +84,12 @@
Value Expr
}
-func (x *OptionalField) Source() ast.Node { return x.Src }
+func (x *OptionalField) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
// A BulkOptionalField represents a set of optional fields.
//
@@ -83,7 +102,12 @@
Label Feature // for reference and formatting
}
-func (x *BulkOptionalField) Source() ast.Node { return x.Src }
+func (x *BulkOptionalField) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
// An Ellipsis represents a set of optional fields of a given type.
//
@@ -94,7 +118,12 @@
Value Expr
}
-func (x *Ellipsis) Source() ast.Node { return x.Src }
+func (x *Ellipsis) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
// A DynamicField represents a regular field for which the key is computed.
//
@@ -111,9 +140,12 @@
return x.Src.Optional != token.NoPos
}
-func (x *DynamicField) Source() ast.Node { return x.Src }
-
-// Expressions
+func (x *DynamicField) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
// A ListLit represents an unevaluated list literal.
//
@@ -126,18 +158,19 @@
Elems []Elem
}
-func (x *ListLit) Source() ast.Node { return x.Src }
-
-// -- Literals
-
-// Bottom represents an error or bottom symbol.
-type Bottom struct {
- Src ast.Node
- Err errors.Error
+func (x *ListLit) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
}
-func (x *Bottom) Source() ast.Node { return x.Src }
-func (x *Bottom) Kind() Kind { return BottomKind }
+func (x *ListLit) evaluate(c *OpContext) Value {
+ e := c.Env(0)
+ v := &Vertex{Conjuncts: []Conjunct{{e, x}}}
+ c.Unifier.Unify(c, v, Finalized) // TODO: also partial okay?
+ return v
+}
// Null represents null. It can be used as a Value and Expr.
type Null struct {
@@ -163,9 +196,37 @@
X apd.Decimal // Is integer if the apd.Decimal is an integer.
}
+// TODO: do we need this?
+// func NewNumFromString(src ast.Node, s string) Value {
+// n := &Num{Src: src, K: IntKind}
+// if strings.ContainsAny(s, "eE.") {
+// n.K = FloatKind
+// }
+// _, _, err := n.X.SetString(s)
+// if err != nil {
+// pos := token.NoPos
+// if src != nil {
+// pos = src.Pos()
+// }
+// return &Bottom{Err: errors.Newf(pos, "invalid number: %v", err)}
+// }
+// return n
+// }
+
func (x *Num) Source() ast.Node { return x.Src }
func (x *Num) Kind() Kind { return x.K }
+// TODO: do we still need this?
+// func (x *Num) Specialize(k Kind) Value {
+// k = k & x.K
+// if k == x.K {
+// return x
+// }
+// y := *x
+// y.K = k
+// return &y
+// }
+
// String is a string value. It can be used as a Value and Expr.
type String struct {
Src ast.Node
@@ -186,11 +247,12 @@
func (x *Bytes) Source() ast.Node { return x.Src }
func (x *Bytes) Kind() Kind { return BytesKind }
-// -- composites: the evaluated fields of a composite are recorded in the arc
+// Composites: the evaluated fields of a composite are recorded in the arc
// vertices.
type ListMarker struct {
- Src ast.Node
+ Src ast.Node
+ IsOpen bool
}
func (x *ListMarker) Source() ast.Node { return x.Src }
@@ -198,15 +260,15 @@
func (x *ListMarker) node() {}
type StructMarker struct {
- Closed bool
+ // NeedClose is used to signal that the evaluator should close this struct.
+ // It is only set by the close builtin.
+ NeedClose bool
}
func (x *StructMarker) Source() ast.Node { return nil }
func (x *StructMarker) Kind() Kind { return StructKind }
func (x *StructMarker) node() {}
-// -- top types
-
// Top represents all possible values. It can be used as a Value and Expr.
type Top struct{ Src *ast.Ident }
@@ -226,8 +288,24 @@
K Kind
}
-func (x *BasicType) Source() ast.Node { return x.Src }
-func (x *BasicType) Kind() Kind { return x.K }
+func (x *BasicType) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+func (x *BasicType) Kind() Kind { return x.K }
+
+// TODO: do we still need this?
+// func (x *BasicType) Specialize(k Kind) Value {
+// k = x.K & k
+// if k == x.K {
+// return x
+// }
+// y := *x
+// y.K = k
+// return &y
+// }
// TODO: should we use UnaryExpr for Bound now we have BoundValue?
@@ -242,7 +320,31 @@
Expr Expr
}
-func (x *BoundExpr) Source() ast.Node { return x.Src }
+func (x *BoundExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *BoundExpr) evaluate(ctx *OpContext) Value {
+ if v, ok := x.Expr.(Value); ok {
+ if v == nil || v.Concreteness() > Concrete {
+ return ctx.NewErrf("bound has fixed non-concrete value")
+ }
+ return &BoundValue{x.Src, x.Op, v}
+ }
+ v := ctx.value(x.Expr)
+ if isError(v) {
+ return v
+ }
+ if v.Concreteness() > Concrete {
+ ctx.addErrf(IncompleteError, ctx.pos(),
+ "non-concrete value %s for bound %s", ctx.Str(x.Expr), x.Op)
+ return nil
+ }
+ return &BoundValue{x.Src, x.Op, v}
+}
// A BoundValue is a fully evaluated unary comparator that can be used to
// validate other values.
@@ -254,13 +356,45 @@
Src ast.Expr
Op Op
Value Value
- K Kind
}
func (x *BoundValue) Source() ast.Node { return x.Src }
-func (x *BoundValue) Kind() Kind { return x.K }
+func (x *BoundValue) Kind() Kind {
+ k := x.Value.Kind()
+ switch k {
+ case IntKind, FloatKind, NumKind:
+ return NumKind
-// -- References
+ case NullKind:
+ if x.Op == NotEqualOp {
+ return TopKind &^ NullKind
+ }
+ }
+ return k
+}
+
+func (x *BoundValue) validate(c *OpContext, y Value) *Bottom {
+ a := y // Can be list or struct.
+ b := c.scalar(x.Value)
+ if c.HasErr() {
+ return c.Err()
+ }
+
+ switch v := BinOp(c, x.Op, a, b).(type) {
+ case *Bottom:
+ return v
+
+ case *Bool:
+ if v.B {
+ return nil
+ }
+ return c.NewErrf("invalid value %v (out of bound %s)",
+ c.Str(y), c.Str(x))
+
+ default:
+ panic(fmt.Sprintf("unsupported type %T", v))
+ }
+}
// A FieldReference represents a lexical reference to a field.
//
@@ -272,7 +406,18 @@
Label Feature
}
-func (x *FieldReference) Source() ast.Node { return x.Src }
+func (x *FieldReference) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *FieldReference) resolve(c *OpContext) *Vertex {
+ n := c.relNode(x.UpCount)
+ pos := pos(x)
+ return c.lookup(n, pos, x.Label)
+}
// A LabelReference refers to the string or integer value of a label.
//
@@ -285,7 +430,25 @@
// TODO: should this implement resolver at all?
-func (x *LabelReference) Source() ast.Node { return x.Src }
+func (x *LabelReference) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *LabelReference) evaluate(ctx *OpContext) Value {
+ label := ctx.relLabel(x.UpCount)
+ if label == 0 {
+ // There is no label. This may happen if a LabelReference is evaluated
+ // outside of the context of a parent node, for instance if an
+ // "additional" items or properties is evaluated in isolation.
+ //
+ // TODO: this should return the pattern of the label.
+ return &BasicType{K: StringKind}
+ }
+ return label.ToValue(ctx)
+}
// A DynamicReference is like a LabelReference, but with a computed label.
//
@@ -306,7 +469,21 @@
Alias Feature
}
-func (x *DynamicReference) Source() ast.Node { return x.Src }
+func (x *DynamicReference) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *DynamicReference) resolve(ctx *OpContext) *Vertex {
+ e := ctx.Env(x.UpCount)
+ frame := ctx.PushState(e, x.Src)
+ v := ctx.value(x.Label)
+ ctx.PopState(frame)
+ f := ctx.Label(v)
+ return ctx.lookup(e.Vertex, pos(x), f)
+}
// An ImportReference refers to an imported package.
//
@@ -320,7 +497,25 @@
Label Feature // for informative purposes
}
-func (x *ImportReference) Source() ast.Node { return x.Src }
+func (x *ImportReference) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+// TODO: imports
+// func (x *ImportReference) resolve(c *context, e *environment) (arc, *Bottom) {
+// return c.r.lookupImport(e, x.importPath)
+// }
+
+// func (x *ImportReference) eval(c *context, e *environment) envVal {
+// arc, err := lookup(e, e.node, x.label)
+// if err != nil {
+// return err
+// }
+// return envVal{e, arc.eval()}
+// }
// A LetReference evaluates a let expression in its original environment.
//
@@ -333,7 +528,26 @@
X Expr
}
-func (x *LetReference) Source() ast.Node { return x.Src }
+func (x *LetReference) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *LetReference) resolve(c *OpContext) *Vertex {
+ e := c.Env(x.UpCount)
+ label := e.Vertex.Label
+ // Anonymous arc.
+ return &Vertex{Parent: nil, Label: label, Conjuncts: []Conjunct{{e, x.X}}}
+}
+
+func (x *LetReference) evaluate(c *OpContext) Value {
+ e := c.Env(x.UpCount)
+
+ // Not caching let expressions may lead to exponential behavior.
+ return e.evalCached(c, x.X)
+}
// A SelectorExpr looks up a fixed field in an expression.
//
@@ -345,7 +559,17 @@
Sel Feature
}
-func (x *SelectorExpr) Source() ast.Node { return x.Src }
+func (x *SelectorExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *SelectorExpr) resolve(c *OpContext) *Vertex {
+ n := c.node(x.X, Partial)
+ return c.lookup(n, x.Src.Sel.NamePos, x.Sel)
+}
// IndexExpr is like a selector, but selects an index.
//
@@ -357,7 +581,20 @@
Index Expr
}
-func (x *IndexExpr) Source() ast.Node { return x.Src }
+func (x *IndexExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *IndexExpr) resolve(ctx *OpContext) *Vertex {
+ // TODO: support byte index.
+ n := ctx.node(x.X, Partial)
+ i := ctx.value(x.Index)
+ f := ctx.Label(i)
+ return ctx.lookup(n, x.Src.Index.Pos(), f)
+}
// A SliceExpr represents a slice operation. (Not currently in spec.)
//
@@ -371,7 +608,85 @@
Stride Expr
}
-func (x *SliceExpr) Source() ast.Node { return x.Src }
+func (x *SliceExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *SliceExpr) evaluate(c *OpContext) Value {
+ // TODO: strides
+
+ v := c.value(x.X)
+ const as = "slice index"
+
+ switch v := v.(type) {
+ case nil:
+ c.addErrf(IncompleteError, c.pos(),
+ "non-concrete slice subject %s", c.Str(x.X))
+ return nil
+ case *Vertex:
+ if !v.IsList() {
+ break
+ }
+
+ var (
+ lo = uint64(0)
+ hi = uint64(len(v.Arcs))
+ )
+ if x.Lo != nil {
+ lo = c.uint64(c.value(x.Lo), as)
+ }
+ if x.Hi != nil {
+ hi = c.uint64(c.value(x.Hi), as)
+ if hi > uint64(len(v.Arcs)) {
+ return c.NewErrf("index %d out of range", hi)
+ }
+ }
+ if lo > hi {
+ return c.NewErrf("invalid slice index: %d > %d", lo, hi)
+ }
+
+ n := c.newList(c.src, v.Parent)
+ for i, a := range v.Arcs[lo:hi] {
+ label, err := MakeLabel(a.Source(), int64(i), IntLabel)
+ if err != nil {
+ c.AddBottom(&Bottom{Src: a.Source(), Err: err})
+ return nil
+ }
+ n.Arcs = append(n.Arcs, &Vertex{
+ Label: label,
+ Conjuncts: a.Conjuncts,
+ })
+ }
+ return n
+
+ case *Bytes:
+ var (
+ lo = uint64(0)
+ hi = uint64(len(v.B))
+ )
+ if x.Lo != nil {
+ lo = c.uint64(c.value(x.Lo), as)
+ }
+ if x.Hi != nil {
+ hi = c.uint64(c.value(x.Hi), as)
+ if hi > uint64(len(v.B)) {
+ return c.NewErrf("index %d out of range", hi)
+ }
+ }
+ if lo > hi {
+ return c.NewErrf("invalid slice index: %d > %d", lo, hi)
+ }
+ return c.newBytes(v.B[lo:hi])
+ }
+
+ if isError(v) {
+ return v
+ }
+ return c.NewErrf("cannot slice %v (type %s)", c.Str(v), v.Kind())
+}
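+
+// For example (informal): slicing the list [1, 2, 3] with [1:3] selects the
+// arcs at indices 1 and 2 of the list Vertex and relabels them 0 and 1 in
+// the resulting list, yielding [2, 3].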
// An Interpolation is a string interpolation.
//
@@ -383,7 +698,34 @@
Parts []Expr // odd: strings, even sources
}
-func (x *Interpolation) Source() ast.Node { return x.Src }
+func (x *Interpolation) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *Interpolation) evaluate(c *OpContext) Value {
+ buf := bytes.Buffer{}
+ for _, e := range x.Parts {
+ v := c.value(e)
+ s := c.StringValue(v)
+ buf.WriteString(s)
+ }
+ if err := c.Err(); err != nil {
+ err = &Bottom{
+ Code: err.Code,
+ Err: errors.Wrapf(err.Err, pos(x), "invalid interpolation"),
+ }
+ // c.AddBottom(err)
+ // return nil
+ return err
+ }
+ // if k == bytesKind {
+ // return &BytesLit{x.source, buf.String(), nil}
+ // }
+ return &String{x.Src, buf.String(), nil}
+}
// UnaryExpr is a unary expression.
//
@@ -396,7 +738,55 @@
X Expr
}
-func (x *UnaryExpr) Source() ast.Node { return x.Src }
+func (x *UnaryExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *UnaryExpr) evaluate(c *OpContext) Value {
+ if !c.concreteIsPossible(x.X) {
+ return nil
+ }
+ v := c.value(x.X)
+ if isError(v) {
+ return v
+ }
+
+ op := x.Op
+ k := kind(v)
+ expectedKind := k
+ switch op {
+ case SubtractOp:
+ if v, ok := v.(*Num); ok {
+ f := *v
+ f.X.Neg(&v.X)
+ f.Src = x.Src
+ return &f
+ }
+ expectedKind = NumKind
+
+ case AddOp:
+ if v, ok := v.(*Num); ok {
+ // TODO: wrap in thunk to save position of '+'?
+ return v
+ }
+ expectedKind = NumKind
+
+ case NotOp:
+ if v, ok := v.(*Bool); ok {
+ return &Bool{x.Src, !v.B}
+ }
+ expectedKind = BoolKind
+ }
+ if k&expectedKind != BottomKind {
+ c.addErrf(IncompleteError, pos(x.X),
+ "operand %s of '%s' not concrete (was %s)", c.Str(x), op, k)
+ return nil
+ }
+ return c.NewErrf("invalid operation %s%s (%s %s)", op, c.Str(x), op, k)
+}
// BinaryExpr is a binary expression.
//
@@ -410,9 +800,64 @@
Y Expr
}
-func (x *BinaryExpr) Source() ast.Node { return x.Src }
+func (x *BinaryExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
-// -- builtins
+func (x *BinaryExpr) evaluate(c *OpContext) Value {
+ env := c.Env(0)
+ if x.Op == AndOp {
+ // Anonymous Arc
+ v := Vertex{Conjuncts: []Conjunct{{env, x}}}
+ return c.Unifier.Evaluate(c, &v)
+ }
+
+ if !c.concreteIsPossible(x.X) || !c.concreteIsPossible(x.Y) {
+ return nil
+ }
+
+ left, _ := c.Concrete(env, x.X, x.Op)
+ right, _ := c.Concrete(env, x.Y, x.Op)
+
+ leftKind := kind(left)
+ rightKind := kind(right)
+
+ // TODO: allow comparing to a literal Bottom only. Find something more
+ // principled perhaps. One should especially take care that two values
+ // evaluating to Bottom don't evaluate to true. For now we check for
+ // Bottom here and require that one of the values be a Bottom literal.
+ if isLiteralBottom(x.X) || isLiteralBottom(x.Y) {
+ if b := c.validate(left); b != nil {
+ left = b
+ }
+ if b := c.validate(right); b != nil {
+ right = b
+ }
+ switch x.Op {
+ case EqualOp:
+ return &Bool{x.Src, leftKind == rightKind}
+ case NotEqualOp:
+ return &Bool{x.Src, leftKind != rightKind}
+ }
+ }
+
+ if err := CombineErrors(x.Src, left, right); err != nil {
+ return err
+ }
+
+ if err := c.Err(); err != nil {
+ return err
+ }
+
+ value := BinOp(c, x.Op, left, right)
+ if n, ok := value.(*Vertex); ok && n.IsList() {
+ n.UpdateStatus(Partial)
+ }
+ return value
+}
// A CallExpr represents a call to a builtin.
//
@@ -425,7 +870,18 @@
Args []Expr
}
-func (x *CallExpr) Source() ast.Node { return x.Src }
+func (x *CallExpr) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *CallExpr) evaluate(c *OpContext) Value {
+ c.addErrf(0, pos(x), "cannot call non-function %s (type %s)",
+ x.Fun, "nil")
+ return nil
+}
// A BuiltinValidator is a Value that results from evaluation a partial call
// to a builtin (using CallExpr).
@@ -434,13 +890,21 @@
//
type BuiltinValidator struct {
Src *ast.CallExpr
- Fun Expr // TODO: should probably be builtin.
+ Fun Expr
Args []Value // any but the first value
- // call *builtin // function must return a bool
}
-func (x *BuiltinValidator) Source() ast.Node { return x.Src }
-func (x *BuiltinValidator) Kind() Kind { return TopKind }
+func (x *BuiltinValidator) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+func (x *BuiltinValidator) Kind() Kind { return TopKind }
+
+func (x *BuiltinValidator) validate(c *OpContext, v Value) *Bottom {
+ return nil
+}
// A Disjunction represents a disjunction, where each disjunct may or may not
// be marked as a default.
@@ -464,6 +928,15 @@
return x.Src
}
+func (x *DisjunctionExpr) evaluate(c *OpContext) Value {
+ e := c.Env(0)
+ v := &Vertex{Conjuncts: []Conjunct{{e, x}}}
+ c.Unifier.Unify(c, v, Finalized) // TODO: also partial okay?
+ // TODO: if the disjunction result originated from a literal value, we may
+ // consider the result closed to create more permanent errors.
+ return v
+}
+
// A Conjunction is a conjunction of values that cannot be represented as a
// single value. It is the result of unification.
type Conjunction struct {
@@ -471,8 +944,14 @@
Values []Value
}
-func (x *Conjunction) Source() ast.Node { return nil }
-func (x *Conjunction) Kind() Kind { return TopKind }
+func (x *Conjunction) Source() ast.Node { return x.Src }
+func (x *Conjunction) Kind() Kind {
+ k := TopKind
+ for _, v := range x.Values {
+ k &= v.Kind()
+ }
+ return k
+}
// A disjunction is a disjunction of values. It is the result of expanding
// a DisjunctionExpr if the expression cannot be represented as a single value.
@@ -511,7 +990,36 @@
Dst Yielder
}
-func (x *ForClause) Source() ast.Node { return x.Syntax }
+func (x *ForClause) Source() ast.Node {
+ if x.Syntax == nil {
+ return nil
+ }
+ return x.Syntax
+}
+
+func (x *ForClause) yield(c *OpContext, f YieldFunc) {
+ n := c.node(x.Src, Finalized)
+ for _, a := range n.Arcs {
+ if !a.Label.IsRegular() {
+ continue
+ }
+
+ n := &Vertex{Arcs: []*Vertex{
+ {Label: x.Value, Conjuncts: a.Conjuncts}, // TODO: only needed if value label != _
+ }}
+ if x.Key != 0 {
+ v := &Vertex{Label: x.Key}
+ key := a.Label.ToValue(c)
+ v.AddConjunct(MakeConjunct(c.Env(0), key))
+ n.Arcs = append(n.Arcs, v)
+ }
+
+ x.Dst.yield(c.spawn(n), f)
+ if c.HasErr() {
+ break
+ }
+ }
+}
// An IfClause represents an if clause of a comprehension. It can be used
// as a struct or list element.
@@ -524,7 +1032,18 @@
Dst Yielder
}
-func (x *IfClause) Source() ast.Node { return x.Src }
+func (x *IfClause) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *IfClause) yield(ctx *OpContext, f YieldFunc) {
+ if ctx.BoolValue(ctx.value(x.Condition)) {
+ x.Dst.yield(ctx, f)
+ }
+}
// An LetClause represents a let clause in a comprehension.
//
@@ -537,11 +1056,32 @@
Dst Yielder
}
-func (x *LetClause) Source() ast.Node { return x.Src }
+func (x *LetClause) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *LetClause) yield(c *OpContext, f YieldFunc) {
+ n := &Vertex{Arcs: []*Vertex{
+ {Label: x.Label, Conjuncts: []Conjunct{{c.Env(0), x.Expr}}},
+ }}
+ x.Dst.yield(c.spawn(n), f)
+}
// A ValueClause represents the value part of a comprehension.
type ValueClause struct {
*StructLit
}
-func (x *ValueClause) Source() ast.Node { return x.Src }
+func (x *ValueClause) Source() ast.Node {
+ if x.Src == nil {
+ return nil
+ }
+ return x.Src
+}
+
+func (x *ValueClause) yield(op *OpContext, f YieldFunc) {
+ f(op.Env(0), x.StructLit)
+}
diff --git a/internal/core/adt/feature.go b/internal/core/adt/feature.go
index 684a74e..bc2a32c 100644
--- a/internal/core/adt/feature.go
+++ b/internal/core/adt/feature.go
@@ -16,6 +16,7 @@
import (
"strconv"
+ "strings"
"cuelang.org/go/cue/ast"
"cuelang.org/go/cue/errors"
@@ -27,6 +28,8 @@
// representation of an integer or string label as well as a label type.
type Feature uint32
+// TODO: create labels such that lists are sorted first (or last with index).
+
// InvalidLabel is an encoding of an erroneous label.
const InvalidLabel Feature = 0x7 // 0xb111
@@ -68,9 +71,121 @@
}
}
+// StringValue reports the string value of f, which must be a string label.
+func (f Feature) StringValue(index StringIndexer) string {
+ if !f.IsString() {
+ panic("not a string label")
+ }
+ x := f.Index()
+ return index.IndexToString(int64(x))
+}
+
+// ToValue converts a label to a value, which will be a Num for integer labels
+// and a String for string labels. It panics when f is not a regular label.
+func (f Feature) ToValue(ctx *OpContext) Value {
+ if !f.IsRegular() {
+ panic("not a regular label")
+ }
+ if f.IsInt() {
+ return ctx.NewInt64(int64(f.Index()))
+ }
+ x := f.Index()
+ str := ctx.IndexToString(int64(x))
+ return ctx.NewString(str)
+}
+
+// StringLabel converts s to a string label.
+func (c *OpContext) StringLabel(s string) Feature {
+ return labelFromValue(c, &String{Str: s})
+}
+
+// MakeStringLabel creates a label for the given string.
+func MakeStringLabel(r StringIndexer, s string) Feature {
+ i := r.StringToIndex(s)
+
+ // TODO: set position if it exists.
+ f, err := MakeLabel(nil, i, StringLabel)
+ if err != nil {
+ panic("out of free string slots")
+ }
+ return f
+}
+
+// MakeIdentLabel creates a label for the given identifier.
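+// For example (informal): "foo" yields a regular StringLabel, "#foo" a
+// DefinitionLabel, "_bar" a HiddenLabel, and "#_baz" a HiddenDefinitionLabel.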
+func MakeIdentLabel(r StringIndexer, s string) Feature {
+ i := r.StringToIndex(s)
+ t := StringLabel
+ switch {
+ case strings.HasPrefix(s, "#_"):
+ t = HiddenDefinitionLabel
+ case strings.HasPrefix(s, "#"):
+ t = DefinitionLabel
+ case strings.HasPrefix(s, "_"):
+ t = HiddenLabel
+ }
+ f, err := MakeLabel(nil, i, t)
+ if err != nil {
+ panic("out of free string slots")
+ }
+ return f
+}
+
+const msgGround = "invalid non-ground value %s (must be concrete %s)"
+
+func labelFromValue(ctx *OpContext, v Value) Feature {
+ var i int64
+ var t FeatureType
+ if isError(v) {
+ return InvalidLabel
+ }
+ switch v.Kind() {
+ case IntKind, NumKind:
+ x, _ := v.(*Num)
+ if x == nil {
+ ctx.addErrf(IncompleteError, pos(v), msgGround, v, "int")
+ return InvalidLabel
+ }
+ t = IntLabel
+ var err error
+ i, err = x.X.Int64()
+ if err != nil || x.K != IntKind {
+ ctx.AddErrf("invalid label %v: %v", v, err)
+ return InvalidLabel
+ }
+ if i < 0 {
+ ctx.AddErrf("invalid negative index %s", ctx.Str(x))
+ return InvalidLabel
+ }
+
+ case StringKind:
+ x, _ := v.(*String)
+ if x == nil {
+ ctx.addErrf(IncompleteError, pos(v), msgGround, v, "string")
+ return InvalidLabel
+ }
+ t = StringLabel
+ i = ctx.StringToIndex(x.Str)
+
+ default:
+ ctx.AddErrf("invalid label type %v", v.Kind())
+ return InvalidLabel
+ }
+
+ // TODO: set position if it exists.
+ f, err := MakeLabel(nil, i, t)
+ if err != nil {
+ ctx.AddErr(err)
+ }
+ return f
+}
+
// MakeLabel creates a label. It reports an error if the index is out of range.
-func MakeLabel(p token.Pos, index int64, f FeatureType) (Feature, errors.Error) {
+func MakeLabel(src ast.Node, index int64, f FeatureType) (Feature, errors.Error) {
if 0 > index || index > MaxIndex {
+ p := token.NoPos
+ if src != nil {
+ p = src.Pos()
+ }
return InvalidLabel,
errors.Newf(p, "int label out of range (%d not >=0 and <= %d)",
index, MaxIndex)
diff --git a/internal/core/adt/kind.go b/internal/core/adt/kind.go
index cd82b4e..ddc3cbb 100644
--- a/internal/core/adt/kind.go
+++ b/internal/core/adt/kind.go
@@ -80,6 +80,13 @@
IntKind | FloatKind | StringKind | BytesKind
)
+func kind(v Value) Kind {
+ if v == nil {
+ return BottomKind
+ }
+ return v.Kind()
+}
+
// IsAnyOf reports whether k is any of the given kinds.
//
// For instances, k.IsAnyOf(String|Bytes) reports whether k overlaps with
diff --git a/internal/core/adt/simplify.go b/internal/core/adt/simplify.go
new file mode 100644
index 0000000..aa3eb58
--- /dev/null
+++ b/internal/core/adt/simplify.go
@@ -0,0 +1,195 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package adt
+
+import (
+ "github.com/cockroachdb/apd/v2"
+)
+
+// SimplifyBounds collapses bounds if possible. The bound values must be
+// concrete. It returns nil if the bound values cannot be collapsed.
+//
+// k represents additional type constraints, such as `int`.
+func SimplifyBounds(ctx *OpContext, k Kind, x, y *BoundValue) Value {
+ xv := x.Value
+ yv := y.Value
+
+ cmp, xCat := opInfo(x.Op)
+ _, yCat := opInfo(y.Op)
+
+ // k := x.Kind() & y.Kind()
+
+ switch {
+ case xCat == yCat:
+ if x.Op == NotEqualOp || x.Op == MatchOp || x.Op == NotMatchOp {
+ if test(ctx, EqualOp, xv, yv) {
+ return x
+ }
+ break // unify the two bounds
+ }
+
+ // xCat == yCat && x.Op != NotEqualOp
+ // > a & >= b
+ // > a if a >= b
+ // >= b if a < b
+ // > a & > b
+ // > a if a >= b
+ // > b if a < b
+ // >= a & > b
+ // >= a if a > b
+ // > b if a <= b
+ // >= a & >= b
+ // >= a if a > b
+ // >= b if a <= b
+ // inverse is true as well.
+
+ // Tighten bound.
+ if test(ctx, cmp, xv, yv) {
+ return x
+ }
+ return y
+
+ case xCat == -yCat:
+ if xCat == -1 {
+ x, y = y, x
+ }
+ a, aOK := xv.(*Num)
+ b, bOK := yv.(*Num)
+
+ if !aOK || !bOK {
+ break
+ }
+
+ var d, lo, hi apd.Decimal
+ lo.Set(&a.X)
+ hi.Set(&b.X)
+ if k&FloatKind == 0 {
+ // Readjust bounds for integers.
+ if x.Op == GreaterEqualOp {
+ // >=3.4 ==> >=4
+ _, _ = apd.BaseContext.Ceil(&lo, &a.X)
+ } else {
+ // >3.4 ==> >3
+ _, _ = apd.BaseContext.Floor(&lo, &a.X)
+ }
+ if y.Op == LessEqualOp {
+ // <=2.3 ==> <= 2
+ _, _ = apd.BaseContext.Floor(&hi, &b.X)
+ } else {
+ // <2.3 ==> < 3
+ _, _ = apd.BaseContext.Ceil(&hi, &b.X)
+ }
+ }
+
+ cond, err := apd.BaseContext.Sub(&d, &hi, &lo)
+ if cond.Inexact() || err != nil {
+ break
+ }
+
+ // attempt simplification
+ // numbers
+ // >=a & <=b
+ // a if a == b
+ // _|_ if a < b
+ // >=a & <b
+ // _|_ if b <= a
+ // >a & <=b
+ // _|_ if b <= a
+ // >a & <b
+ // _|_ if b <= a
+
+ // integers
+ // >=a & <=b
+ // a if b-a == 0
+ // _|_ if a < b
+ // >=a & <b
+ // a if b-a == 1
+ // _|_ if b <= a
+ // >a & <=b
+ // b if b-a == 1
+ // _|_ if b <= a
+ // >a & <b
+ // a+1 if b-a == 2
+ // _|_ if b <= a
+
+ switch diff, err := d.Int64(); {
+ case err != nil:
+
+ case diff == 1:
+ if k&FloatKind == 0 {
+ if x.Op == GreaterEqualOp && y.Op == LessThanOp {
+ return ctx.newNum(&lo, k&NumKind, x, y)
+ }
+ if x.Op == GreaterThanOp && y.Op == LessEqualOp {
+ return ctx.newNum(&hi, k&NumKind, x, y)
+ }
+ }
+
+ case diff == 2:
+ if k&FloatKind == 0 && x.Op == GreaterThanOp && y.Op == LessThanOp {
+ _, _ = apd.BaseContext.Add(&d, d.SetInt64(1), &lo)
+ return ctx.newNum(&d, k&NumKind, x, y)
+
+ }
+
+ case diff == 0:
+ if x.Op == GreaterEqualOp && y.Op == LessEqualOp {
+ return ctx.newNum(&lo, k&NumKind, x, y)
+ }
+ fallthrough
+
+ case d.Negative:
+ return ctx.NewErrf("bounds %v %v", ctx.Str(x), ctx.Str(y))
+ }
+
+ case x.Op == NotEqualOp:
+ if !test(ctx, y.Op, xv, yv) {
+ return y
+ }
+
+ case y.Op == NotEqualOp:
+ if !test(ctx, x.Op, yv, xv) {
+ return x
+ }
+ }
+ return nil
+}
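+
+// For illustration (informal), for integer kinds SimplifyBounds collapses
+//
+//	>=2 & <=2  =>  2
+//	>1  & <3   =>  2
+//	>=5 & <3   =>  _|_ (an error)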
+
+func opInfo(op Op) (cmp Op, norm int) {
+ switch op {
+ case GreaterThanOp:
+ return GreaterEqualOp, 1
+ case GreaterEqualOp:
+ return GreaterThanOp, 1
+ case LessThanOp:
+ return LessEqualOp, -1
+ case LessEqualOp:
+ return LessThanOp, -1
+ case NotEqualOp:
+ return NotEqualOp, 0
+ case MatchOp:
+ return MatchOp, 2
+ case NotMatchOp:
+ return NotMatchOp, 3
+ }
+ panic("cue: unreachable")
+}
+
+func test(ctx *OpContext, op Op, a, b Value) bool {
+ if b, ok := BinOp(ctx, op, a, b).(*Bool); ok {
+ return b.B
+ }
+ return false
+}
diff --git a/internal/core/compile/compile.go b/internal/core/compile/compile.go
index 018b99b..eacd993 100644
--- a/internal/core/compile/compile.go
+++ b/internal/core/compile/compile.go
@@ -24,7 +24,6 @@
"cuelang.org/go/cue/token"
"cuelang.org/go/internal"
"cuelang.org/go/internal/core/adt"
- "cuelang.org/go/internal/core/runtime"
"golang.org/x/xerrors"
)
@@ -37,7 +36,7 @@
// the packages names are consistent.
//
// Files may return a completed parse even if it has errors.
-func Files(cfg *Config, r *runtime.Runtime, files ...*ast.File) (*adt.Vertex, errors.Error) {
+func Files(cfg *Config, r adt.Runtime, files ...*ast.File) (*adt.Vertex, errors.Error) {
c := &compiler{index: r}
v := c.compileFiles(files)
@@ -74,7 +73,7 @@
Message: errors.NewMessage(format, args),
}
c.errs = errors.Append(c.errs, err)
- return &adt.Bottom{}
+ return &adt.Bottom{Err: err}
}
func (c *compiler) path() []string {
@@ -387,7 +386,7 @@
name, isIdent, err := ast.LabelName(lab)
if err == nil && isIdent {
idx := c.index.StringToIndex(name)
- label, _ = adt.MakeLabel(x.Pos(), idx, adt.DefinitionLabel)
+ label, _ = adt.MakeLabel(x, idx, adt.DefinitionLabel)
}
}
@@ -667,7 +666,11 @@
return slice
case *ast.BottomLit:
- return &adt.Bottom{Src: n}
+ return &adt.Bottom{
+ Src: n,
+ Code: adt.UserError,
+ Err: errors.Newf(n.Pos(), "from source"),
+ }
case *ast.BadExpr:
return c.errf(n, "invalid expression")
@@ -787,6 +790,8 @@
d.Values = append(d.Values, adt.Disjunct{Val: c.expr(n), Default: mark})
}
+// TODO(perf): validate that regexps are cached at the right time.
+
func (c *compiler) parse(l *ast.BasicLit) (n adt.Expr) {
s := l.Value
if s == "" {
diff --git a/internal/core/compile/label.go b/internal/core/compile/label.go
index 1c03298..4320252 100644
--- a/internal/core/compile/label.go
+++ b/internal/core/compile/label.go
@@ -42,7 +42,7 @@
case strings.HasPrefix(s, "_"):
t = adt.HiddenLabel
}
- f, err := adt.MakeLabel(n.Pos(), i, t)
+ f, err := adt.MakeLabel(n, i, t)
if err != nil {
c.errf(n, "invalid identifier label: %v", err)
return adt.InvalidLabel
@@ -60,7 +60,7 @@
}
i := int64(index.StringToIndex(norm.NFC.String(s)))
- f, err := adt.MakeLabel(n.Pos(), i, adt.StringLabel)
+ f, err := adt.MakeLabel(n, i, adt.StringLabel)
if err != nil {
c.errf(n, msg, err)
}
@@ -85,7 +85,7 @@
return adt.InvalidLabel
}
- f, err := adt.MakeLabel(n.Pos(), i, adt.IntLabel)
+ f, err := adt.MakeLabel(n, i, adt.IntLabel)
if err != nil {
c.errf(n, msg, err)
return adt.InvalidLabel
@@ -98,7 +98,7 @@
default: // keywords (null, true, false, for, in, if, let)
i := index.StringToIndex(x.Kind.String())
- f, err := adt.MakeLabel(n.Pos(), i, adt.StringLabel)
+ f, err := adt.MakeLabel(n, i, adt.StringLabel)
if err != nil {
c.errf(n, "invalid string label: %v", err)
}
diff --git a/internal/core/compile/predeclared.go b/internal/core/compile/predeclared.go
index fcadc36..6c54d85 100644
--- a/internal/core/compile/predeclared.go
+++ b/internal/core/compile/predeclared.go
@@ -59,6 +59,12 @@
return nil
}
+// LookupRange returns a CUE expression for the given predeclared identifier
+// representing a range, such as uint8, int128, and float64.
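+// For example (informal), LookupRange("int8") yields the equivalent of the
+// CUE conjunction int & >=-128 & <=127.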
+func LookupRange(name string) adt.Expr {
+ return predefinedRanges[name]
+}
+
var predefinedRanges = map[string]adt.Expr{
"rune": mkIntRange("0", strconv.Itoa(0x10FFFF)),
"int8": mkIntRange("-128", "127"),
@@ -90,7 +96,13 @@
),
}
-// TODO: use an adt.BoundValue here.
+func init() {
+ for k, v := range predefinedRanges {
+ predefinedRanges["__"+k] = v
+ }
+}
+
+// TODO: use an adt.BoundValue and conjunctions here.
func mkUint() adt.Expr {
from := newBound(adt.GreaterEqualOp, adt.IntKind, parseInt("0"))
@@ -107,25 +119,38 @@
func mkIntRange(a, b string) adt.Expr {
from := newBound(adt.GreaterEqualOp, adt.IntKind, parseInt(a))
to := newBound(adt.LessEqualOp, adt.IntKind, parseInt(b))
- return &adt.BinaryExpr{nil, adt.AndOp, from, to}
+ ident := ast.NewIdent("__int")
+ src := ast.NewBinExpr(token.AND, ident, from.Src, to.Src)
+ return &adt.Conjunction{
+ Src: src,
+ Values: []adt.Value{
+ &adt.BasicType{Src: ident, K: adt.IntKind}, from, to,
+ },
+ }
}
func mkFloatRange(a, b string) adt.Expr {
from := newBound(adt.GreaterEqualOp, adt.NumKind, parseFloat(a))
to := newBound(adt.LessEqualOp, adt.NumKind, parseFloat(b))
- return &adt.BinaryExpr{nil, adt.AndOp, from, to}
+ src := ast.NewBinExpr(token.AND, from.Src, to.Src)
+ return &adt.Conjunction{Src: src, Values: []adt.Value{from, to}}
}
func newBound(op adt.Op, k adt.Kind, v adt.Value) *adt.BoundValue {
- return &adt.BoundValue{Op: op, Value: v}
+ src := &ast.UnaryExpr{Op: op.Token(), X: v.Source().(ast.Expr)}
+ return &adt.BoundValue{Src: src, Op: op, Value: v}
}
func parseInt(s string) *adt.Num {
- return parseNum(adt.IntKind, s)
+ n := parseNum(adt.IntKind, s)
+ n.Src = &ast.BasicLit{Kind: token.INT, Value: s}
+ return n
}
func parseFloat(s string) *adt.Num {
- return parseNum(adt.FloatKind, s)
+ n := parseNum(adt.FloatKind, s)
+ n.Src = &ast.BasicLit{Kind: token.FLOAT, Value: s}
+ return n
}
func parseNum(k adt.Kind, s string) *adt.Num {
diff --git a/internal/core/debug/compact.go b/internal/core/debug/compact.go
index 5be3f10..67a4da9 100644
--- a/internal/core/debug/compact.go
+++ b/internal/core/debug/compact.go
@@ -133,8 +133,7 @@
w.string(`_|_`)
if x.Err != nil {
w.string("(")
- msg, args := x.Err.Msg()
- w.string(fmt.Sprintf(msg, args...))
+ w.string(x.Err.Error())
w.string(")")
}
diff --git a/internal/core/debug/debug.go b/internal/core/debug/debug.go
index b739bea..6209b83 100644
--- a/internal/core/debug/debug.go
+++ b/internal/core/debug/debug.go
@@ -109,39 +109,63 @@
}
kindStr := kind.String()
- kindStr = strings.ReplaceAll(kindStr, "{...}", "struct")
- kindStr = strings.ReplaceAll(kindStr, "[...]", "list")
+
+ // TODO: replace with showing full closedness data.
+ if x.IsClosed(nil) {
+ if kind == adt.ListKind || kind == adt.StructKind {
+ kindStr = "#" + kindStr
+ }
+ }
fmt.Fprintf(w, "(%s){", kindStr)
- if x.Value != nil && kind&^(adt.StructKind|adt.ListKind) != 0 {
- w.string(" ")
- w.node(x.Value)
- w.string(" }")
- return
- }
-
saved := w.indent
w.indent += " "
+ defer func() { w.indent = saved }()
- if b, ok := x.Value.(*adt.Bottom); ok {
+ switch v := x.Value.(type) {
+ case nil:
+ case *adt.Bottom:
+ // TODO: reuse bottom.
saved := w.indent
w.indent += "// "
w.string("\n")
- w.string(strings.TrimSpace(errors.Details(b.Err, &errors.Config{
- Cwd: w.cfg.Cwd,
- ToSlash: true,
- })))
+ fmt.Fprintf(w, "[%v]", v.Code)
+ if !v.ChildError {
+ msg := errors.Details(v.Err, &errors.Config{
+ Cwd: w.cfg.Cwd,
+ ToSlash: true,
+ })
+ msg = strings.TrimSpace(msg)
+ if msg != "" {
+ w.string(" ")
+ w.string(msg)
+ }
+ }
w.indent = saved
+
+ case *adt.StructMarker, *adt.ListMarker:
+ // if len(x.Arcs) == 0 {
+ // // w.string("}")
+ // // return
+ // }
+
+ default:
+ if len(x.Arcs) == 0 {
+ w.string(" ")
+ w.node(x.Value)
+ w.string(" }")
+ return
+ }
+ w.string("\n")
+ w.node(x.Value)
}
- if len(x.Arcs) > 0 {
- for _, a := range x.Arcs {
- w.string("\n")
- w.label(a.Label)
- w.string(": ")
- w.node(a)
- }
+ for _, a := range x.Arcs {
+ w.string("\n")
+ w.label(a.Label)
+ w.string(": ")
+ w.node(a)
}
if x.Value == nil {
diff --git a/internal/core/eval/closed.go b/internal/core/eval/closed.go
new file mode 100644
index 0000000..10cee11
--- /dev/null
+++ b/internal/core/eval/closed.go
@@ -0,0 +1,340 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+// The file implements the majority of the closed struct semantics.
+// The data is recorded in the Closed field of a Vertex.
+//
+// Each vertex has a set of conjuncts that make up the values of the vertex.
+// Each Conjunct may originate from various sources, like an embedding, field
+// definition or regular value. For the purpose of computing the value, the
+// source of the conjunct is irrelevant. The origin does matter, however,
+// for determining whether a field is allowed in a closed struct. The Closed
+// field keeps track of the kind of origin for this purpose.
+//
+// More precisely, the CloseDef struct explains how the conjuncts of an arc
+// were combined and defines a logical expression on the field sets
+// computed for each conjunct.
+//
+// While evaluating each conjunct, nodeContext keeps track of what changes need
+// to be made to the CloseDef based on the evaluation of the current conjuncts.
+// For instance, if a field references a definition, all other previous
+// checks are useless, as the newly referred to definitions define an upper
+// bound and will contain all the information that is necessary to determine
+// whether a field may be included.
+//
+// Most of the logic in this file concerns itself with the combination of
+// multiple CloseDef values as well as traversing the structure to validate
+// whether an arc is allowed. The actual fieldSet logic is in optional.go.
+// The overall control and use of the functionality in this file is in
+// eval.go.
+
+import (
+ "cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/token"
+ "cuelang.org/go/internal/core/adt"
+)
+
+// acceptor implements adt.Acceptor.
+//
+// Note that it keeps track of whether it represents a closed struct. An
+// acceptor is also used to associate a CloseDef with a Vertex, and not
+// all CloseDefs represent a closed struct: a value that contains embeddings may
+// eventually turn into a closed struct. Consider
+//
+// a: {
+// b
+// d: e: int
+// }
+// b: d: {
+// #A & #B
+// }
+//
+// At the point of evaluating `a`, the struct is not yet closed. However,
+// descending into `d` will trigger the inclusion of definitions which in turn
+// causes the struct to be closed. At this point, it is important to know that
+// `b` originated from an embedding, as otherwise `e` may not be allowed.
+//
+type acceptor struct {
+ tree *CloseDef
+ fields []fieldSet
+ isClosed bool
+ isList bool
+ openList bool
+}
+
+func (a *acceptor) Accept(c *adt.OpContext, f adt.Feature) bool {
+ if a.isList {
+ return a.openList
+ }
+ if !a.isClosed {
+ return true
+ }
+ if f == adt.InvalidLabel {
+ return false
+ }
+ if f.IsInt() {
+ return a.openList
+ }
+ return a.verifyArcAllowed(c, f) == nil
+}
+
+func (a *acceptor) MatchAndInsert(c *adt.OpContext, v *adt.Vertex) {
+ for _, fs := range a.fields {
+ fs.MatchAndInsert(c, v)
+ }
+}
+
+// CloseDef defines how individual FieldSets (corresponding to conjuncts)
+// combine to determine whether a field is contained in a closed set.
+//
+// Nodes with a non-empty List and IsAnd is false represent embeddings.
+// The ID is the node that contained the embedding in that case.
+//
+// Nodes with a non-empty List and IsAnd is true represent conjunctions of
+// definitions. In this case, a field must be contained in each definition.
+//
+// If a node has both conjunctions of definitions and embeddings, only the
+// former are maintained. Conjunctions of definitions define an upper bound
+// of the set of allowed fields in that case and the embeddings will not add
+// any value.
+type CloseDef struct {
+ ID uint32
+ IsAnd bool
+ List []*CloseDef
+}
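+
+// For illustration (informal), for
+//
+//	a: {       // ID: 0
+//		#A     // ID: 1
+//		#B     // ID: 2
+//		b: ... // ID: 0
+//	}
+//
+// the CloseDef for a is roughly
+//
+//	{ID: 0, List: [{ID: 1}, {ID: 2}, {ID: 0}]}
+//
+// a non-IsAnd list representing embeddings: a field is allowed if any of the
+// corresponding field sets allows it.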
+
+// isOr reports whether this is a node representing embeddings.
+func isOr(c *CloseDef) bool {
+ return len(c.List) > 0 && !c.IsAnd
+}
+
+// updateClosed transforms c into a new node with all non-AND nodes with an
+// ID matching one in replace substituted with the replace value.
+//
+// Vertex only keeps track of a flat list of conjuncts and does not keep track
+// of the hierarchy of how these were derived. This function allows rewriting
+// a CloseDef tree based on replacement information gathered during evaluation
+// of this flat list.
+//
+func updateClosed(c *CloseDef, replace map[uint32]*CloseDef) *CloseDef { // used in eval.go
+ switch {
+ case c == nil:
+ and := []*CloseDef{}
+ for _, c := range replace {
+ and = append(and, c)
+ }
+ switch len(and) {
+ case 0:
+ case 1:
+ c = and[0]
+ default:
+ c = &CloseDef{IsAnd: true, List: and}
+ }
+ // needClose
+ case len(replace) > 0:
+ c = updateClosedRec(c, replace)
+ }
+ return c
+}
+
+func updateClosedRec(c *CloseDef, replace map[uint32]*CloseDef) *CloseDef {
+ if c == nil {
+ return nil
+ }
+
+ // If c is a leaf or OR node, replace it outright. If both are an OR node,
+ // merge the lists.
+ if len(c.List) == 0 || !c.IsAnd {
+ if sub := replace[c.ID]; sub != nil {
+ if isOr(sub) && isOr(c) {
+ sub.List = append(sub.List, c.List...)
+ }
+ return sub
+ }
+ }
+
+ changed := false
+ buf := make([]*CloseDef, len(c.List))
+ k := 0
+ for _, c := range c.List {
+ n := updateClosedRec(c, replace)
+ changed = changed || n != c
+ if n != nil {
+ buf[k] = n
+ k++
+ }
+ }
+ if !changed {
+ return c
+ }
+
+ if k == 1 {
+ return buf[0]
+ }
+
+ return &CloseDef{ID: c.ID, IsAnd: c.IsAnd, List: buf[:k]}
+}
+
+// updateReplace is called after evaluating a conjunct at the top of the arc
+// to update the replacement information with the gathered CloseDef info.
+func (n *nodeContext) updateReplace(env *adt.Environment) { // used in eval.go
+ if n.newClose == nil {
+ return
+ }
+
+ if n.replace == nil {
+ n.replace = make(map[uint32]*CloseDef)
+ }
+
+ id := uint32(0)
+ if env != nil {
+ id = env.CloseID
+ }
+
+ n.replace[id] = updateClose(n.replace[id], n.newClose)
+ n.newClose = nil
+}
+
+// appendLists creates a new CloseDef with the elements of the lists of orig
+// and update appended. It will take the ID of orig. It does not alter
+// either orig or update.
+func appendLists(orig, update *CloseDef) *CloseDef {
+ list := make([]*CloseDef, len(orig.List)+len(update.List))
+ copy(list[copy(list, orig.List):], update.List)
+ c := *orig
+ c.List = list
+ return &c
+}
+
+// updateClose merges update into orig without altering either.
+//
+// The merge takes into account whether it is an embedding node or not.
+// Most notably, if an "And" node is combined with an embedding, the
+// embedding information may be discarded.
+func updateClose(orig, update *CloseDef) *CloseDef {
+ switch {
+ case orig == nil:
+ return update
+ case isOr(orig):
+ if !isOr(update) {
+ return update
+ }
+ return appendLists(orig, update)
+ case isOr(update):
+ return orig
+ case len(orig.List) == 0 && len(update.List) == 0:
+ return &CloseDef{IsAnd: true, List: []*CloseDef{orig, update}}
+ case len(orig.List) == 0:
+ update.List = append(update.List, orig)
+ return update
+ default: // isAnd(orig)
+ return appendLists(orig, update)
+ }
+}
+
+func (n *nodeContext) addAnd(c *CloseDef) { // used in eval.go
+ switch {
+ case n.newClose == nil:
+ n.newClose = c
+ case isOr(n.newClose):
+ n.newClose = c
+ case len(n.newClose.List) == 0:
+ n.newClose = &CloseDef{
+ IsAnd: true,
+ List: []*CloseDef{n.newClose, c},
+ }
+ default:
+ n.newClose.List = append(n.newClose.List, c)
+ }
+}
+
+func (n *nodeContext) addOr(parentID uint32, c *CloseDef) { // used in eval.go
+ switch {
+ case n.newClose == nil:
+ d := &CloseDef{ID: parentID, List: []*CloseDef{{ID: parentID}}}
+ if c != nil {
+ d.List = append(d.List, c)
+ }
+ n.newClose = d
+ case isOr(n.newClose):
+ d := n.newClose
+ if c != nil {
+ d.List = append(d.List, c)
+ }
+ }
+}
+
+// verifyArcAllowed checks whether f is an allowed label within the current
+// node. It traverses c considering the "or" semantics of embeddings and the
+// "and" semantics of conjunctions. It generates an error if a field is not
+// allowed.
+func (n *acceptor) verifyArcAllowed(ctx *adt.OpContext, f adt.Feature) *adt.Bottom {
+ filter := f.IsString() || f == adt.InvalidLabel
+ if filter && !n.verifyArcRecursive(ctx, n.tree, f) {
+ label := f.SelectorString(ctx)
+ return &adt.Bottom{
+ Err: errors.Newf(token.NoPos, "field `%s` not allowed", label),
+ }
+ }
+ return nil
+}
+
+func (n *acceptor) verifyArcRecursive(ctx *adt.OpContext, c *CloseDef, f adt.Feature) bool {
+ if len(c.List) == 0 {
+ return n.verifyDefinition(ctx, c.ID, f)
+ }
+ if c.IsAnd {
+ for _, c := range c.List {
+ if !n.verifyArcRecursive(ctx, c, f) {
+ return false
+ }
+ }
+ return true
+ }
+ for _, c := range c.List {
+ if n.verifyArcRecursive(ctx, c, f) {
+ return true
+ }
+ }
+ return false
+}
+
+// verifyDefinition reports whether f is a valid member for any of the fieldSets
+// with the same closeID.
+func (n *acceptor) verifyDefinition(ctx *adt.OpContext, closeID uint32, f adt.Feature) (ok bool) {
+ for _, o := range n.fields {
+ if o.env.CloseID != closeID {
+ continue
+ }
+
+ if len(o.additional) > 0 || o.isOpen {
+ return true
+ }
+
+ for _, g := range o.fields {
+ if f == g.label {
+ return true
+ }
+ }
+
+ for _, b := range o.bulk {
+ if b.check.Match(ctx, f) {
+ return true
+ }
+ }
+ }
+ return false
+}
diff --git a/internal/core/eval/closed_test.go b/internal/core/eval/closed_test.go
new file mode 100644
index 0000000..144ecce
--- /dev/null
+++ b/internal/core/eval/closed_test.go
@@ -0,0 +1,135 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+ "testing"
+
+ "github.com/google/go-cmp/cmp"
+)
+
+func TestRewriteClosed(t *testing.T) {
+ testCases := []struct {
+ desc string
+ close *CloseDef
+ replace map[uint32]*CloseDef
+ want *CloseDef
+ }{{
+ desc: "a: #A & #B",
+ close: &CloseDef{
+ ID: 1,
+ },
+ replace: map[uint32]*CloseDef{
+ 1: {ID: 1, IsAnd: true, List: []*CloseDef{{ID: 2}, {ID: 3}}},
+ },
+ want: &CloseDef{
+ ID: 0x01,
+ IsAnd: true,
+ List: []*CloseDef{{ID: 2}, {ID: 3}},
+ },
+ }, {
+ // Eliminate an embedding for which there are no more entries.
+ // desc: "eliminateOneEmbedding",
+ // close: &CloseDef{
+ // ID: 0,
+ // List: []*CloseDef{
+ // {ID: 2},
+ // {ID: 3},
+ // },
+ // },
+ // replace: map[uint32]*CloseDef{2: nil},
+ // want: &CloseDef{ID: 2},
+ // }, {
+ // Do not eliminate an embedding that has a replacement.
+ desc: "eliminateOneEmbeddingByMultiple",
+ close: &CloseDef{
+ ID: 0,
+ List: []*CloseDef{
+ {ID: 2},
+ {ID: 3},
+ },
+ },
+ replace: map[uint32]*CloseDef{
+ 2: nil,
+ 3: {ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
+ },
+ want: &CloseDef{
+ ID: 0x00,
+ List: []*CloseDef{
+ {ID: 2},
+ {ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
+ },
+ },
+ }, {
+ // Select b within a
+ // a: { // ID: 0
+ // #A // ID: 1
+ // #B // ID: 2
+ // b: #C // ID: 0
+ // }
+ // #C: {
+ // b: #D // ID: 3
+ // }
+ //
+ desc: "embeddingOverruledByField",
+ close: &CloseDef{
+ ID: 0,
+ List: []*CloseDef{
+ {ID: 1},
+ {ID: 2},
+ {ID: 0},
+ },
+ },
+ replace: map[uint32]*CloseDef{0: {ID: 3}},
+ want: &CloseDef{ID: 3},
+ }, {
+ // Select b within a
+ // a: { // ID: 0
+ // #A // ID: 1
+ // #B // ID: 2
+ // b: #C // ID: 0
+ // }
+ // #C: {
+ // b: #D & #E // ID: 3 & 4
+ // }
+ //
+ desc: "embeddingOverruledByMultiple",
+ close: &CloseDef{
+ ID: 0,
+ List: []*CloseDef{
+ {ID: 1},
+ {ID: 2},
+ {ID: 0},
+ },
+ },
+ replace: map[uint32]*CloseDef{
+ 0: {IsAnd: true, List: []*CloseDef{{ID: 3}, {ID: 4}}},
+ },
+ want: &CloseDef{
+ ID: 0,
+ IsAnd: true,
+ List: []*CloseDef{{ID: 3}, {ID: 4}},
+ },
+ }}
+
+ for _, tc := range testCases {
+ t.Run(tc.desc, func(t *testing.T) {
+ got := updateClosed(tc.close, tc.replace)
+ if !cmp.Equal(got, tc.want) {
+ t.Error(cmp.Diff(got, tc.want))
+ }
+ })
+ }
+}
diff --git a/internal/core/eval/disjunct.go b/internal/core/eval/disjunct.go
new file mode 100644
index 0000000..db5df3c
--- /dev/null
+++ b/internal/core/eval/disjunct.go
@@ -0,0 +1,376 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+ "sort"
+
+ "cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/token"
+ "cuelang.org/go/internal/core/adt"
+)
+
+// Nodes may not reenter a disjunction.
+//
+// Copy one layer deep; throw away items on failure.
+
+// DISJUNCTION ALGORITHM
+//
+// The basic concept of the algorithm is to use backtracking to find valid
+// disjunctions. The algorithm can stop if two matching disjuncts are found
+// where one does not subsume the other.
+//
+// At a later point, we can introduce a filter step to filter out possible
+// disjuncts based on, say, discriminator fields or field exclusivity (oneOf
+// fields in Protobuf).
+//
+// To understand the details of the algorithm, it is important to understand
+// some properties of disjunction.
+//
+//
+// EVALUATION OF A DISJUNCTION IS SELF CONTAINED
+//
+// In other words, fields outside of a disjunction cannot bind to values within
+// a disjunction whilst evaluating that disjunction. This allows the computation
+// of disjunctions to be isolated from side effects.
+//
+// The intuition behind this is as follows: as a disjunction is not a concrete
+// value, it is not possible to lookup a field within a disjunction if it has
+// not yet been evaluated. So if a reference within a disjunction that is needed
+// to disambiguate that disjunction refers to a field outside the scope of the
+// disjunction which, in turn, refers to a field within the disjunction, this
+// results in a cycle error. We achieve this by not removing the cycle marker of
+// the Vertex of the disjunction until the disjunction is resolved.
+//
+// Note that the following disjunct is still allowed:
+//
+// a: 1
+// b: a
+//
+// Even though `a` refers to the root of the disjunction, it does not _select
+// into_ the disjunction. Implementation-wise, it also doesn't have to, as the
+// respective vertex is available within the Environment. Referencing a node
+// outside the disjunction that in turn selects the disjunction root, however,
+// will result in a detected cycle.
+//
+// As usual, a detected cycle should be interpreted as incomplete, so that
+// the referring node will not be fixed to an error prematurely.
+//
+//
+// SUBSUMPTION OF AMBIGUOUS DISJUNCTS
+//
+// A disjunction can be evaluated to a concrete value if only one disjunct
+// remains. Aside from disambiguating through unification failure, disjuncts
+// may also be disambiguated by taking the least specific of two disjuncts.
+// For instance, if a subsumes b, then the result of the disjunction may be a.
+//
+// NEW ALGORITHM NO LONGER VERIFIES SUBSUMPTION. SUBSUMPTION IS INHERENTLY
+// IMPRECISE (DUE TO BULK OPTIONAL FIELDS). OTHER THAN THAT, FOR SCALAR VALUES
+// IT JUST MEANS THERE IS AMBIGUITY, AND FOR STRUCTS IT CAN LEAD TO STRANGE
+// CONSEQUENCES.
+//
+// USE EQUALITY INSTEAD:
+// - Undefined == error for optional fields.
+// - So only need to check exact labels for vertices.
+
+type envDisjunct struct {
+ env *adt.Environment
+ values []disjunct
+ numDefaults int
+ cloneID uint32
+ isEmbed bool
+}
+
+type disjunct struct {
+ expr adt.Expr
+ isDefault bool
+}
+
+func (n *nodeContext) addDisjunction(env *adt.Environment, x *adt.DisjunctionExpr, cloneID uint32, isEmbed bool) {
+ a := []disjunct{}
+
+ numDefaults := 0
+ for _, v := range x.Values {
+ isDef := v.Default // || n.hasDefaults(env, v.Val)
+ if isDef {
+ numDefaults++
+ }
+ a = append(a, disjunct{v.Val, isDef})
+ }
+
+ sort.SliceStable(a, func(i, j int) bool {
+ return !a[j].isDefault && a[i].isDefault != a[j].isDefault
+ })
+
+ n.disjunctions = append(n.disjunctions,
+ envDisjunct{env, a, numDefaults, cloneID, isEmbed})
+}
+
+func (n *nodeContext) addDisjunctionValue(env *adt.Environment, x *adt.Disjunction, cloneID uint32, isEmbed bool) {
+ a := []disjunct{}
+
+ for i, v := range x.Values {
+ a = append(a, disjunct{v, i < x.NumDefaults})
+ }
+
+ n.disjunctions = append(n.disjunctions,
+ envDisjunct{env, a, x.NumDefaults, cloneID, isEmbed})
+}
+
+func (n *nodeContext) updateResult() (isFinal bool) {
+ n.postDisjunct()
+
+ if n.hasErr() {
+ return n.isFinal
+ }
+
+ d := n.nodeShared.disjunct
+ if d == nil {
+ d = &adt.Disjunction{}
+ n.nodeShared.disjunct = d
+ }
+
+ result := *n.node
+ if result.Value == nil {
+ result.Value = n.getValidators()
+ }
+
+ for _, v := range d.Values {
+ if Equal(n.ctx, v, &result) {
+ return isFinal
+ }
+ }
+
+ p := &result
+ d.Values = append(d.Values, p)
+ if n.defaultMode == isDefault {
+ // Keep defaults sorted first.
+ i := d.NumDefaults
+ j := i + 1
+ copy(d.Values[j:], d.Values[i:])
+ d.Values[i] = p
+ d.NumDefaults = j
+ }
+
+ // return n.isFinal
+
+ switch {
+ case !n.nodeShared.hasResult():
+
+ case n.nodeShared.isDefault() && n.defaultMode != isDefault:
+ return n.isFinal
+
+ case !n.nodeShared.isDefault() && n.defaultMode == isDefault:
+
+ default:
+ if Equal(n.ctx, n.node, n.result()) {
+ return n.isFinal
+ }
+
+ // TODO: Compute fancy error message.
+ n.nodeShared.resultNode = n
+ // n.nodeShared.result.AddErr(n.ctx, &adt.Bottom{
+ // Code: adt.IncompleteError,
+ // Err: errors.Newf(n.ctx.Pos(), "ambiguous disjunction"),
+ // })
+ n.nodeShared.result_.Arcs = nil
+ n.nodeShared.result_.Structs = nil
+ return n.isFinal // n.defaultMode == isDefault
+ }
+
+ n.nodeShared.resultNode = n
+ n.nodeShared.setResult(n.node)
+
+ return n.isFinal
+}
+
+func (n *nodeContext) tryDisjuncts() (finished bool) {
+ if !n.insertDisjuncts() || !n.updateResult() {
+ if !n.isFinal {
+ return false // More iterations to do.
+ }
+ }
+
+ if n.nodeShared.hasResult() {
+ return true // found something
+ }
+
+ if len(n.disjunctions) > 0 {
+ b := &adt.Bottom{
+ // TODO(errors): we should not make this error worse by discarding
+ // the type or error. Using IncompleteError is a compromise. But
+ // really we should keep track of the errors and return a more
+ // accurate result here.
+ Code: adt.IncompleteError,
+ Err: errors.Newf(token.NoPos, "empty disjunction"),
+ }
+ n.node.AddErr(n.ctx, b)
+ }
+ return true
+}
+
+// TODO: add proper conjuncts for the ones used by the disjunctions to replace
+// the original source.
+//
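+// insertDisjuncts inserts, for each recorded disjunction, the disjunct
+// selected by the current position in n.stack. The stack acts as an odometer
+// over all combinations of disjuncts: after inserting a combination, the last
+// position whose values are not yet exhausted is incremented and all later
+// positions restart at 0.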
+func (n *nodeContext) insertDisjuncts() (inserted bool) {
+ p := 0
+ inserted = true
+
+ disjunctions := []envDisjunct{}
+
+ // fmt.Println("----", debug.NodeString(n.ctx, n.node, nil))
+ for _, d := range n.disjunctions {
+ disjunctions = append(disjunctions, d)
+
+ sub := len(n.disjunctions)
+ defMode, ok := n.insertSingleDisjunct(p, d, false)
+ p++
+ if !ok {
+ inserted = false
+ break
+ }
+
+ subMode := []defaultMode{}
+ for ; sub < len(n.disjunctions); sub++ {
+ d := n.disjunctions[sub]
+ disjunctions = append(disjunctions, d)
+ mode, ok := n.insertSingleDisjunct(p, d, true)
+ p++
+ if !ok {
+ inserted = false
+ break
+ }
+ subMode = append(subMode, mode)
+ }
+ for i := len(subMode) - 1; i >= 0; i-- {
+ defMode = combineSubDefault(defMode, subMode[i])
+ }
+
+ // fmt.Println("RESMODE", defMode, combineDefault(n.defaultMode, defMode))
+
+ n.defaultMode = combineDefault(n.defaultMode, defMode)
+ }
+
+ // Find last disjunction at which there is no overflow.
+ for ; p > 0 && n.stack[p-1]+1 >= len(disjunctions[p-1].values); p-- {
+ }
+ if p > 0 {
+ // Increment a valid position and set all subsequent entries to 0.
+ n.stack[p-1]++
+ n.stack = n.stack[:p]
+ }
+ return inserted
+}
+
+func (n *nodeContext) insertSingleDisjunct(p int, d envDisjunct, isSub bool) (mode defaultMode, ok bool) {
+ if p >= len(n.stack) {
+ n.stack = append(n.stack, 0)
+ }
+
+ k := n.stack[p]
+ v := d.values[k]
+ n.isFinal = n.isFinal && k == len(d.values)-1
+ c := adt.MakeConjunct(d.env, v.expr)
+ n.addExprConjunct(c, d.cloneID, d.isEmbed)
+
+ for n.expandOne() {
+ }
+
+ switch {
+ case d.numDefaults == 0:
+ mode = maybeDefault
+ case v.isDefault:
+ mode = isDefault
+ default:
+ mode = notDefault
+ }
+
+ return mode, !n.hasErr()
+}
+
+// Default rules from spec:
+//
+// U1: (v1, d1) & v2 => (v1&v2, d1&v2)
+// U2: (v1, d1) & (v2, d2) => (v1&v2, d1&d2)
+//
+// D1: (v1, d1) | v2 => (v1|v2, d1)
+// D2: (v1, d1) | (v2, d2) => (v1|v2, d1|d2)
+//
+// M1: *v => (v, v)
+// M2: *(v1, d1) => (v1, d1)
+// or
+// M2: *(v1, d1) => (v1, v1)
+// or
+// M2: *(v1, d1) => v1 if d1 == _|_
+// M2: d1 otherwise
+//
+// def + maybe -> def
+// not + maybe -> def
+// not + def -> def
+
+type defaultMode int
+
+const (
+ maybeDefault defaultMode = iota
+ notDefault
+ isDefault
+)
+
+func combineSubDefault(a, b defaultMode) defaultMode {
+ switch {
+ case a == maybeDefault && b == maybeDefault:
+ return maybeDefault
+ case a == maybeDefault && b == notDefault:
+ return notDefault
+ case a == maybeDefault && b == isDefault:
+ return isDefault
+ case a == notDefault && b == maybeDefault:
+ return notDefault
+ case a == notDefault && b == notDefault:
+ return notDefault
+ case a == notDefault && b == isDefault:
+ return isDefault
+ case a == isDefault && b == maybeDefault:
+ return isDefault
+ case a == isDefault && b == notDefault:
+ return notDefault
+ case a == isDefault && b == isDefault:
+ return isDefault
+ default:
+ panic("unreachable")
+ }
+}
+
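+// combineDefault combines the default modes of two disjunctions that apply to
+// the same node. For instance, combining isDefault with notDefault yields
+// notDefault.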
+func combineDefault(a, b defaultMode) defaultMode {
+ if a > b {
+ a, b = b, a
+ }
+ switch {
+ case a == maybeDefault && b == maybeDefault:
+ return maybeDefault
+ case a == maybeDefault && b == notDefault:
+ return notDefault
+ case a == maybeDefault && b == isDefault:
+ return isDefault
+ case a == notDefault && b == notDefault:
+ return notDefault
+ case a == notDefault && b == isDefault:
+ return notDefault
+ case a == isDefault && b == isDefault:
+ return isDefault
+ default:
+ panic("unreachable")
+ }
+}
diff --git a/internal/core/eval/equality.go b/internal/core/eval/equality.go
new file mode 100644
index 0000000..cb9f9da
--- /dev/null
+++ b/internal/core/eval/equality.go
@@ -0,0 +1,134 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import "cuelang.org/go/internal/core/adt"
+
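+// Equal reports whether two values are equal. Vertices are compared
+// structurally: their arcs must match by label and be pairwise equal, and
+// their values must be equal.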
+func Equal(ctx *adt.OpContext, v, w adt.Value) bool {
+ if x, ok := v.(*adt.Vertex); ok {
+ return equalVertex(ctx, x, w)
+ }
+ if y, ok := w.(*adt.Vertex); ok {
+ return equalVertex(ctx, y, v)
+ }
+ return equalTerminal(ctx, v, w)
+}
+
+func equalVertex(ctx *adt.OpContext, x *adt.Vertex, v adt.Value) bool {
+ y, ok := v.(*adt.Vertex)
+ if !ok {
+ return false
+ }
+ if x == y {
+ return true
+ }
+ if len(x.Arcs) != len(y.Arcs) {
+ return false
+ }
+ if len(x.Arcs) == 0 && len(y.Arcs) == 0 {
+ return equalTerminal(ctx, x.Value, y.Value)
+ }
+
+loop1:
+ for _, a := range x.Arcs {
+ for _, b := range y.Arcs {
+ if a.Label == b.Label {
+ if !Equal(ctx, a, b) {
+ return false
+ }
+ continue loop1
+ }
+ }
+ return false
+ }
+
+ // We do not need to do the following check, because of the pigeon-hole principle.
+ // loop2:
+ // for _, b := range y.Arcs {
+ // for _, a := range x.Arcs {
+ // if a.Label == b.Label {
+ // continue loop2
+ // }
+ // }
+ // return false
+ // }
+
+ return equalTerminal(ctx, x.Value, y.Value)
+}
+
+func equalTerminal(ctx *adt.OpContext, v, w adt.Value) bool {
+ if v == w {
+ return true
+ }
+ switch x := v.(type) {
+ case *adt.Num, *adt.String, *adt.Bool, *adt.Bytes:
+ if b, ok := adt.BinOp(ctx, adt.EqualOp, v, w).(*adt.Bool); ok {
+ return b.B
+ }
+ return false
+
+ // TODO: for the remainder we are dealing with non-concrete values, so we
+ // could also just not bother.
+
+ case *adt.BoundValue:
+ if y, ok := w.(*adt.BoundValue); ok {
+ return x.Op == y.Op && Equal(ctx, x.Value, y.Value)
+ }
+
+ case *adt.BasicType:
+ if y, ok := w.(*adt.BasicType); ok {
+ return x.K == y.K
+ }
+
+ case *adt.Conjunction:
+ y, ok := w.(*adt.Conjunction)
+ if !ok || len(x.Values) != len(y.Values) {
+ return false
+ }
+ // always ordered the same
+ for i, xe := range x.Values {
+ if !Equal(ctx, xe, y.Values[i]) {
+ return false
+ }
+ }
+ return true
+
+ case *adt.Disjunction:
+ // The best way to compute this is with subsumption, but even that won't
+ // be too accurate. Assume structural equivalence for now.
+ y, ok := w.(*adt.Disjunction)
+ if !ok || len(x.Values) != len(y.Values) {
+ return false
+ }
+ for i, xe := range x.Values {
+ if !Equal(ctx, xe, y.Values[i]) {
+ return false
+ }
+ }
+ return true
+
+ case *adt.ListMarker:
+ _, ok := w.(*adt.ListMarker)
+ return ok
+
+ case *adt.StructMarker:
+ _, ok := w.(*adt.StructMarker)
+ return ok
+
+ case *adt.BuiltinValidator:
+ }
+
+ return false
+}
diff --git a/internal/core/eval/eval.go b/internal/core/eval/eval.go
new file mode 100644
index 0000000..28fe99b
--- /dev/null
+++ b/internal/core/eval/eval.go
@@ -0,0 +1,1504 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package eval contains the high level CUE evaluation strategy.
+//
+// CUE allows for a significant amount of freedom in order of evaluation due to
+// the commutativity of the unification operation. This package implements one
+// of the possible strategies.
+package eval
+
+// TODO:
+// - result should be nodeContext: this allows optionals info to be extracted
+// and computed.
+//
+
+import (
+ "fmt"
+
+ "cuelang.org/go/cue/ast"
+ "cuelang.org/go/cue/errors"
+ "cuelang.org/go/cue/token"
+ "cuelang.org/go/internal/core/adt"
+ "cuelang.org/go/internal/core/debug"
+)
+
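+// Evaluate evaluates v to completion (adt.Finalized) using a new Evaluator
+// for r.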
+func Evaluate(r adt.Runtime, v *adt.Vertex) {
+ format := func(n adt.Node) string {
+ return debug.NodeString(r, n, printConfig)
+ }
+ e := New(r)
+ c := adt.New(v, &adt.Config{
+ Runtime: r,
+ Unifier: e,
+ Format: format,
+ })
+ e.Unify(c, v, adt.Finalized)
+}
+
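+// New creates a new Evaluator for the given Runtime.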
+func New(r adt.Runtime) *Evaluator {
+ return &Evaluator{r: r, index: r}
+}
+
+// TODO: Note: NewContext takes essentially a cue.Value. By making this
+// type more central, we can perhaps avoid context creation.
+
+func NewContext(r adt.Runtime, v *adt.Vertex) *adt.OpContext {
+ e := New(r)
+ return e.NewContext(v)
+}
+
+var printConfig = &debug.Config{Compact: true}
+
+func (e *Evaluator) NewContext(v *adt.Vertex) *adt.OpContext {
+ format := func(n adt.Node) string {
+ return debug.NodeString(e.r, n, printConfig)
+ }
+ return adt.New(v, &adt.Config{
+ Runtime: e.r,
+ Unifier: e,
+ Format: format,
+ })
+}
+
+var structSentinel = &adt.StructMarker{}
+
+var incompleteSentinel = &adt.Bottom{
+ Code: adt.IncompleteError,
+ Err: errors.Newf(token.NoPos, "incomplete"),
+}
+
+type Evaluator struct {
+ r adt.Runtime
+ index adt.StringIndexer
+ closeID uint32
+}
+
+func (e *Evaluator) nextID() uint32 {
+ e.closeID++
+ return e.closeID
+}
+
+func (e *Evaluator) Eval(v *adt.Vertex) errors.Error {
+ if v.Value == nil {
+ ctx := adt.NewContext(e.r, e, v)
+ e.Unify(ctx, v, adt.Finalized)
+ }
+
+ // extract error if needed.
+ return nil
+}
+
+// Evaluate is used to evaluate a sub expression while evaluating a Vertex
+// with Unify. It may or may not return the original Vertex. It may also
+// terminate evaluation early if it has enough evidence that a certain value
+// can be the only value in a valid configuration. This means that an error
+// may go undetected at this point, as long as it is caught later.
+//
+func (e *Evaluator) Evaluate(c *adt.OpContext, v *adt.Vertex) adt.Value {
+ var resultValue adt.Value
+ if v.Value == nil {
+ save := *v
+ // Use node itself to allow for cycle detection.
+ s := e.evalVertex(c, v, adt.Partial)
+
+ if d := s.disjunct; d != nil && len(d.Values) > 1 && d.NumDefaults != 1 {
+ v.Value = d
+ v.Arcs = nil
+ v.Structs = nil // TODO: maybe not do this.
+ // The conjuncts will have too much information. Better have no
+ // information than incorrect information.
+ for _, d := range d.Values {
+ d.Conjuncts = nil
+ }
+ }
+
+ resultValue = v.Value
+
+ result := s.result()
+ *v = save
+
+ if result.Value != nil {
+ *v = *result
+ resultValue = result.Value
+ }
+
+ // TODO: this seems unnecessary as long as we have a better way
+ // to handle incomplete, and perhaps referenced, nodes.
+ if c.IsTentative() && isStruct(v) {
+ // TODO(perf): do something more efficient perhaps? This discards
+ // the computed arcs so far. Instead, we could have a separate
+ // marker to accumulate results. As this only happens within
+ // comprehensions, the effect is likely minimal, though.
+ arcs := v.Arcs
+ *v = save
+ return &adt.Vertex{
+ Parent: v.Parent,
+ Value: &adt.StructMarker{},
+ Arcs: arcs,
+ }
+ }
+ // *v = save // DO NOT ADD.
+ err, _ := resultValue.(*adt.Bottom)
+ // BEFORE RESTORING, copy the value to return one
+ // with the temporary arcs.
+ if !s.done() && (err == nil || err.IsIncomplete()) {
+ // Clear values afterwards
+ *v = save
+ }
+ if !s.done() && s.hasDisjunction() {
+ return &adt.Bottom{Code: adt.IncompleteError}
+ }
+ if s.hasResult() {
+ if b, _ := v.Value.(*adt.Bottom); b != nil {
+ *v = save
+ return b
+ }
+ // TODO: Only use result when not a cycle.
+ v = result
+ }
+ // TODO: Store if concrete and fully resolved.
+
+ } else {
+ b, _ := v.Value.(*adt.Bottom)
+ if b != nil {
+ return b
+ }
+ }
+
+ switch v.Value.(type) {
+ case nil:
+ // Error saved in result.
+ return resultValue // incomplete
+
+ case *adt.ListMarker, *adt.StructMarker:
+ return v
+
+ default:
+ return v.Value
+ }
+}
+
+// Unify implements adt.Unifier.
+//
+// May not evaluate the entire value, but just enough to be able to compute.
+//
+// Phase one: record everything concrete.
+// Phase two: record incomplete values.
+// Phase three: record cycles.
+func (e *Evaluator) Unify(c *adt.OpContext, v *adt.Vertex, state adt.VertexStatus) {
+ // defer c.PopVertex(c.PushVertex(v))
+
+ if state <= v.Status()+1 {
+ return
+ }
+
+ if x := v.Value; x != nil {
+ // if state == adt.Partial || x == cycle {
+ // return
+ // }
+ return
+ }
+
+ n := e.evalVertex(c, v, state)
+
+ switch d := n.disjunct; {
+ case d != nil && len(d.Values) == 1:
+ *v = *(d.Values[0])
+
+ case d != nil && len(d.Values) > 0:
+ v.Value = d
+ v.Arcs = nil
+ v.Structs = nil
+ // The conjuncts will have too much information. Better have no
+ // information than incorrect information.
+ for _, d := range d.Values {
+ d.Conjuncts = nil
+ }
+
+ default:
+ if r := n.result(); r.Value != nil {
+ *v = *r
+ }
+ }
+
+ // Else set it to something.
+
+ if v.Value == nil {
+ panic("errer")
+ }
+
+ // Check whether result is done.
+}
+
+// evalVertex computes the vertex results. The state indicates the minimum
+// status to which this vertex should be evaluated. It should be either
+// adt.Finalized or adt.Partial.
+func (e *Evaluator) evalVertex(c *adt.OpContext, v *adt.Vertex, state adt.VertexStatus) *nodeShared {
+ // fmt.Println(debug.NodeString(c.StringIndexer, v, nil))
+ shared := &nodeShared{
+ ctx: c,
+ eval: e,
+ node: v,
+ stack: nil, // silence linter
+ }
+ saved := *v
+
+ for i := 0; ; i++ {
+
+ // Clear any remaining error.
+ if err := c.Err(); err != nil {
+ panic("uncaught error")
+ }
+
+ // Set the cache to a cycle error to ensure a cyclic reference will result
+ // in an error if applicable. A cyclic error may be ignored for
+ // non-expression references. The cycle error may also be removed as soon
+ // as there is evidence what a correct value must be, but before all
+ // validation has taken place.
+ *v = saved
+ v.Value = cycle
+ v.UpdateStatus(adt.Evaluating)
+
+ // If the result is a struct, it needs to be closed if:
+ // 1) this node introduces a definition
+ // 2) this node is a child of a node that introduces a definition,
+ // recursively.
+ // 3) this node embeds a closed struct.
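+ // For example (illustrative): unifying a closed definition with a struct
+ // that adds an unknown field results in an error:
+ //
+ //    #A: {a: int}
+ //    b: #A & {x: 1}   // field x not allowed in closed struct #A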
+ needClose := v.Label.IsDef()
+
+ n := &nodeContext{
+ kind: adt.TopKind,
+ nodeShared: shared,
+ needClose: needClose,
+
+ // These get cleared upon proof to the contrary.
+ // isDefault: true,
+ isFinal: true,
+ }
+
+ closeID := uint32(0)
+
+ for _, x := range v.Conjuncts {
+ closeID := closeID
+ // TODO: needed for reentrancy. Investigate usefulness for cycle
+ // detection.
+ if x.Env != nil && x.Env.CloseID != 0 {
+ closeID = x.Env.CloseID
+ }
+ n.addExprConjunct(x, closeID, true)
+ }
+
+ if i == 0 {
+ // Use maybeSetCache for cycle breaking
+ for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() {
+ }
+ if v.Status() > adt.Evaluating && state <= adt.Partial {
+ // We have found a partial result. There may still be errors
+ // down the line which may result from further evaluating this
+ // field, but that will be caught when evaluating this field
+ // for real.
+ shared.setResult(v)
+ return shared
+ }
+ if !n.done() && len(n.disjunctions) > 0 && isEvaluating(v) {
+ // We disallow entering computations of disjunctions with
+ // incomplete data.
+ b := c.NewErrf("incomplete cause disjunction")
+ b.Code = adt.IncompleteError
+ v.SetValue(n.ctx, adt.Finalized, b)
+ shared.setResult(v)
+ return shared
+ }
+ }
+
+ // Handle disjunctions. If there are no disjunctions, this call is
+ // equivalent to calling n.postDisjunct.
+ if n.tryDisjuncts() {
+ if v.Value == nil {
+ v.Value = n.getValidators()
+ }
+
+ break
+ }
+ }
+
+ return shared
+}
+
+func isStruct(v *adt.Vertex) bool {
+ _, ok := v.Value.(*adt.StructMarker)
+ return ok
+}
+
+func (n *nodeContext) postDisjunct() {
+ ctx := n.ctx
+
+ // Use maybeSetCache for cycle breaking
+ for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() {
+ }
+
+ // TODO: preparation for association lists:
+ // We assume that association types may not be created dynamically for now.
+ // So we add lists
+ n.addLists(ctx)
+
+ switch err := n.getErr(); {
+ case err != nil:
+ n.node.Value = err
+ n.errs = nil
+
+ default:
+ if isEvaluating(n.node) {
+ // TODO: this does not yet validate all values.
+
+ if !n.done() { // && !ctx.IsTentative() {
+ // collect incomplete errors.
+ // len(n.ifClauses) == 0 &&
+ // len(n.forClauses) == 0 &&
+ var err *adt.Bottom // n.incomplete
+ // err = n.incomplete
+ for _, d := range n.dynamicFields {
+ x, _ := ctx.Concrete(d.env, d.field.Key, "dynamic field")
+ b, _ := x.(*adt.Bottom)
+ err = adt.CombineErrors(nil, err, b)
+ }
+ for _, c := range n.forClauses {
+ f := func(env *adt.Environment, st *adt.StructLit) {}
+ err = adt.CombineErrors(nil, err, ctx.Yield(c.env, c.yield, f))
+ }
+ for _, x := range n.exprs {
+ x, _ := ctx.Evaluate(x.Env, x.Expr())
+ b, _ := x.(*adt.Bottom)
+ err = adt.CombineErrors(nil, err, b)
+ }
+ if err == nil {
+ // safeguard.
+ err = incompleteSentinel
+ }
+ n.node.Value = err
+ } else {
+ n.node.Value = nil
+ }
+ }
+
+ // We are no longer evaluating.
+ n.node.UpdateStatus(adt.Partial)
+
+ // Either set to Conjunction or error.
+ var v adt.Value = n.node.Value
+ kind := n.kind
+ markStruct := false
+ if n.isStruct {
+ if kind != 0 && kind&adt.StructKind == 0 {
+ n.node.Value = &adt.Bottom{
+ Err: errors.Newf(token.NoPos,
+ "conflicting values struct and %s", n.kind),
+ }
+ }
+ markStruct = true
+ } else if len(n.node.Structs) > 0 {
+ markStruct = kind&adt.StructKind != 0 && !n.hasTop
+ }
+ if v == nil && markStruct {
+ kind = adt.StructKind
+ n.node.Value = &adt.StructMarker{}
+ v = n.node
+ }
+ if v != nil && adt.IsConcrete(v) {
+ if n.scalar != nil {
+ kind = n.scalar.Kind()
+ }
+ if v.Kind()&^kind != 0 {
+ p := token.NoPos
+ if src := v.Source(); src != nil {
+ p = src.Pos()
+ }
+ n.addErr(errors.Newf(p,
+ // TODO(err): position of all value types.
+ "conflicting types",
+ ))
+ }
+ if n.lowerBound != nil {
+ if b := ctx.Validate(n.lowerBound, v); b != nil {
+ n.addBottom(b)
+ }
+ }
+ if n.upperBound != nil {
+ if b := ctx.Validate(n.upperBound, v); b != nil {
+ n.addBottom(b)
+ }
+ }
+ for _, v := range n.checks {
+ if b := ctx.Validate(v, n.node); b != nil {
+ n.addBottom(b)
+ }
+ }
+
+ } else if !ctx.IsTentative() {
+ n.node.Value = n.getValidators()
+ }
+ // else if v == nil {
+ // n.node.Value = incompleteSentinel
+ // }
+
+ if v == nil {
+ break
+ }
+
+ switch {
+ case v.Kind() == adt.ListKind:
+ for _, a := range n.node.Arcs {
+ if a.Label.Typ() == adt.StringLabel {
+ n.addErr(errors.Newf(token.NoPos,
+ // TODO(err): add positions for list and arc definitions.
+ "list may not have regular fields"))
+ }
+ }
+
+ // case !isStruct(n.node) && v.Kind() != adt.BottomKind:
+ // for _, a := range n.node.Arcs {
+ // if a.Label.IsRegular() {
+ // n.addErr(errors.Newf(token.NoPos,
+ // // TODO(err): add positions of non-struct values and arcs.
+ // "cannot combine scalar values with arcs"))
+ // }
+ // }
+ }
+ }
+
+ var c *CloseDef
+ if a, _ := n.node.Closed.(*acceptor); a != nil {
+ c = a.tree
+ n.needClose = n.needClose || a.isClosed
+ }
+
+ updated := updateClosed(c, n.replace)
+ if updated == nil && n.needClose {
+ updated = &CloseDef{}
+ }
+
+ // TODO retrieve from env.
+
+ if err := n.getErr(); err != nil {
+ if b, _ := n.node.Value.(*adt.Bottom); b != nil {
+ err = adt.CombineErrors(nil, b, err)
+ }
+ n.node.Value = err
+ // TODO: add return: if evaluation of arcs is important it can be done
+ // later. Logically we're done.
+ }
+
+ m := &acceptor{
+ tree: updated,
+ fields: n.optionals,
+ isClosed: n.needClose,
+ openList: n.openList,
+ isList: n.node.IsList(),
+ }
+ if updated != nil || len(n.optionals) > 0 {
+ n.node.Closed = m
+ }
+
+ // Visit arcs recursively to validate and compute error.
+ for _, a := range n.node.Arcs {
+ if updated != nil {
+ a.Closed = m
+ }
+ if updated != nil && m.isClosed {
+ if err := m.verifyArcAllowed(n.ctx, a.Label); err != nil {
+ n.node.Value = err
+ }
+ // TODO: use continue to not process already failed fields,
+ // or at least don't record recursive error.
+ // continue
+ }
+ // Call UpdateStatus here to be absolutely sure the status is set
+ // correctly and that we are not regressing.
+ n.node.UpdateStatus(adt.EvaluatingArcs)
+ n.eval.Unify(ctx, a, adt.Finalized)
+ if err, _ := a.Value.(*adt.Bottom); err != nil {
+ n.node.AddChildError(err)
+ }
+ }
+
+ n.node.UpdateStatus(adt.Finalized)
+}
+
+// TODO: this is now a sentinel. Use a user-facing error that traces where
+// the cycle originates.
+var cycle = &adt.Bottom{
+ Err: errors.Newf(token.NoPos, "cycle error"),
+ Code: adt.CycleError,
+}
+
+func isEvaluating(v *adt.Vertex) bool {
+ isCycle := v.Status() == adt.Evaluating
+ if isCycle != (v.Value == cycle) {
+ panic(fmt.Sprintf("cycle data of sync %d vs %#v", v.Status(), v.Value))
+ }
+ return isCycle
+}
+
+type nodeShared struct {
+ eval *Evaluator
+ ctx *adt.OpContext
+ sub []*adt.Environment // Environment cache
+ node *adt.Vertex
+
+ // Disjunction handling
+ disjunct *adt.Disjunction
+ resultNode *nodeContext
+ result_ adt.Vertex
+ stack []int
+}
+
+func (n *nodeShared) result() *adt.Vertex {
+ return &n.result_
+}
+
+func (n *nodeShared) setResult(v *adt.Vertex) {
+ n.result_ = *v
+}
+
+func (n *nodeShared) hasResult() bool {
+ return n.resultNode != nil //|| n.hasResult_
+ // return n.resultNode != nil || n.hasResult_
+}
+
+func (n *nodeShared) done() bool {
+ // if d := n.disjunct; d == nil || len(n.disjunct.Values) == 0 {
+ // return false
+ // }
+ if n.resultNode == nil {
+ return false
+ }
+ return n.resultNode.done()
+}
+
+func (n *nodeShared) hasDisjunction() bool {
+ if n.resultNode == nil {
+ return false
+ }
+ return len(n.resultNode.disjunctions) > 0
+}
+
+func (n *nodeShared) isDefault() bool {
+ if n.resultNode == nil {
+ return false
+ }
+ return n.resultNode.defaultMode == isDefault
+}
+
+// A nodeContext is used to collate all conjuncts of a value to facilitate
+// unification. Conceptually order of unification does not matter. However,
+// order has relevance when performing checks of non-monotonic properties. Such
+// checks should only be performed once the full value is known.
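+//
+// For example (illustrative): whether a struct may be closed, or which
+// disjunct ends up as the default, can only be decided once all conjuncts
+// have been collated; a check performed earlier could be invalidated by a
+// conjunct added later.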
+type nodeContext struct {
+ *nodeShared
+
+ // TODO:
+ // filter *adt.Vertex a subset of composite with concrete fields for
+ // bloom-like filtering of disjuncts. We should first verify, however,
+ // whether some breadth-first search gives sufficient performance, as this
+ // should already ensure a quick-fail for struct disjunctions with
+ // discriminators.
+
+ // Current value (may be under construction)
+ scalar adt.Value // TODO: use Value in node.
+
+ // Concrete conjuncts
+ kind adt.Kind
+ lowerBound *adt.BoundValue // > or >=
+ upperBound *adt.BoundValue // < or <=
+ checks []adt.Validator // BuiltinValidator, other bound values.
+ errs *adt.Bottom
+ incomplete *adt.Bottom
+
+ // Struct information
+ dynamicFields []envDynamic
+ ifClauses []envYield
+ forClauses []envYield
+ optionals []fieldSet // env + field
+ // NeedClose:
+ // - node starts definition
+ // - embeds a definition
+ // - parent node is closing
+ needClose bool
+ openList bool
+ isStruct bool
+ hasTop bool
+ newClose *CloseDef
+ // closeID uint32 // from parent, or if not exist, new if introducing a def.
+ replace map[uint32]*CloseDef
+
+ // Expression conjuncts
+ lists []envList
+ vLists []*adt.Vertex
+ exprs []conjunct
+
+ // Disjunction handling
+ disjunctions []envDisjunct
+ defaultMode defaultMode
+ isFinal bool
+}
+
+func (n *nodeContext) done() bool {
+ return len(n.dynamicFields) == 0 &&
+ len(n.ifClauses) == 0 &&
+ len(n.forClauses) == 0 &&
+ len(n.exprs) == 0
+}
+
+// hasErr is used to determine if an evaluation path, for instance a single
+// path after expanding all disjunctions, has an error.
+func (n *nodeContext) hasErr() bool {
+ if n.node.ChildErrors != nil {
+ return true
+ }
+ if n.node.Status() > adt.Evaluating && n.node.IsErr() {
+ return true
+ }
+ return n.ctx.HasErr() || n.errs != nil
+}
+
+func (n *nodeContext) getErr() *adt.Bottom {
+ n.errs = adt.CombineErrors(nil, n.errs, n.ctx.Err())
+ return n.errs
+}
+
+// getValidators returns the value to use for the vertex in case there is no
+// concrete value.
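+// For example (illustrative): for the conjuncts >5 and <10 with no concrete
+// value, the result is the conjunction >5 & <10 rather than a concrete value.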
+func (n *nodeContext) getValidators() adt.Value {
+ ctx := n.ctx
+
+ a := []adt.Value{}
+ // if n.node.Value != nil {
+ // a = append(a, n.node.Value)
+ // }
+ kind := adt.TopKind
+ if n.lowerBound != nil {
+ a = append(a, n.lowerBound)
+ kind &= n.lowerBound.Kind()
+ }
+ if n.upperBound != nil {
+ a = append(a, n.upperBound)
+ kind &= n.upperBound.Kind()
+ }
+ for _, c := range n.checks {
+ // Drop !=x if x is out of bounds with another bound.
+ if b, _ := c.(*adt.BoundValue); b != nil && b.Op == adt.NotEqualOp {
+ if n.upperBound != nil &&
+ adt.SimplifyBounds(ctx, n.kind, n.upperBound, b) != nil {
+ continue
+ }
+ if n.lowerBound != nil &&
+ adt.SimplifyBounds(ctx, n.kind, n.lowerBound, b) != nil {
+ continue
+ }
+ }
+ a = append(a, c)
+ kind &= c.Kind()
+ }
+ if kind&^n.kind != 0 {
+ a = append(a, &adt.BasicType{K: n.kind})
+ }
+
+ var v adt.Value
+ switch len(a) {
+ case 0:
+ // Src is the combined input.
+ v = &adt.BasicType{K: n.kind}
+
+ // TODO: Change to isStruct?
+ if len(n.node.Structs) > 0 {
+ // n.isStruct = true
+ v = structSentinel
+
+ }
+
+ case 1:
+ v = a[0].(adt.Value)
+
+ default:
+ v = &adt.Conjunction{Values: a}
+ }
+
+ return v
+}
+
+func (n *nodeContext) maybeSetCache() {
+ if n.node.Status() > adt.Evaluating { // n.node.Value != nil
+ return
+ }
+ if n.scalar != nil {
+ n.node.SetValue(n.ctx, adt.Partial, n.scalar)
+ }
+ if n.errs != nil {
+ n.node.SetValue(n.ctx, adt.Partial, n.errs)
+ }
+}
+
+type conjunct struct {
+ adt.Conjunct
+ closeID uint32
+ top bool
+}
+
+type envDynamic struct {
+ env *adt.Environment
+ field *adt.DynamicField
+}
+
+type envYield struct {
+ env *adt.Environment
+ yield adt.Yielder
+}
+
+type envList struct {
+ env *adt.Environment
+ list *adt.ListLit
+ n int64 // recorded length after evaluation
+ elipsis *adt.Ellipsis
+}
+
+func (n *nodeContext) addBottom(b *adt.Bottom) {
+ n.errs = adt.CombineErrors(nil, n.errs, b)
+}
+
+func (n *nodeContext) addErr(err errors.Error) {
+ if err != nil {
+ n.errs = adt.CombineErrors(nil, n.errs, &adt.Bottom{
+ Err: err,
+ })
+ }
+}
+
+// addExprConjunct attempts to evaluate an adt.Expr and insert the value
+// into the nodeContext if successful, or queues it for later evaluation if it
+// is incomplete or is not a value.
+func (n *nodeContext) addExprConjunct(v adt.Conjunct, def uint32, top bool) {
+ env := v.Env
+ if env != nil && env.CloseID != def {
+ e := *env
+ e.CloseID = def
+ env = &e
+ }
+ switch x := v.Expr().(type) {
+ case adt.Value:
+ n.addValueConjunct(env, x)
+
+ case *adt.BinaryExpr:
+ if x.Op == adt.AndOp {
+ n.addExprConjunct(adt.MakeConjunct(env, x.X), def, false)
+ n.addExprConjunct(adt.MakeConjunct(env, x.Y), def, false)
+ } else {
+ n.evalExpr(v, def, top)
+ }
+
+ case *adt.StructLit:
+ n.addStruct(env, x, def, top)
+
+ case *adt.ListLit:
+ n.lists = append(n.lists, envList{env: env, list: x})
+
+ case *adt.DisjunctionExpr:
+ if n.disjunctions != nil {
+ _ = n.disjunctions
+ }
+ n.addDisjunction(env, x, def, top)
+
+ default:
+ // Must be Resolver or Evaluator.
+ n.evalExpr(v, def, top)
+ }
+
+ if top {
+ n.updateReplace(v.Env)
+ }
+}
+
+// evalExpr is only called by addExprConjunct.
+func (n *nodeContext) evalExpr(v adt.Conjunct, closeID uint32, top bool) {
+ // Require an Environment.
+ ctx := n.ctx
+
+ switch x := v.Expr().(type) {
+ case adt.Resolver:
+ arc, err := ctx.Resolve(v.Env, x)
+ if err != nil {
+ if err.IsIncomplete() {
+ n.incomplete = adt.CombineErrors(nil, n.incomplete, err)
+ } else {
+ n.addBottom(err)
+ break
+ }
+ }
+ if arc == nil {
+ n.exprs = append(n.exprs, conjunct{v, closeID, top})
+ break
+ }
+
+ // If this is a cycle error, we have reached a fixed point and adding
+ // conjuncts at this point will not change the value. Also, continuing
+ // to pursue this value will result in an infinite loop.
+ //
+ // TODO: add a mechanism so that the computation will only have to be
+ // done once?
+ if isEvaluating(arc) {
+ break
+ }
+
+ // TODO: detect structural cycles here. A structural cycle can occur
+ // if it is not a reference cycle, but refers to a parent node.
+ // This should only be allowed if it is unified with a finite structure.
+
+ if arc.Label.IsDef() {
+ n.insertClosed(arc)
+ } else {
+ for _, a := range arc.Conjuncts {
+ n.addExprConjunct(a, closeID, top)
+ }
+ }
+
+ case adt.Evaluator:
+ // adt.Interpolation, adt.UnaryExpr, adt.BinaryExpr, adt.CallExpr
+ val, complete := ctx.Evaluate(v.Env, v.Expr())
+ if !complete {
+ n.exprs = append(n.exprs, conjunct{v, closeID, top})
+ break
+ }
+
+ if v, ok := val.(*adt.Vertex); ok {
+ // Handle generated disjunctions (as in the 'or' builtin).
+ // These come as a Vertex, but should not be added as a value.
+ b, ok := v.Value.(*adt.Bottom)
+ if ok && b.IsIncomplete() && len(v.Conjuncts) > 0 {
+ for _, c := range v.Conjuncts {
+ n.addExprConjunct(c, closeID, top)
+ }
+ break
+ }
+ }
+
+ // TODO: insert in vertex as well
+ n.addValueConjunct(v.Env, val)
+
+ default:
+ panic(fmt.Sprintf("unknown expression of type %T", x))
+ }
+}
+
+func (n *nodeContext) insertClosed(arc *adt.Vertex) {
+ id := n.eval.nextID()
+ n.needClose = true
+
+ current := n.newClose
+ n.newClose = nil
+
+ for _, a := range arc.Conjuncts {
+ n.addExprConjunct(a, id, false)
+ }
+
+ current, n.newClose = n.newClose, current
+
+ if current == nil {
+ current = &CloseDef{ID: id}
+ }
+ n.addAnd(current)
+}
+
+func (n *nodeContext) addValueConjunct(env *adt.Environment, v adt.Value) {
+ if x, ok := v.(*adt.Vertex); ok {
+ needClose := false
+ if isStruct(x) {
+ n.isStruct = true
+ // TODO: find better way to mark as struct.
+ // For instance, we may want to add a faux
+ // Structlit for topological sort.
+ // return
+
+ if x.IsClosed(n.ctx) {
+ needClose = true
+ }
+
+ n.node.AddStructs(x.Structs...)
+ }
+
+ if len(x.Conjuncts) > 0 {
+ if needClose {
+ n.insertClosed(x)
+ return
+ }
+ for _, c := range x.Conjuncts {
+ n.addExprConjunct(c, 0, false) // Pass from eval
+ }
+ return
+ }
+
+ if x.IsList() {
+ n.vLists = append(n.vLists, x)
+ return
+ }
+
+ // TODO: evaluate value?
+ switch v := x.Value.(type) {
+ case *adt.ListMarker:
+ panic("unreachable")
+
+ case *adt.StructMarker:
+ for _, a := range x.Arcs {
+ // TODO, insert here as
+ n.insertField(a.Label, adt.MakeConjunct(nil, a))
+ // sub, _ := n.node.GetArc(a.Label)
+ // sub.Add(a)
+ }
+
+ default:
+ n.addValueConjunct(env, v)
+
+ for _, a := range x.Arcs {
+ // TODO, insert here as
+ n.insertField(a.Label, adt.MakeConjunct(nil, a))
+ // sub, _ := n.node.GetArc(a.Label)
+ // sub.Add(a)
+ }
+ }
+
+ return
+ // TODO: Use the Closer to close other fields as well?
+ }
+
+ if b, ok := v.(*adt.Bottom); ok {
+ n.addBottom(b)
+ return
+ }
+
+ ctx := n.ctx
+ kind := n.kind & v.Kind()
+ if kind == adt.BottomKind {
+ // TODO: how to get other conflicting values?
+ n.addErr(errors.Newf(token.NoPos,
+ "invalid value %s (mismatched types %s and %s)",
+ ctx.Str(v), v.Kind(), n.kind))
+ return
+ }
+ n.kind = kind
+
+ switch x := v.(type) {
+ case *adt.Disjunction:
+ n.addDisjunctionValue(env, x, 0, true)
+
+ case *adt.Conjunction:
+ for _, x := range x.Values {
+ n.addValueConjunct(env, x)
+ }
+
+ case *adt.Top:
+ n.hasTop = true
+ // TODO: Is this correct? Needed for ellipsis, but not sure for others.
+ n.optionals = append(n.optionals, fieldSet{env: env, isOpen: true})
+
+ case *adt.BasicType:
+
+ case *adt.BoundValue:
+ switch x.Op {
+ case adt.LessThanOp, adt.LessEqualOp:
+ if y := n.upperBound; y != nil {
+ n.upperBound = nil
+ n.addValueConjunct(env, adt.SimplifyBounds(ctx, n.kind, x, y))
+ return
+ }
+ n.upperBound = x
+
+ case adt.GreaterThanOp, adt.GreaterEqualOp:
+ if y := n.lowerBound; y != nil {
+ n.lowerBound = nil
+ n.addValueConjunct(env, adt.SimplifyBounds(ctx, n.kind, x, y))
+ return
+ }
+ n.lowerBound = x
+
+ case adt.EqualOp, adt.NotEqualOp, adt.MatchOp, adt.NotMatchOp:
+ n.checks = append(n.checks, x)
+ return
+ }
+
+ case adt.Validator:
+ n.checks = append(n.checks, x)
+
+ case *adt.Vertex:
+ // handled above.
+
+ case adt.Value: // *NullLit, *BoolLit, *NumLit, *StringLit, *BytesLit
+ if y := n.scalar; y != nil {
+ if b, ok := adt.BinOp(ctx, adt.EqualOp, x, y).(*adt.Bool); !ok || !b.B {
+ n.addErr(errors.Newf(ctx.Pos(), "incompatible values %s and %s", ctx.Str(x), ctx.Str(y)))
+ }
+ // TODO: do we need to explicitly add again?
+ // n.scalar = nil
+ // n.addValueConjunct(c, adt.BinOp(c, adt.EqualOp, x, y))
+ break
+ }
+ n.scalar = x
+
+ default:
+ panic(fmt.Sprintf("unknown value type %T", x))
+ }
+
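+ // Illustrative (assumed semantics of SimplifyBounds): when both bounds are
+ // set they may collapse to a single value, e.g. >=3 & <=3 becomes 3, and
+ // contradictory bounds such as >10 & <3 yield an error value.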
+ if n.lowerBound != nil && n.upperBound != nil {
+ if u := adt.SimplifyBounds(ctx, n.kind, n.lowerBound, n.upperBound); u != nil {
+ n.lowerBound = nil
+ n.upperBound = nil
+ n.addValueConjunct(env, u)
+ }
+ }
+}
+
+// addStruct collates the declarations of a struct.
+//
+// addStruct fulfills two additional pivotal functions:
+// 1) Implement vertex unification (this happens through De Bruijn indices
+// combined with a proper setup of Environments).
+// 2) Implied closedness for definitions.
+//
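+// For example (illustrative): when {a: {b: 1}} and {a: {c: 2}} are unified,
+// both struct literals for a are collated into the same nodeContext, and
+// references inside them resolve against the same Vertex, yielding
+// a: {b: 1, c: 2}.
+//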
+func (n *nodeContext) addStruct(
+ env *adt.Environment,
+ s *adt.StructLit,
+ newDef uint32,
+ top bool) {
+
+ ctx := n.ctx
+ n.node.AddStructs(s)
+
+ // Inherit closeID from environment, unless this is a new definition.
+ closeID := newDef
+ if closeID == 0 && env != nil {
+ closeID = env.CloseID
+ }
+
+ // NOTE: This is a crucial point in the code:
+// Unification dereferencing happens here. The child nodes are set to
+ // an Environment linked to the current node. Together with the De Bruijn
+ // indices, this determines to which Vertex a reference resolves.
+
+ // TODO(perf): consider using environment cache:
+ // var childEnv *adt.Environment
+ // for _, s := range n.nodeCache.sub {
+ // if s.Up == env {
+ // childEnv = s
+ // }
+ // }
+ childEnv := &adt.Environment{
+ Up: env,
+ Vertex: n.node,
+ CloseID: closeID,
+ }
+
+ var hasOther, hasBulk adt.Node
+
+ opt := fieldSet{env: childEnv}
+
+ for _, d := range s.Decls {
+ switch x := d.(type) {
+ case *adt.Field:
+ opt.MarkField(ctx, x)
+ // handle in next iteration.
+
+ case *adt.OptionalField:
+ opt.AddOptional(ctx, x)
+
+ case *adt.DynamicField:
+ hasOther = x
+ n.dynamicFields = append(n.dynamicFields, envDynamic{childEnv, x})
+ opt.AddDynamic(ctx, childEnv, x)
+
+ case *adt.ForClause:
+ hasOther = x
+ n.forClauses = append(n.forClauses, envYield{childEnv, x})
+
+ case adt.Yielder:
+ hasOther = x
+ n.ifClauses = append(n.ifClauses, envYield{childEnv, x})
+
+ case adt.Expr:
+ // push and pop embedding type.
+ id := n.eval.nextID()
+
+ current := n.newClose
+ n.newClose = nil
+
+ hasOther = x
+ n.addExprConjunct(adt.MakeConjunct(childEnv, x), id, false)
+
+ current, n.newClose = n.newClose, current
+
+ if current == nil {
+ current = &CloseDef{ID: id} // TODO: isClosed?
+ } else {
+ // n.needClose = true
+ }
+ n.addOr(closeID, current)
+
+ case *adt.BulkOptionalField:
+ hasBulk = x
+ opt.AddBulk(ctx, x)
+
+ case *adt.Ellipsis:
+ hasBulk = x
+ opt.AddEllipsis(ctx, x)
+
+ default:
+ panic("unreachable")
+ }
+ }
+
+ if hasBulk != nil && hasOther != nil {
+ n.addErr(errors.Newf(token.NoPos, "cannot mix bulk optional fields with dynamic fields, embeddings, or comprehensions within the same struct"))
+ }
+
+ // Apply existing fields
+ for _, arc := range n.node.Arcs {
+ // Reuse adt.Acceptor interface.
+ opt.MatchAndInsert(ctx, arc)
+ }
+
+ n.optionals = append(n.optionals, opt)
+
+ for _, d := range s.Decls {
+ switch x := d.(type) {
+ case *adt.Field:
+ n.insertField(x.Label, adt.MakeConjunct(childEnv, x))
+ }
+ }
+}
+
+func (n *nodeContext) insertField(f adt.Feature, x adt.Conjunct) *adt.Vertex {
+ ctx := n.ctx
+ arc, isNew := n.node.GetArc(f)
+
+ if f.IsString() {
+ n.isStruct = true
+ }
+
+ // TODO: disallow adding conjuncts when cache set?
+ arc.AddConjunct(x)
+
+ if isNew {
+ for _, o := range n.optionals {
+ o.MatchAndInsert(ctx, arc)
+ }
+ }
+ return arc
+}
+
+// expandOne adds dynamic fields to a node until a fixed point is reached.
+// On each iteration, dynamic fields that cannot resolve due to incomplete
+// values are skipped. They will be retried on the next iteration until no
+// progress can be made. Note that a dynamic field may add more dynamic fields.
+//
+// forClauses are processed after all other clauses. A struct may be referenced
+// before it is complete, meaning that fields added by other forms of injection
+// may influence the result of a for clause _after_ it has already been
+// processed. We could instead detect such insertion and feed it to the
+// ForClause to generate another entry or have the for clause be recomputed.
+// This seems to be too complicated and leads to iffy edge cases.
+// TODO(error): detect when a field is added to a struct that is already used
+// in a for clause.
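+//
+// Illustrative CUE for the scenario above (assumed semantics):
+//
+//	x: {a: 1}
+//	y: {for k, v in x {"\(k)": v}}
+//	x: b: 2    // a field added to x after the for clause may already have run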
+func (n *nodeContext) expandOne() (done bool) {
+ if n.done() {
+ return false
+ }
+
+ var progress bool
+
+ if progress = n.injectDynamic(); progress {
+ return true
+ }
+
+ if n.ifClauses, progress = n.injectEmbedded(n.ifClauses); progress {
+ return true
+ }
+
+ if n.forClauses, progress = n.injectEmbedded(n.forClauses); progress {
+ return true
+ }
+
+ // Do expressions after comprehensions, as comprehensions can never
+ // refer to embedded scalars, whereas expressions may refer to generated
+ // fields if we were to allow attributes to be defined alongside
+ // scalars.
+ exprs := n.exprs
+ n.exprs = n.exprs[:0]
+ for _, x := range exprs {
+ n.addExprConjunct(x.Conjunct, x.closeID, x.top)
+
+ // collect 'and' and 'or' expressions
+ }
+ if len(n.exprs) < len(exprs) {
+ return true
+ }
+
+ // No progress, report error later if needed: unification with
+ // disjuncts may resolve this later on.
+ return false
+}
+
+// injectDynamic evaluates and inserts dynamic declarations.
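+// For example (illustrative): the dynamic field below can only be inserted
+// once its key has been evaluated, which may require an earlier iteration to
+// have resolved name:
+//
+//	name:      "foo"
+//	"\(name)": 1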
+func (n *nodeContext) injectDynamic() (progress bool) {
+ ctx := n.ctx
+ k := 0
+
+ a := n.dynamicFields
+ for _, d := range n.dynamicFields {
+ var f adt.Feature
+ v, complete := ctx.Evaluate(d.env, d.field.Key)
+ if !complete {
+ a[k] = d
+ k++
+ continue
+ }
+ if b, _ := v.(*adt.Bottom); b != nil {
+ n.addValueConjunct(nil, b)
+ continue
+ }
+ f = ctx.Label(v)
+ n.insertField(f, adt.MakeConjunct(d.env, d.field))
+ }
+
+ progress = k < len(n.dynamicFields)
+
+ n.dynamicFields = a[:k]
+
+ return progress
+}
+
+// injectEmbedded evaluates and inserts embeddings. It first evaluates all
+// embeddings before inserting the results to ensure that the order of
+// evaluation does not matter.
+func (n *nodeContext) injectEmbedded(all []envYield) (a []envYield, progress bool) {
+ ctx := n.ctx
+ type envStruct struct {
+ env *adt.Environment
+ s *adt.StructLit
+ }
+ var sa []envStruct
+ f := func(env *adt.Environment, st *adt.StructLit) {
+ sa = append(sa, envStruct{env, st})
+ }
+
+ k := 0
+ for _, d := range all {
+ sa = sa[:0]
+
+ if err := ctx.Yield(d.env, d.yield, f); err != nil {
+ if err.IsIncomplete() {
+ all[k] = d
+ k++
+ } else {
+ // continue to collect other errors.
+ n.addBottom(err)
+ }
+ continue
+ }
+
+ for _, st := range sa {
+ n.addStruct(st.env, st.s, 0, true)
+ }
+ }
+
+ return all[:k], k < len(all)
+}
+
+// addLists
+//
+// TODO: association arrays:
+// If an association array marker was present in a struct, create a struct node
+// instead of a list node. In either case, a node may only have list fields
+// or struct fields and not both.
+//
+// addLists should be run after the fixpoint expansion:
+// - it enforces that comprehensions may not refer to the list itself
+// - there may be no other fields within the list.
+//
+// TODO(embeddedScalars): for embedded scalars, there should be another pass
+// of evaluation expressions after expanding lists.
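+//
+// For example (illustrative): [1, 2] & [1, 2, 3] results in an incompatible
+// list lengths error, whereas [1, 2, ...] & [1, 2, 3] unifies to [1, 2, 3].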
+func (n *nodeContext) addLists(c *adt.OpContext) {
+ if len(n.lists) == 0 && len(n.vLists) == 0 {
+ return
+ }
+
+ for _, a := range n.node.Arcs {
+ if t := a.Label.Typ(); t == adt.StringLabel {
+ n.addErr(errors.Newf(token.NoPos, "conflicting types list and struct"))
+ }
+ }
+
+ // fmt.Println(len(n.lists), "ELNE")
+
+ isOpen := true
+ max := 0
+ var maxNode adt.Expr
+
+ for _, l := range n.vLists {
+ elems := l.Elems()
+ isClosed := l.IsClosed(c)
+
+ switch {
+ case len(elems) < max:
+ if isClosed {
+ n.invalidListLength(len(elems), max, l, maxNode)
+ continue
+ }
+
+ case len(elems) > max:
+ if !isOpen {
+ n.invalidListLength(max, len(elems), maxNode, l)
+ continue
+ }
+ isOpen = !isClosed
+ max = len(elems)
+ maxNode = l
+
+ case isClosed:
+ isOpen = false
+ maxNode = l
+ }
+
+ for _, a := range elems {
+ if a.Conjuncts == nil {
+ n.insertField(a.Label, adt.MakeConjunct(nil, a.Value))
+ continue
+ }
+ for _, c := range a.Conjuncts {
+ n.insertField(a.Label, c)
+ }
+ }
+ }
+
+outer:
+ for i, l := range n.lists {
+ index := int64(0)
+ hasComprehension := false
+ for j, elem := range l.list.Elems {
+ switch x := elem.(type) {
+ case adt.Yielder:
+ err := c.Yield(l.env, x, func(e *adt.Environment, st *adt.StructLit) {
+ label, err := adt.MakeLabel(x.Source(), index, adt.IntLabel)
+ n.addErr(err)
+ index++
+ n.insertField(label, adt.MakeConjunct(e, st))
+ })
+ hasComprehension = true
+ if err.IsIncomplete() {
+ // The incomplete case is currently not handled here.
+ }
+
+ case *adt.Ellipsis:
+ if j != len(l.list.Elems)-1 {
+ n.addErr(errors.Newf(token.NoPos,
+ "ellipsis must be last element in list"))
+ }
+
+ n.lists[i].elipsis = x
+
+ default:
+ label, err := adt.MakeLabel(x.Source(), index, adt.IntLabel)
+ n.addErr(err)
+ index++ // TODO: don't use insertField.
+ n.insertField(label, adt.MakeConjunct(l.env, x))
+ }
+
+ // Terminate early in case of runaway comprehension.
+ if !isOpen && int(index) > max {
+ n.invalidListLength(max, int(index), maxNode, l.list)
+ continue outer
+ }
+ }
+
+ switch closed := n.lists[i].elipsis == nil; {
+ case int(index) < max:
+ if closed {
+ n.invalidListLength(int(index), max, l.list, maxNode)
+ continue
+ }
+
+ case int(index) > max,
+ closed && isOpen,
+ (!closed == isOpen) && !hasComprehension:
+ max = int(index)
+ maxNode = l.list
+ isOpen = !closed
+ }
+
+ n.lists[i].n = index
+ }
+
+ // add additionalItem values to list and construct optionals.
+ elems := n.node.Elems()
+ for _, l := range n.vLists {
+ a, _ := l.Closed.(*acceptor)
+ if a == nil {
+ continue
+ }
+
+ newElems := l.Elems()
+ if len(newElems) >= len(elems) {
+ continue // error generated earlier, if applicable.
+ }
+
+ n.optionals = append(n.optionals, a.fields...)
+
+ for _, arc := range elems[len(newElems):] {
+ l.MatchAndInsert(c, arc)
+ }
+ }
+
+ for _, l := range n.lists {
+ if l.elipsis == nil {
+ continue
+ }
+
+ f := fieldSet{env: l.env}
+ f.AddEllipsis(c, l.elipsis)
+
+ n.optionals = append(n.optionals, f)
+
+ for _, arc := range elems[l.n:] {
+ f.MatchAndInsert(c, arc)
+ }
+ }
+
+ sources := []ast.Expr{}
+ // Add conjuncts for additional items.
+ for _, l := range n.lists {
+ if l.elipsis == nil {
+ continue
+ }
+ if src, _ := l.elipsis.Source().(ast.Expr); src != nil {
+ sources = append(sources, src)
+ }
+ }
+
+ n.openList = isOpen
+
+ n.node.SetValue(c, adt.Partial, &adt.ListMarker{
+ Src: ast.NewBinExpr(token.AND, sources...),
+ IsOpen: isOpen,
+ })
+}
+
+func (n *nodeContext) invalidListLength(na, nb int, a, b adt.Expr) {
+ n.addErr(errors.Newf(n.ctx.Pos(),
+ "incompatible list lengths (%d and %d)", na, nb))
+}
diff --git a/internal/core/eval/eval_test.go b/internal/core/eval/eval_test.go
new file mode 100644
index 0000000..9dea9a5
--- /dev/null
+++ b/internal/core/eval/eval_test.go
@@ -0,0 +1,156 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+ "flag"
+ "fmt"
+ "testing"
+
+ "cuelang.org/go/cue/format"
+ "cuelang.org/go/cue/parser"
+ "cuelang.org/go/internal/core/adt"
+ "cuelang.org/go/internal/core/compile"
+ "cuelang.org/go/internal/core/debug"
+ "cuelang.org/go/internal/core/runtime"
+ "cuelang.org/go/internal/cuetxtar"
+ "cuelang.org/go/pkg/strings"
+)
+
+var (
+ update = flag.Bool("update", false, "update the test files")
+ todo = flag.Bool("todo", false, "run tests marked with #todo-compile")
+)
+
+func TestEval(t *testing.T) {
+ test := cuetxtar.TxTarTest{
+ Root: "../../../cue/testdata",
+ Name: "eval",
+ Update: *update,
+ Skip: alwaysSkip,
+ ToDo: needFix,
+ }
+
+ if *todo {
+ test.ToDo = nil
+ }
+
+ r := runtime.New()
+
+ test.Run(t, func(t *cuetxtar.Test) {
+ a := t.ValidInstances()
+
+ v, err := compile.Files(nil, r, a[0].Files...)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ e := Evaluator{
+ r: r,
+ index: r,
+ }
+
+ err = e.Eval(v)
+ t.WriteErrors(err)
+
+ if v == nil {
+ return
+ }
+
+ debug.WriteNode(t, r, v, &debug.Config{Cwd: t.Dir})
+ fmt.Fprintln(t)
+ })
+}
+
+var alwaysSkip = map[string]string{
+ "compile/erralias": "compile error",
+}
+
+var needFix = map[string]string{
+ "fulleval/048_dont_pass_incomplete_values_to_builtins": "import",
+ "fulleval/050_json_Marshaling_detects_incomplete": "import",
+ "fulleval/051_detectIncompleteYAML": "import",
+ "fulleval/052_detectIncompleteJSON": "import",
+ "fulleval/056_issue314": "import",
+ "resolve/013_custom_validators": "import",
+
+ "export/027": "cycle",
+ "export/028": "cycle",
+ "export/030": "cycle",
+
+ "cycle/025_cannot_resolve_references_that_would_be_ambiguous": "cycle",
+
+ "export/020": "builtin",
+ "resolve/034_closing_structs": "builtin",
+ "resolve/048_builtins": "builtin",
+
+ "fulleval/027_len_of_incomplete_types": "builtin",
+
+ "fulleval/032_or_builtin_should_not_fail_on_non-concrete_empty_list": "builtin",
+
+ "fulleval/049_alias_reuse_in_nested_scope": "builtin",
+ "fulleval/053_issue312": "builtin",
+}
+
+// TestX is for debugging. Do not delete.
+func TestX(t *testing.T) {
+ t.Skip()
+ in := `
+ // max: >99 | *((5|*1) & 5)
+ // *( 5 | *_|_ )
+ // 1 | *((5|*1) & 5)
+
+
+ max: >= (num+0) | * (num+0)
+ res: !=4 | * 1
+ num: *(1+(res+0)) | >(res+0)
+
+ // (1 | *2 | 3) & (1 | 2 | *3)
+
+ // m1: (*1 | (*2 | 3)) & (>=2 & <=3)
+ // m2: (*1 | (*2 | 3)) & (2 | 3)
+ // m3: (*1 | *(*2 | 3)) & (2 | 3)
+ // b: (*"a" | "b") | "c"
+ // {a: 1} | {b: 2}
+ `
+
+ if strings.TrimSpace(in) == "" {
+ t.Skip()
+ }
+
+ file, err := parser.ParseFile("TestX", in)
+ if err != nil {
+ t.Fatal(err)
+ }
+ r := runtime.New()
+
+ b, err := format.Node(file)
+ _, _ = b, err
+ // fmt.Println(string(b), err)
+
+ v, err := compile.Files(nil, r, file)
+ if err != nil {
+ t.Fatal(err)
+ }
+
+ ctx := NewContext(r, v)
+
+ ctx.Unify(ctx, v, adt.Finalized)
+ // if err != nil {
+ // t.Fatal(err)
+ // }
+
+ t.Error(debug.NodeString(r, v, nil))
+}
diff --git a/internal/core/eval/optionals.go b/internal/core/eval/optionals.go
new file mode 100644
index 0000000..1dbd376
--- /dev/null
+++ b/internal/core/eval/optionals.go
@@ -0,0 +1,236 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+// TODO: rename this file to fieldset.go
+
+import "cuelang.org/go/internal/core/adt"
+
+// fieldSet represents the fields for a single struct literal, along
+// with the constraints of fields that may be added.
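+//
+// For example (illustrative): for the struct literal
+//
+//	{a: 1, b?: int, [string]: int, ...}
+//
+// the fieldSet records a and b (with b's optional conjunct int), records the
+// pattern constraint [string]: int as a bulk field, and marks the set as open
+// because of the ellipsis.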
+type fieldSet struct {
+ // TODO: look at consecutive identical environments to figure out
+ // what belongs to the same definition?
+ env *adt.Environment
+
+ // fields records the optional conjuncts of all explicit fields.
+ // Required fields are marked with an empty list of optional conjuncts.
+ fields []field
+
+ // literal map[adt.Feature][]adt.Node
+
+ // excluded are all literal fields that already exist.
+ bulk []bulkField
+ additional []adt.Expr
+ isOpen bool // has a ...
+}
+
+type field struct {
+ label adt.Feature
+ optional []adt.Node
+}
+
+type bulkField struct {
+ check fieldMatcher
+ expr adt.Node // *adt.BulkOptionalField // Conjunct
+}
+
+func (o *fieldSet) Accept(c *adt.OpContext, f adt.Feature) bool {
+ if len(o.additional) > 0 {
+ return true
+ }
+ if o.fieldIndex(f) >= 0 {
+ return true
+ }
+ for _, b := range o.bulk {
+ if b.check.Match(c, f) {
+ return true
+ }
+ }
+ return false
+}
+
+// MatchAndInsert finds matching optional parts for a given Arc and adds its
+// conjuncts. Bulk fields are only applied if no fields match, and additional
+// constraints are only added if neither regular nor bulk fields match.
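+//
+// For example (illustrative): given a fieldSet for {foo?: 1, [string]: int},
+// an arc labeled foo receives the conjunct 1, any other regular string-labeled
+// arc receives the pattern conjunct int, and additional (...) constraints
+// would only be added if neither of those matched.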
+func (o *fieldSet) MatchAndInsert(c *adt.OpContext, arc *adt.Vertex) {
+ env := o.env
+
+ // Match normal fields
+ p := 0
+ for ; p < len(o.fields); p++ {
+ if o.fields[p].label == arc.Label {
+ break
+ }
+ }
+ if p < len(o.fields) {
+ for _, e := range o.fields[p].optional {
+ arc.AddConjunct(adt.MakeConjunct(env, e))
+ }
+ return
+ }
+
+ if !arc.Label.IsRegular() {
+ return
+ }
+
+ bulkEnv := *env
+ bulkEnv.DynamicLabel = arc.Label
+
+ // match bulk optional fields / pattern properties
+ matched := false
+ for _, f := range o.bulk {
+ if f.check.Match(c, arc.Label) {
+ matched = true
+ if f.expr != nil {
+ arc.AddConjunct(adt.MakeConjunct(&bulkEnv, f.expr))
+ }
+ }
+ }
+ if matched {
+ return
+ }
+
+ // match others
+ for _, x := range o.additional {
+ arc.AddConjunct(adt.MakeConjunct(env, x))
+ }
+}
+
+func (o *fieldSet) fieldIndex(f adt.Feature) int {
+ for i := range o.fields {
+ if o.fields[i].label == f {
+ return i
+ }
+ }
+ return -1
+}
+
+func (o *fieldSet) MarkField(c *adt.OpContext, x *adt.Field) {
+ if o.fieldIndex(x.Label) < 0 {
+ o.fields = append(o.fields, field{label: x.Label})
+ }
+}
+
+func (o *fieldSet) AddOptional(c *adt.OpContext, x *adt.OptionalField) {
+ p := o.fieldIndex(x.Label)
+ if p < 0 {
+ p = len(o.fields)
+ o.fields = append(o.fields, field{label: x.Label})
+ }
+ o.fields[p].optional = append(o.fields[p].optional, x)
+}
+
+func (o *fieldSet) AddDynamic(c *adt.OpContext, env *adt.Environment, x *adt.DynamicField) {
+ // not in bulk: count as regular field?
+ o.bulk = append(o.bulk, bulkField{dynamicMatcher{env, x.Key}, nil})
+}
+
+func (o *fieldSet) AddBulk(c *adt.OpContext, x *adt.BulkOptionalField) {
+ v, ok := c.Evaluate(o.env, x.Filter)
+ if !ok {
+ // TODO: handle dynamically
+ return
+ }
+ switch f := v.(type) {
+ case *adt.Num:
+ // Just assert an error. Lists have not been expanded yet at
+ // this point, so there is no need to check for existing
+ // fields.
+ l, err := adt.MakeLabel(x.Src, c.Int64(f), adt.IntLabel)
+ if err != nil {
+ c.AddErr(err)
+ return
+ }
+ o.bulk = append(o.bulk, bulkField{labelMatcher(l), x})
+
+ case *adt.Top:
+ o.bulk = append(o.bulk, bulkField{typeMatcher(adt.TopKind), x})
+
+ case *adt.BasicType:
+ o.bulk = append(o.bulk, bulkField{typeMatcher(f.K), x})
+
+ case *adt.String:
+ l := c.Label(f)
+ o.bulk = append(o.bulk, bulkField{labelMatcher(l), x})
+
+ case adt.Validator:
+ o.bulk = append(o.bulk, bulkField{validateMatcher{f}, x})
+
+ default:
+ // TODO(err): not allowed type
+ }
+}
+
+func (o *fieldSet) AddEllipsis(c *adt.OpContext, x *adt.Ellipsis) {
+ expr := x.Value
+ if x.Value == nil {
+ o.isOpen = true
+ expr = &adt.Top{}
+ }
+ o.additional = append(o.additional, expr)
+}
+
+type fieldMatcher interface {
+ Match(c *adt.OpContext, f adt.Feature) bool
+}
+
+type labelMatcher adt.Feature
+
+func (m labelMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+ return adt.Feature(m) == f
+}
+
+type typeMatcher adt.Kind
+
+func (m typeMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+ switch f.Typ() {
+ case adt.StringLabel:
+ return adt.Kind(m)&adt.StringKind != 0
+
+ case adt.IntLabel:
+ return adt.Kind(m)&adt.IntKind != 0
+ }
+ return false
+}
+
+type validateMatcher struct {
+ adt.Validator
+}
+
+func (m validateMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+ v := f.ToValue(c)
+ return c.Validate(m.Validator, v) == nil
+}
+
+type dynamicMatcher struct {
+ env *adt.Environment
+ expr adt.Expr
+}
+
+func (m dynamicMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+ if !f.IsRegular() || !f.IsString() {
+ return false
+ }
+ v, ok := c.Evaluate(m.env, m.expr)
+ if !ok {
+ return false
+ }
+ s, ok := v.(*adt.String)
+ if !ok {
+ return false
+ }
+ return f.SelectorString(c) == s.Str
+}