internal/core/eval: implement core evaluator

Does not yet implement imports and builtins.

- adds implementations for adt types
- adds eval package with higher-level evaluation
- some tweaks to compile

Change-Id: Ie91bd0bde8a03ed9957f306166042f56aebe19ce
Reviewed-on: https://cue-review.googlesource.com/c/cue/+/6280
Reviewed-by: Marcel van Lohuizen <mpvl@golang.org>
diff --git a/internal/core/eval/closed.go b/internal/core/eval/closed.go
new file mode 100644
index 0000000..10cee11
--- /dev/null
+++ b/internal/core/eval/closed.go
@@ -0,0 +1,340 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+// This file implements the majority of the closed struct semantics.
+// The data is recorded in the Closed field of a Vertex.
+//
+// Each vertex has a set of conjuncts that make up the values of the vertex.
+// Each Conjunct may originate from various sources, like an embedding, field
+// definition or regular value. For the purpose of computing the value, the
+// source of the conjunct is irrelevant. The origin does matter, however,
+// for determining whether a field is allowed in a closed struct. The Closed
+// field keeps track of the kind of origin for this purpose.
+//
+// More precisely, the CloseDef struct explains how the conjuncts of an arc
+// were combined and defines a logical expression on the field sets
+// computed for each conjunct.
+//
+// While evaluating each conjunct, nodeContext keeps track of what changes need
+// to be made to the CloseDef based on the evaluation of the current conjuncts.
+// For instance, if a field references a definition, all other previous
+// checks become irrelevant, as the newly referred-to definitions define an
+// upper bound and will contain all the information that is necessary to
+// determine whether a field may be included.
+//
+// Most of the logic in this file concerns itself with the combination of
+// multiple CloseDef values as well as traversing the structure to validate
+// whether an arc is allowed. The actual fieldSet logic is in optional.go.
+// The overall control and use of the functionality in this file resides
+// in eval.go.
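+//
+// As an illustration (mirroring the cases in closed_test.go), the conjuncts of
+//
+//    a: {      // ID: 0
+//       #A    // ID: 1
+//       #B    // ID: 2
+//       b: #C // ID: 0
+//    }
+//
+// may be recorded as the "or" node
+//
+//    &CloseDef{ID: 0, List: []*CloseDef{{ID: 1}, {ID: 2}, {ID: 0}}}
+//
+// in which a field is allowed if any of the listed entries allows it, whereas
+// a node with IsAnd set requires every entry to allow it.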
+
+import (
+	"cuelang.org/go/cue/errors"
+	"cuelang.org/go/cue/token"
+	"cuelang.org/go/internal/core/adt"
+)
+
+// acceptor implements adt.Acceptor.
+//
+// Note that it keeps track of whether it represents a closed struct. An
+// acceptor is also used to associate a CloseDef with a Vertex, and not
+// all CloseDefs represent a closed struct: a value that contains embeddings may
+// eventually turn into a closed struct. Consider
+//
+//    a: {
+//       b
+//       d: e: int
+//    }
+//    b: d: {
+//       #A & #B
+//    }
+//
+// At the point of evaluating `a`, the struct is not yet closed. However,
+// descending into `d` will trigger the inclusion of definitions, which in turn
+// causes the struct to be closed. At this point, it is important to know that
+// `b` originated from an embedding, as otherwise `e` may not be allowed.
+//
+type acceptor struct {
+	tree     *CloseDef
+	fields   []fieldSet
+	isClosed bool
+	isList   bool
+	openList bool
+}
+
+func (a *acceptor) Accept(c *adt.OpContext, f adt.Feature) bool {
+	if a.isList {
+		return a.openList
+	}
+	if !a.isClosed {
+		return true
+	}
+	if f == adt.InvalidLabel {
+		return false
+	}
+	if f.IsInt() {
+		return a.openList
+	}
+	return a.verifyArcAllowed(c, f) == nil
+}
+
+func (a *acceptor) MatchAndInsert(c *adt.OpContext, v *adt.Vertex) {
+	for _, fs := range a.fields {
+		fs.MatchAndInsert(c, v)
+	}
+}
+
+// CloseDef defines how individual FieldSets (corresponding to conjuncts)
+// combine to determine whether a field is contained in a closed set.
+//
+// Nodes with a non-empty List and IsAnd set to false represent embeddings.
+// In that case, the ID is that of the node that contained the embedding.
+//
+// Nodes with a non-empty List and IsAnd set to true represent conjunctions of
+// definitions. In this case, a field must be contained in each definition.
+//
+// If a node has both conjunctions of definitions and embeddings, only the
+// former are maintained. Conjunctions of definitions define an upper bound
+// of the set of allowed fields in that case and the embeddings will not add
+// any value.
+type CloseDef struct {
+	ID    uint32
+	IsAnd bool
+	List  []*CloseDef
+}
+
+// isOr reports whether this is a node representing embeddings.
+func isOr(c *CloseDef) bool {
+	return len(c.List) > 0 && !c.IsAnd
+}
+
+// updateClosed returns a new node in which all non-AND nodes whose ID matches
+// a key in replace are substituted with the corresponding replacement value.
+//
+// Vertex only keeps track of a flat list of conjuncts and does not keep track
+// of the hierarchy of how these were derived. This function allows rewriting
+// a CloseDef tree based on replacement information gathered during evaluation
+// of this flat list.
+//
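+// For example (taken from the embeddingOverruledByField case in
+// closed_test.go), given
+//
+//    c       = &CloseDef{ID: 0, List: []*CloseDef{{ID: 1}, {ID: 2}, {ID: 0}}}
+//    replace = map[uint32]*CloseDef{0: {ID: 3}}
+//
+// updateClosed returns &CloseDef{ID: 3}: the node recorded under ID 0 is
+// substituted wholesale by the replacement gathered during evaluation.
+//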
+func updateClosed(c *CloseDef, replace map[uint32]*CloseDef) *CloseDef { // used in eval.go
+	switch {
+	case c == nil:
+		and := []*CloseDef{}
+		for _, c := range replace {
+			and = append(and, c)
+		}
+		switch len(and) {
+		case 0:
+		case 1:
+			c = and[0]
+		default:
+			c = &CloseDef{IsAnd: true, List: and}
+		}
+		// needClose
+	case len(replace) > 0:
+		c = updateClosedRec(c, replace)
+	}
+	return c
+}
+
+func updateClosedRec(c *CloseDef, replace map[uint32]*CloseDef) *CloseDef {
+	if c == nil {
+		return nil
+	}
+
+	// If c is a leaf or AND node, replace it outright. If both are an OR node,
+	// merge the lists.
+	if len(c.List) == 0 || !c.IsAnd {
+		if sub := replace[c.ID]; sub != nil {
+			if isOr(sub) && isOr(c) {
+				sub.List = append(sub.List, c.List...)
+			}
+			return sub
+		}
+	}
+
+	changed := false
+	buf := make([]*CloseDef, len(c.List))
+	k := 0
+	for _, c := range c.List {
+		n := updateClosedRec(c, replace)
+		changed = changed || n != c
+		if n != nil {
+			buf[k] = n
+			k++
+		}
+	}
+	if !changed {
+		return c
+	}
+
+	if k == 1 {
+		return buf[0]
+	}
+
+	return &CloseDef{ID: c.ID, IsAnd: c.IsAnd, List: buf[:k]}
+}
+
+// updateReplace is called after evaluating a conjunct at the top of the arc
+// to update the replacement information with the gathered CloseDef info.
+func (n *nodeContext) updateReplace(env *adt.Environment) { // used in eval.go
+	if n.newClose == nil {
+		return
+	}
+
+	if n.replace == nil {
+		n.replace = make(map[uint32]*CloseDef)
+	}
+
+	id := uint32(0)
+	if env != nil {
+		id = env.CloseID
+	}
+
+	n.replace[id] = updateClose(n.replace[id], n.newClose)
+	n.newClose = nil
+}
+
+// appendLists creates a new CloseDef with the elements of the lists of orig
+// and update appended. It takes the ID of orig. It does not alter
+// either orig or update.
+func appendLists(orig, update *CloseDef) *CloseDef {
+	list := make([]*CloseDef, len(orig.List)+len(update.List))
+	copy(list[copy(list, orig.List):], update.List)
+	c := *orig
+	c.List = list
+	return &c
+}
+
+// updateClose merges update into orig without altering either.
+//
+// The merge takes into account whether it is an embedding node or not.
+// Most notably, if an "And" node is combined with an embedding, the
+// embedding information may be discarded.
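+//
+// For example, per the cases below, merging two leaf nodes, each corresponding
+// to a definition's field set,
+//
+//    updateClose(&CloseDef{ID: 1}, &CloseDef{ID: 2})
+//
+// yields &CloseDef{IsAnd: true, List: []*CloseDef{{ID: 1}, {ID: 2}}}, so that
+// a field must be allowed by both definitions.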
+func updateClose(orig, update *CloseDef) *CloseDef {
+	switch {
+	case orig == nil:
+		return update
+	case isOr(orig):
+		if !isOr(update) {
+			return update
+		}
+		return appendLists(orig, update)
+	case isOr(update):
+		return orig
+	case len(orig.List) == 0 && len(update.List) == 0:
+		return &CloseDef{IsAnd: true, List: []*CloseDef{orig, update}}
+	case len(orig.List) == 0:
+		update.List = append(update.List, orig)
+		return update
+	default: // isAnd(orig)
+		return appendLists(orig, update)
+	}
+}
+
+func (n *nodeContext) addAnd(c *CloseDef) { // used in eval.go
+	switch {
+	case n.newClose == nil:
+		n.newClose = c
+	case isOr(n.newClose):
+		n.newClose = c
+	case len(n.newClose.List) == 0:
+		n.newClose = &CloseDef{
+			IsAnd: true,
+			List:  []*CloseDef{n.newClose, c},
+		}
+	default:
+		n.newClose.List = append(n.newClose.List, c)
+	}
+}
+
+func (n *nodeContext) addOr(parentID uint32, c *CloseDef) { // used in eval.go
+	switch {
+	case n.newClose == nil:
+		d := &CloseDef{ID: parentID, List: []*CloseDef{{ID: parentID}}}
+		if c != nil {
+			d.List = append(d.List, c)
+		}
+		n.newClose = d
+	case isOr(n.newClose):
+		d := n.newClose
+		if c != nil {
+			d.List = append(d.List, c)
+		}
+	}
+}
+
+// verifyArcAllowed checks whether f is an allowed label within the current
+// node. It traverses c considering the "or" semantics of embeddings and the
+// "and" semantics of conjunctions. It returns an error if the field is not
+// allowed.
+func (n *acceptor) verifyArcAllowed(ctx *adt.OpContext, f adt.Feature) *adt.Bottom {
+	filter := f.IsString() || f == adt.InvalidLabel
+	if filter && !n.verifyArcRecursive(ctx, n.tree, f) {
+		label := f.SelectorString(ctx)
+		return &adt.Bottom{
+			Err: errors.Newf(token.NoPos, "field `%s` not allowed", label),
+		}
+	}
+	return nil
+}
+
+func (n *acceptor) verifyArcRecursive(ctx *adt.OpContext, c *CloseDef, f adt.Feature) bool {
+	if len(c.List) == 0 {
+		return n.verifyDefinition(ctx, c.ID, f)
+	}
+	if c.IsAnd {
+		for _, c := range c.List {
+			if !n.verifyArcRecursive(ctx, c, f) {
+				return false
+			}
+		}
+		return true
+	}
+	for _, c := range c.List {
+		if n.verifyArcRecursive(ctx, c, f) {
+			return true
+		}
+	}
+	return false
+}
+
+// verifyDefinition reports whether f is a valid member for any of the fieldSets
+// with the same closeID.
+func (n *acceptor) verifyDefinition(ctx *adt.OpContext, closeID uint32, f adt.Feature) (ok bool) {
+	for _, o := range n.fields {
+		if o.env.CloseID != closeID {
+			continue
+		}
+
+		if len(o.additional) > 0 || o.isOpen {
+			return true
+		}
+
+		for _, g := range o.fields {
+			if f == g.label {
+				return true
+			}
+		}
+
+		for _, b := range o.bulk {
+			if b.check.Match(ctx, f) {
+				return true
+			}
+		}
+	}
+	return false
+}
diff --git a/internal/core/eval/closed_test.go b/internal/core/eval/closed_test.go
new file mode 100644
index 0000000..144ecce
--- /dev/null
+++ b/internal/core/eval/closed_test.go
@@ -0,0 +1,135 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+	"testing"
+
+	"github.com/google/go-cmp/cmp"
+)
+
+func TestRewriteClosed(t *testing.T) {
+	testCases := []struct {
+		desc    string
+		close   *CloseDef
+		replace map[uint32]*CloseDef
+		want    *CloseDef
+	}{{
+		desc: "a: #A & #B",
+		close: &CloseDef{
+			ID: 1,
+		},
+		replace: map[uint32]*CloseDef{
+			1: {ID: 1, IsAnd: true, List: []*CloseDef{{ID: 2}, {ID: 3}}},
+		},
+		want: &CloseDef{
+			ID:    0x01,
+			IsAnd: true,
+			List:  []*CloseDef{{ID: 2}, {ID: 3}},
+		},
+	}, {
+		// Eliminate an embedding for which there are no more entries.
+		// 	desc: "eliminateOneEmbedding",
+		// 	close: &CloseDef{
+		// 		ID: 0,
+		// 		List: []*CloseDef{
+		// 			{ID: 2},
+		// 			{ID: 3},
+		// 		},
+		// 	},
+		// 	replace: map[uint32]*CloseDef{2: nil},
+		// 	want:    &CloseDef{ID: 2},
+		// }, {
+		// Do not eliminate an embedding that has a replacement.
+		desc: "eliminateOneEmbeddingByMultiple",
+		close: &CloseDef{
+			ID: 0,
+			List: []*CloseDef{
+				{ID: 2},
+				{ID: 3},
+			},
+		},
+		replace: map[uint32]*CloseDef{
+			2: nil,
+			3: {ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
+		},
+		want: &CloseDef{
+			ID: 0x00,
+			List: []*CloseDef{
+				{ID: 2},
+				{ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
+			},
+		},
+	}, {
+		// Select b within a
+		// a: {      // ID: 0
+		//     #A    // ID: 1
+		//     #B    // ID: 2
+		//     b: #C // ID: 0
+		// }
+		// #C: {
+		//     b: #D // ID: 3
+		// }
+		//
+		desc: "embeddingOverruledByField",
+		close: &CloseDef{
+			ID: 0,
+			List: []*CloseDef{
+				{ID: 1},
+				{ID: 2},
+				{ID: 0},
+			},
+		},
+		replace: map[uint32]*CloseDef{0: {ID: 3}},
+		want:    &CloseDef{ID: 3},
+	}, {
+		// Select b within a
+		// a: {      // ID: 0
+		//     #A    // ID: 1
+		//     #B    // ID: 2
+		//     b: #C // ID: 0
+		// }
+		// #C: {
+		//     b: #D & #E // ID: 3 & 4
+		// }
+		//
+		desc: "embeddingOverruledByMultiple",
+		close: &CloseDef{
+			ID: 0,
+			List: []*CloseDef{
+				{ID: 1},
+				{ID: 2},
+				{ID: 0},
+			},
+		},
+		replace: map[uint32]*CloseDef{
+			0: {IsAnd: true, List: []*CloseDef{{ID: 3}, {ID: 4}}},
+		},
+		want: &CloseDef{
+			ID:    0,
+			IsAnd: true,
+			List:  []*CloseDef{{ID: 3}, {ID: 4}},
+		},
+	}}
+
+	for _, tc := range testCases {
+		t.Run(tc.desc, func(t *testing.T) {
+			got := updateClosed(tc.close, tc.replace)
+			if !cmp.Equal(got, tc.want) {
+				t.Error(cmp.Diff(got, tc.want))
+			}
+		})
+	}
+}
diff --git a/internal/core/eval/disjunct.go b/internal/core/eval/disjunct.go
new file mode 100644
index 0000000..db5df3c
--- /dev/null
+++ b/internal/core/eval/disjunct.go
@@ -0,0 +1,376 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+	"sort"
+
+	"cuelang.org/go/cue/errors"
+	"cuelang.org/go/cue/token"
+	"cuelang.org/go/internal/core/adt"
+)
+
+// Nodes may not reenter a disjunction.
+//
+// Copy one layer deep; throw away items on failure.
+
+// DISJUNCTION ALGORITHM
+//
+// The basic concept of the algorithm is to use backtracking to find valid
+// disjunctions. The algorithm can stop if two matching disjuncts are found
+// where one does not subsume the other.
+//
+// At a later point, we can introduce a filter step to filter out possible
+// disjuncts based on, say, discriminator fields or field exclusivity (oneOf
+// fields in Protobuf).
+//
+// To understand the details of the algorithm, it is important to understand
+// some properties of disjunction.
+//
+//
+// EVALUATION OF A DISJUNCTION IS SELF CONTAINED
+//
+// In other words, fields outside of a disjunction cannot bind to values within
+// a disjunction whilst evaluating that disjunction. This allows the computation
+// of disjunctions to be isolated from side effects.
+//
+// The intuition behind this is as follows: as a disjunction is not a concrete
+// value, it is not possible to look up a field within a disjunction if it has
+// not yet been evaluated. So if a reference within a disjunction that is needed
+// to disambiguate that disjunction refers to a field outside the scope of the
+// disjunction which, in turn, refers to a field within the disjunction, this
+// results in a cycle error. We achieve this by not removing the cycle marker of
+// the Vertex of the disjunction until the disjunction is resolved.
+//
+// Note that the following disjunct is still allowed:
+//
+//    a: 1
+//    b: a
+//
+// Even though `a` refers to the root of the disjunction, it does not _select
+// into_ the disjunction. Implementation-wise, it also doesn't have to, as the
+// respective vertex is available within the Environment. Referencing a node
+// outside the disjunction that in turn selects the disjunction root, however,
+// will result in a detected cycle.
+//
+// As usual, a cycle detected here should be marked as incomplete, so that
+// the referring node will not be fixed to an error prematurely.
+//
+//
+// SUBSUMPTION OF AMBIGUOUS DISJUNCTS
+//
+// A disjunction can be evaluated to a concrete value if only one disjunct
+// remains. Aside from disambiguating through unification failure, disjuncts
+// may also be disambiguated by taking the least specific of two disjuncts.
+// For instance, if a subsumes b, then the result of disjunction may be a.
+//
+//   NEW ALGORITHM NO LONGER VERIFIES SUBSUMPTION. SUBSUMPTION IS INHERENTLY
+//   IMPRECISE (DUE TO BULK OPTIONAL FIELDS). OTHER THAN THAT, FOR SCALAR VALUES
+//   IT JUST MEANS THERE IS AMBIGUITY, AND FOR STRUCTS IT CAN LEAD TO STRANGE
+//   CONSEQUENCES.
+//
+//   USE EQUALITY INSTEAD:
+//     - Undefined == error for optional fields.
+//     - So only need to check exact labels for vertices.
+
+type envDisjunct struct {
+	env         *adt.Environment
+	values      []disjunct
+	numDefaults int
+	cloneID     uint32
+	isEmbed     bool
+}
+
+type disjunct struct {
+	expr      adt.Expr
+	isDefault bool
+}
+
+func (n *nodeContext) addDisjunction(env *adt.Environment, x *adt.DisjunctionExpr, cloneID uint32, isEmbed bool) {
+	a := []disjunct{}
+
+	numDefaults := 0
+	for _, v := range x.Values {
+		isDef := v.Default // || n.hasDefaults(env, v.Val)
+		if isDef {
+			numDefaults++
+		}
+		a = append(a, disjunct{v.Val, isDef})
+	}
+
+	sort.SliceStable(a, func(i, j int) bool {
+		return !a[j].isDefault && a[i].isDefault != a[j].isDefault
+	})
+
+	n.disjunctions = append(n.disjunctions,
+		envDisjunct{env, a, numDefaults, cloneID, isEmbed})
+}
+
+func (n *nodeContext) addDisjunctionValue(env *adt.Environment, x *adt.Disjunction, cloneID uint32, isEmbed bool) {
+	a := []disjunct{}
+
+	for i, v := range x.Values {
+		a = append(a, disjunct{v, i < x.NumDefaults})
+	}
+
+	n.disjunctions = append(n.disjunctions,
+		envDisjunct{env, a, x.NumDefaults, cloneID, isEmbed})
+}
+
+func (n *nodeContext) updateResult() (isFinal bool) {
+	n.postDisjunct()
+
+	if n.hasErr() {
+		return n.isFinal
+	}
+
+	d := n.nodeShared.disjunct
+	if d == nil {
+		d = &adt.Disjunction{}
+		n.nodeShared.disjunct = d
+	}
+
+	result := *n.node
+	if result.Value == nil {
+		result.Value = n.getValidators()
+	}
+
+	for _, v := range d.Values {
+		if Equal(n.ctx, v, &result) {
+			return isFinal
+		}
+	}
+
+	p := &result
+	d.Values = append(d.Values, p)
+	if n.defaultMode == isDefault {
+		// Keep defaults sorted first.
+		i := d.NumDefaults
+		j := i + 1
+		copy(d.Values[j:], d.Values[i:])
+		d.Values[i] = p
+		d.NumDefaults = j
+	}
+
+	// return n.isFinal
+
+	switch {
+	case !n.nodeShared.hasResult():
+
+	case n.nodeShared.isDefault() && n.defaultMode != isDefault:
+		return n.isFinal
+
+	case !n.nodeShared.isDefault() && n.defaultMode == isDefault:
+
+	default:
+		if Equal(n.ctx, n.node, n.result()) {
+			return n.isFinal
+		}
+
+		// TODO: Compute fancy error message.
+		n.nodeShared.resultNode = n
+		// n.nodeShared.result.AddErr(n.ctx, &adt.Bottom{
+		// 	Code: adt.IncompleteError,
+		// 	Err:  errors.Newf(n.ctx.Pos(), "ambiguous disjunction"),
+		// })
+		n.nodeShared.result_.Arcs = nil
+		n.nodeShared.result_.Structs = nil
+		return n.isFinal // n.defaultMode == isDefault
+	}
+
+	n.nodeShared.resultNode = n
+	n.nodeShared.setResult(n.node)
+
+	return n.isFinal
+}
+
+func (n *nodeContext) tryDisjuncts() (finished bool) {
+	if !n.insertDisjuncts() || !n.updateResult() {
+		if !n.isFinal {
+			return false // More iterations to do.
+		}
+	}
+
+	if n.nodeShared.hasResult() {
+		return true // found something
+	}
+
+	if len(n.disjunctions) > 0 {
+		b := &adt.Bottom{
+			// TODO(errors): we should not make this error worse by discarding
+			// the type or error. Using IncompleteError is a compromise. But
+			// really we should keep track of the errors and return a more
+			// accurate result here.
+			Code: adt.IncompleteError,
+			Err:  errors.Newf(token.NoPos, "empty disjunction"),
+		}
+		n.node.AddErr(n.ctx, b)
+	}
+	return true
+}
+
+// TODO: add proper conjuncts for the ones used by the disjunctions to replace
+// the original source.
+//
+func (n *nodeContext) insertDisjuncts() (inserted bool) {
+	p := 0
+	inserted = true
+
+	disjunctions := []envDisjunct{}
+
+	// fmt.Println("----", debug.NodeString(n.ctx, n.node, nil))
+	for _, d := range n.disjunctions {
+		disjunctions = append(disjunctions, d)
+
+		sub := len(n.disjunctions)
+		defMode, ok := n.insertSingleDisjunct(p, d, false)
+		p++
+		if !ok {
+			inserted = false
+			break
+		}
+
+		subMode := []defaultMode{}
+		for ; sub < len(n.disjunctions); sub++ {
+			d := n.disjunctions[sub]
+			disjunctions = append(disjunctions, d)
+			mode, ok := n.insertSingleDisjunct(p, d, true)
+			p++
+			if !ok {
+				inserted = false
+				break
+			}
+			subMode = append(subMode, mode)
+		}
+		for i := len(subMode) - 1; i >= 0; i-- {
+			defMode = combineSubDefault(defMode, subMode[i])
+		}
+
+		// fmt.Println("RESMODE", defMode, combineDefault(n.defaultMode, defMode))
+
+		n.defaultMode = combineDefault(n.defaultMode, defMode)
+	}
+
+	// Find last disjunction at which there is no overflow.
+	for ; p > 0 && n.stack[p-1]+1 >= len(disjunctions[p-1].values); p-- {
+	}
+	if p > 0 {
+		// Increment a valid position and set all subsequent entries to 0.
+		n.stack[p-1]++
+		n.stack = n.stack[:p]
+	}
+	return inserted
+}
+
+func (n *nodeContext) insertSingleDisjunct(p int, d envDisjunct, isSub bool) (mode defaultMode, ok bool) {
+	if p >= len(n.stack) {
+		n.stack = append(n.stack, 0)
+	}
+
+	k := n.stack[p]
+	v := d.values[k]
+	n.isFinal = n.isFinal && k == len(d.values)-1
+	c := adt.MakeConjunct(d.env, v.expr)
+	n.addExprConjunct(c, d.cloneID, d.isEmbed)
+
+	for n.expandOne() {
+	}
+
+	switch {
+	case d.numDefaults == 0:
+		mode = maybeDefault
+	case v.isDefault:
+		mode = isDefault
+	default:
+		mode = notDefault
+	}
+
+	return mode, !n.hasErr()
+}
+
+// Default rules from spec:
+//
+// U1: (v1, d1) & v2       => (v1&v2, d1&v2)
+// U2: (v1, d1) & (v2, d2) => (v1&v2, d1&d2)
+//
+// D1: (v1, d1) | v2       => (v1|v2, d1)
+// D2: (v1, d1) | (v2, d2) => (v1|v2, d1|d2)
+//
+// M1: *v        => (v, v)
+// M2: *(v1, d1) => (v1, d1)
+// or
+// M2: *(v1, d1) => (v1, v1)
+// or
+// M2: *(v1, d1) => v1 if d1 == _|_
+// M2:              d1 otherwise
+//
+// def + maybe -> def
+// not + maybe -> def
+// not + def   -> def
+
+type defaultMode int
+
+const (
+	maybeDefault defaultMode = iota
+	notDefault
+	isDefault
+)
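+
+// As a small illustration of the tables implemented below:
+//
+//    combineSubDefault(notDefault, isDefault) // isDefault
+//    combineDefault(notDefault, isDefault)    // notDefault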
+
+func combineSubDefault(a, b defaultMode) defaultMode {
+	switch {
+	case a == maybeDefault && b == maybeDefault:
+		return maybeDefault
+	case a == maybeDefault && b == notDefault:
+		return notDefault
+	case a == maybeDefault && b == isDefault:
+		return isDefault
+	case a == notDefault && b == maybeDefault:
+		return notDefault
+	case a == notDefault && b == notDefault:
+		return notDefault
+	case a == notDefault && b == isDefault:
+		return isDefault
+	case a == isDefault && b == maybeDefault:
+		return isDefault
+	case a == isDefault && b == notDefault:
+		return notDefault
+	case a == isDefault && b == isDefault:
+		return isDefault
+	default:
+		panic("unreachable")
+	}
+}
+
+func combineDefault(a, b defaultMode) defaultMode {
+	if a > b {
+		a, b = b, a
+	}
+	switch {
+	case a == maybeDefault && b == maybeDefault:
+		return maybeDefault
+	case a == maybeDefault && b == notDefault:
+		return notDefault
+	case a == maybeDefault && b == isDefault:
+		return isDefault
+	case a == notDefault && b == notDefault:
+		return notDefault
+	case a == notDefault && b == isDefault:
+		return notDefault
+	case a == isDefault && b == isDefault:
+		return isDefault
+	default:
+		panic("unreachable")
+	}
+}
diff --git a/internal/core/eval/equality.go b/internal/core/eval/equality.go
new file mode 100644
index 0000000..cb9f9da
--- /dev/null
+++ b/internal/core/eval/equality.go
@@ -0,0 +1,134 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import "cuelang.org/go/internal/core/adt"
+
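+// Equal reports whether two values are equal. Vertices are compared
+// structurally by their arcs and values; scalars are compared with the
+// evaluator's equality operator. It is used in this package to detect
+// duplicate and ambiguous disjuncts.
+//
+// A minimal usage sketch (assuming ctx is a valid *adt.OpContext and a and b
+// are finalized values):
+//
+//    if Equal(ctx, a, b) {
+//        // a and b are interchangeable as disjuncts.
+//    }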
+func Equal(ctx *adt.OpContext, v, w adt.Value) bool {
+	if x, ok := v.(*adt.Vertex); ok {
+		return equalVertex(ctx, x, w)
+	}
+	if y, ok := w.(*adt.Vertex); ok {
+		return equalVertex(ctx, y, v)
+	}
+	return equalTerminal(ctx, v, w)
+}
+
+func equalVertex(ctx *adt.OpContext, x *adt.Vertex, v adt.Value) bool {
+	y, ok := v.(*adt.Vertex)
+	if !ok {
+		return false
+	}
+	if x == y {
+		return true
+	}
+	if len(x.Arcs) != len(y.Arcs) {
+		return false
+	}
+	if len(x.Arcs) == 0 && len(y.Arcs) == 0 {
+		return equalTerminal(ctx, x.Value, y.Value)
+	}
+
+loop1:
+	for _, a := range x.Arcs {
+		for _, b := range y.Arcs {
+			if a.Label == b.Label {
+				if !Equal(ctx, a, b) {
+					return false
+				}
+				continue loop1
+			}
+		}
+		return false
+	}
+
+	// We do not need to do the following check, because of the pigeon-hole principle.
+	// loop2:
+	// 	for _, b := range y.Arcs {
+	// 		for _, a := range x.Arcs {
+	// 			if a.Label == b.Label {
+	// 				continue loop2
+	// 			}
+	// 		}
+	// 		return false
+	// 	}
+
+	return equalTerminal(ctx, x.Value, y.Value)
+}
+
+func equalTerminal(ctx *adt.OpContext, v, w adt.Value) bool {
+	if v == w {
+		return true
+	}
+	switch x := v.(type) {
+	case *adt.Num, *adt.String, *adt.Bool, *adt.Bytes:
+		if b, ok := adt.BinOp(ctx, adt.EqualOp, v, w).(*adt.Bool); ok {
+			return b.B
+		}
+		return false
+
+	// TODO: for the remainder we are dealing with non-concrete values, so we
+	// could also just not bother.
+
+	case *adt.BoundValue:
+		if y, ok := w.(*adt.BoundValue); ok {
+			return x.Op == y.Op && Equal(ctx, x.Value, y.Value)
+		}
+
+	case *adt.BasicType:
+		if y, ok := w.(*adt.BasicType); ok {
+			return x.K == y.K
+		}
+
+	case *adt.Conjunction:
+		y, ok := w.(*adt.Conjunction)
+		if !ok || len(x.Values) != len(y.Values) {
+			return false
+		}
+		// always ordered the same
+		for i, xe := range x.Values {
+			if !Equal(ctx, xe, y.Values[i]) {
+				return false
+			}
+		}
+		return true
+
+	case *adt.Disjunction:
+		// The best way to compute this is with subsumption, but even that won't
+		// be too accurate. Assume structural equivalence for now.
+		y, ok := w.(*adt.Disjunction)
+		if !ok || len(x.Values) != len(y.Values) {
+			return false
+		}
+		for i, xe := range x.Values {
+			if !Equal(ctx, xe, y.Values[i]) {
+				return false
+			}
+		}
+		return true
+
+	case *adt.ListMarker:
+		_, ok := w.(*adt.ListMarker)
+		return ok
+
+	case *adt.StructMarker:
+		_, ok := w.(*adt.StructMarker)
+		return ok
+
+	case *adt.BuiltinValidator:
+	}
+
+	return false
+}
diff --git a/internal/core/eval/eval.go b/internal/core/eval/eval.go
new file mode 100644
index 0000000..28fe99b
--- /dev/null
+++ b/internal/core/eval/eval.go
@@ -0,0 +1,1504 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package eval contains the high-level CUE evaluation strategy.
+//
+// CUE allows for a significant amount of freedom in the order of evaluation
+// due to the commutativity of the unification operation. This package
+// implements one of the possible strategies.
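+//
+// A minimal sketch of driving the evaluator (assuming r is an adt.Runtime and
+// v is a compiled, not yet evaluated *adt.Vertex):
+//
+//    e := New(r)
+//    ctx := e.NewContext(v)
+//    e.Unify(ctx, v, adt.Finalized)
+//
+// Evaluate is a convenience function that performs these steps.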
+package eval
+
+// TODO:
+//   - result should be nodeContext: this allows optionals info to be extracted
+//     and computed.
+//
+
+import (
+	"fmt"
+
+	"cuelang.org/go/cue/ast"
+	"cuelang.org/go/cue/errors"
+	"cuelang.org/go/cue/token"
+	"cuelang.org/go/internal/core/adt"
+	"cuelang.org/go/internal/core/debug"
+)
+
+func Evaluate(r adt.Runtime, v *adt.Vertex) {
+	format := func(n adt.Node) string {
+		return debug.NodeString(r, n, printConfig)
+	}
+	e := New(r)
+	c := adt.New(v, &adt.Config{
+		Runtime: r,
+		Unifier: e,
+		Format:  format,
+	})
+	e.Unify(c, v, adt.Finalized)
+}
+
+func New(r adt.Runtime) *Evaluator {
+	return &Evaluator{r: r, index: r}
+}
+
+// TODO: Note: NewContext takes essentially a cue.Value. By making this
+// type more central, we can perhaps avoid context creation.
+
+func NewContext(r adt.Runtime, v *adt.Vertex) *adt.OpContext {
+	e := New(r)
+	return e.NewContext(v)
+}
+
+var printConfig = &debug.Config{Compact: true}
+
+func (e *Evaluator) NewContext(v *adt.Vertex) *adt.OpContext {
+	format := func(n adt.Node) string {
+		return debug.NodeString(e.r, n, printConfig)
+	}
+	return adt.New(v, &adt.Config{
+		Runtime: e.r,
+		Unifier: e,
+		Format:  format,
+	})
+}
+
+var structSentinel = &adt.StructMarker{}
+
+var incompleteSentinel = &adt.Bottom{
+	Code: adt.IncompleteError,
+	Err:  errors.Newf(token.NoPos, "incomplete"),
+}
+
+type Evaluator struct {
+	r       adt.Runtime
+	index   adt.StringIndexer
+	closeID uint32
+}
+
+func (e *Evaluator) nextID() uint32 {
+	e.closeID++
+	return e.closeID
+}
+
+func (e *Evaluator) Eval(v *adt.Vertex) errors.Error {
+	if v.Value == nil {
+		ctx := adt.NewContext(e.r, e, v)
+		e.Unify(ctx, v, adt.Finalized)
+	}
+
+	// extract error if needed.
+	return nil
+}
+
+// Evaluate is used to evaluate a subexpression while evaluating a Vertex
+// with Unify. It may or may not return the original Vertex. It may also
+// terminate evaluation early if it has enough evidence that a certain value
+// can be the only value in a valid configuration. This means that an error
+// may go undetected at this point, as long as it is caught later.
+//
+func (e *Evaluator) Evaluate(c *adt.OpContext, v *adt.Vertex) adt.Value {
+	var resultValue adt.Value
+	if v.Value == nil {
+		save := *v
+		// Use node itself to allow for cycle detection.
+		s := e.evalVertex(c, v, adt.Partial)
+
+		if d := s.disjunct; d != nil && len(d.Values) > 1 && d.NumDefaults != 1 {
+			v.Value = d
+			v.Arcs = nil
+			v.Structs = nil // TODO: maybe not do this.
+			// The conjuncts will have too much information. Better have no
+			// information than incorrect information.
+			for _, d := range d.Values {
+				d.Conjuncts = nil
+			}
+		}
+
+		resultValue = v.Value
+
+		result := s.result()
+		*v = save
+
+		if result.Value != nil {
+			*v = *result
+			resultValue = result.Value
+		}
+
+		// TODO: this seems unnecessary as long as we have a better way
+		// to handle incomplete, and perhaps referenced, nodes.
+		if c.IsTentative() && isStruct(v) {
+			// TODO(perf): do something more efficient perhaps? This discards
+			// the computed arcs so far. Instead, we could have a separate
+			// marker to accumulate results. As this only happens within
+			// comprehensions, the effect is likely minimal, though.
+			arcs := v.Arcs
+			*v = save
+			return &adt.Vertex{
+				Parent: v.Parent,
+				Value:  &adt.StructMarker{},
+				Arcs:   arcs,
+			}
+		}
+		// *v = save // DO NOT ADD.
+		err, _ := resultValue.(*adt.Bottom)
+		// BEFORE RESTORING, copy the value to return one
+		// with the temporary arcs.
+		if !s.done() && (err == nil || err.IsIncomplete()) {
+			// Clear values afterwards
+			*v = save
+		}
+		if !s.done() && s.hasDisjunction() {
+			return &adt.Bottom{Code: adt.IncompleteError}
+		}
+		if s.hasResult() {
+			if b, _ := v.Value.(*adt.Bottom); b != nil {
+				*v = save
+				return b
+			}
+			// TODO: Only use result when not a cycle.
+			v = result
+		}
+		// TODO: Store if concrete and fully resolved.
+
+	} else {
+		b, _ := v.Value.(*adt.Bottom)
+		if b != nil {
+			return b
+		}
+	}
+
+	switch v.Value.(type) {
+	case nil:
+		// Error saved in result.
+		return resultValue // incomplete
+
+	case *adt.ListMarker, *adt.StructMarker:
+		return v
+
+	default:
+		return v.Value
+	}
+}
+
+// Unify implements adt.Unifier.
+//
+// May not evaluate the entire value, but just enough to be able to compute.
+//
+// Phase one: record everything concrete
+// Phase two: record incomplete
+// Phase three: record cycle.
+func (e *Evaluator) Unify(c *adt.OpContext, v *adt.Vertex, state adt.VertexStatus) {
+	// defer c.PopVertex(c.PushVertex(v))
+
+	if state <= v.Status()+1 {
+		return
+	}
+
+	if x := v.Value; x != nil {
+		// if state == adt.Partial || x == cycle {
+		// 	return
+		// }
+		return
+	}
+
+	n := e.evalVertex(c, v, state)
+
+	switch d := n.disjunct; {
+	case d != nil && len(d.Values) == 1:
+		*v = *(d.Values[0])
+
+	case d != nil && len(d.Values) > 0:
+		v.Value = d
+		v.Arcs = nil
+		v.Structs = nil
+		// The conjuncts will have too much information. Better have no
+		// information than incorrect information.
+		for _, d := range d.Values {
+			d.Conjuncts = nil
+		}
+
+	default:
+		if r := n.result(); r.Value != nil {
+			*v = *r
+		}
+	}
+
+	// Else set it to something.
+
+	if v.Value == nil {
+		panic("error")
+	}
+
+	// Check whether result is done.
+}
+
+// evalVertex computes the vertex results. The state indicates the minimum
+// status to which this vertex should be evaluated. It should be either
+// adt.Finalized or adt.Partial.
+func (e *Evaluator) evalVertex(c *adt.OpContext, v *adt.Vertex, state adt.VertexStatus) *nodeShared {
+	// fmt.Println(debug.NodeString(c.StringIndexer, v, nil))
+	shared := &nodeShared{
+		ctx:   c,
+		eval:  e,
+		node:  v,
+		stack: nil, // silence linter
+	}
+	saved := *v
+
+	for i := 0; ; i++ {
+
+		// Clear any remaining error.
+		if err := c.Err(); err != nil {
+			panic("uncaught error")
+		}
+
+		// Set the cache to a cycle error to ensure a cyclic reference will result
+		// in an error if applicable. A cyclic error may be ignored for
+		// non-expression references. The cycle error may also be removed as soon
+		// as there is evidence what a correct value must be, but before all
+		// validation has taken place.
+		*v = saved
+		v.Value = cycle
+		v.UpdateStatus(adt.Evaluating)
+
+		// If the result is a struct, it needs to be closed if:
+		//   1) this node introduces a definition
+		//   2) this node is a child of a node that introduces a definition,
+		//      recursively.
+		//   3) this node embeds a closed struct.
+		needClose := v.Label.IsDef()
+
+		n := &nodeContext{
+			kind:       adt.TopKind,
+			nodeShared: shared,
+			needClose:  needClose,
+
+			// These get cleared upon proof to the contrary.
+			// isDefault: true,
+			isFinal: true,
+		}
+
+		closeID := uint32(0)
+
+		for _, x := range v.Conjuncts {
+			closeID := closeID
+			// TODO: needed for reentrancy. Investigate usefulness for cycle
+			// detection.
+			if x.Env != nil && x.Env.CloseID != 0 {
+				closeID = x.Env.CloseID
+			}
+			n.addExprConjunct(x, closeID, true)
+		}
+
+		if i == 0 {
+			// Use maybeSetCache for cycle breaking
+			for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() {
+			}
+			if v.Status() > adt.Evaluating && state <= adt.Partial {
+				// We have found a partial result. There may still be errors
+				// down the line which may result from further evaluating this
+				// field, but that will be caught when evaluating this field
+				// for real.
+				shared.setResult(v)
+				return shared
+			}
+			if !n.done() && len(n.disjunctions) > 0 && isEvaluating(v) {
+				// We disallow entering computations of disjunctions with
+				// incomplete data.
+				b := c.NewErrf("incomplete cause disjunction")
+				b.Code = adt.IncompleteError
+				v.SetValue(n.ctx, adt.Finalized, b)
+				shared.setResult(v)
+				return shared
+			}
+		}
+
+		// Handle disjunctions. If there are no disjunctions, this call is
+		// equivalent to calling n.postDisjunct.
+		if n.tryDisjuncts() {
+			if v.Value == nil {
+				v.Value = n.getValidators()
+			}
+
+			break
+		}
+	}
+
+	return shared
+}
+
+func isStruct(v *adt.Vertex) bool {
+	_, ok := v.Value.(*adt.StructMarker)
+	return ok
+}
+
+func (n *nodeContext) postDisjunct() {
+	ctx := n.ctx
+
+	// Use maybeSetCache for cycle breaking
+	for n.maybeSetCache(); n.expandOne(); n.maybeSetCache() {
+	}
+
+	// TODO: preparation for association lists:
+	// We assume that association types may not be created dynamically for now.
+	// So we add lists at this point.
+	n.addLists(ctx)
+
+	switch err := n.getErr(); {
+	case err != nil:
+		n.node.Value = err
+		n.errs = nil
+
+	default:
+		if isEvaluating(n.node) {
+			// TODO: this does not yet validate all values.
+
+			if !n.done() { // && !ctx.IsTentative() {
+				// collect incomplete errors.
+				// 	len(n.ifClauses) == 0 &&
+				// 	len(n.forClauses) == 0 &&
+				var err *adt.Bottom // n.incomplete
+				// err = n.incomplete
+				for _, d := range n.dynamicFields {
+					x, _ := ctx.Concrete(d.env, d.field.Key, "dynamic field")
+					b, _ := x.(*adt.Bottom)
+					err = adt.CombineErrors(nil, err, b)
+				}
+				for _, c := range n.forClauses {
+					f := func(env *adt.Environment, st *adt.StructLit) {}
+					err = adt.CombineErrors(nil, err, ctx.Yield(c.env, c.yield, f))
+				}
+				for _, x := range n.exprs {
+					x, _ := ctx.Evaluate(x.Env, x.Expr())
+					b, _ := x.(*adt.Bottom)
+					err = adt.CombineErrors(nil, err, b)
+				}
+				if err == nil {
+					// safeguard.
+					err = incompleteSentinel
+				}
+				n.node.Value = err
+			} else {
+				n.node.Value = nil
+			}
+		}
+
+		// We are no longer evaluating.
+		n.node.UpdateStatus(adt.Partial)
+
+		// Either set to Conjunction or error.
+		var v adt.Value = n.node.Value
+		kind := n.kind
+		markStruct := false
+		if n.isStruct {
+			if kind != 0 && kind&adt.StructKind == 0 {
+				n.node.Value = &adt.Bottom{
+					Err: errors.Newf(token.NoPos,
+						"conflicting values struct and %s", n.kind),
+				}
+			}
+			markStruct = true
+		} else if len(n.node.Structs) > 0 {
+			markStruct = kind&adt.StructKind != 0 && !n.hasTop
+		}
+		if v == nil && markStruct {
+			kind = adt.StructKind
+			n.node.Value = &adt.StructMarker{}
+			v = n.node
+		}
+		if v != nil && adt.IsConcrete(v) {
+			if n.scalar != nil {
+				kind = n.scalar.Kind()
+			}
+			if v.Kind()&^kind != 0 {
+				p := token.NoPos
+				if src := v.Source(); src != nil {
+					p = src.Pos()
+				}
+				n.addErr(errors.Newf(p,
+					// TODO(err): position of all value types.
+					"conflicting types",
+				))
+			}
+			if n.lowerBound != nil {
+				if b := ctx.Validate(n.lowerBound, v); b != nil {
+					n.addBottom(b)
+				}
+			}
+			if n.upperBound != nil {
+				if b := ctx.Validate(n.upperBound, v); b != nil {
+					n.addBottom(b)
+				}
+			}
+			for _, v := range n.checks {
+				if b := ctx.Validate(v, n.node); b != nil {
+					n.addBottom(b)
+				}
+			}
+
+		} else if !ctx.IsTentative() {
+			n.node.Value = n.getValidators()
+		}
+		// else if v == nil {
+		// 	n.node.Value = incompleteSentinel
+		// }
+
+		if v == nil {
+			break
+		}
+
+		switch {
+		case v.Kind() == adt.ListKind:
+			for _, a := range n.node.Arcs {
+				if a.Label.Typ() == adt.StringLabel {
+					n.addErr(errors.Newf(token.NoPos,
+						// TODO(err): add positions for list and arc definitions.
+						"list may not have regular fields"))
+				}
+			}
+
+			// case !isStruct(n.node) && v.Kind() != adt.BottomKind:
+			// 	for _, a := range n.node.Arcs {
+			// 		if a.Label.IsRegular() {
+			// 			n.addErr(errors.Newf(token.NoPos,
+			// 				// TODO(err): add positions of non-struct values and arcs.
+			// 				"cannot combine scalar values with arcs"))
+			// 		}
+			// 	}
+		}
+	}
+
+	var c *CloseDef
+	if a, _ := n.node.Closed.(*acceptor); a != nil {
+		c = a.tree
+		n.needClose = n.needClose || a.isClosed
+	}
+
+	updated := updateClosed(c, n.replace)
+	if updated == nil && n.needClose {
+		updated = &CloseDef{}
+	}
+
+	// TODO retrieve from env.
+
+	if err := n.getErr(); err != nil {
+		if b, _ := n.node.Value.(*adt.Bottom); b != nil {
+			err = adt.CombineErrors(nil, b, err)
+		}
+		n.node.Value = err
+		// TODO: add return: if evaluation of arcs is important it can be done
+		// later. Logically we're done.
+	}
+
+	m := &acceptor{
+		tree:     updated,
+		fields:   n.optionals,
+		isClosed: n.needClose,
+		openList: n.openList,
+		isList:   n.node.IsList(),
+	}
+	if updated != nil || len(n.optionals) > 0 {
+		n.node.Closed = m
+	}
+
+	// Visit arcs recursively to validate and compute error.
+	for _, a := range n.node.Arcs {
+		if updated != nil {
+			a.Closed = m
+		}
+		if updated != nil && m.isClosed {
+			if err := m.verifyArcAllowed(n.ctx, a.Label); err != nil {
+				n.node.Value = err
+			}
+			// TODO: use continue to not process already failed fields,
+			// or at least don't record recursive error.
+			// continue
+		}
+		// Call UpdateStatus here to be absolutely sure the status is set
+		// correctly and that we are not regressing.
+		n.node.UpdateStatus(adt.EvaluatingArcs)
+		n.eval.Unify(ctx, a, adt.Finalized)
+		if err, _ := a.Value.(*adt.Bottom); err != nil {
+			n.node.AddChildError(err)
+		}
+	}
+
+	n.node.UpdateStatus(adt.Finalized)
+}
+
+// TODO: this is now a sentinel. Use a user-facing error that traces where
+// the cycle originates.
+var cycle = &adt.Bottom{
+	Err:  errors.Newf(token.NoPos, "cycle error"),
+	Code: adt.CycleError,
+}
+
+func isEvaluating(v *adt.Vertex) bool {
+	isCycle := v.Status() == adt.Evaluating
+	if isCycle != (v.Value == cycle) {
+		panic(fmt.Sprintf("cycle data out of sync: %d vs %#v", v.Status(), v.Value))
+	}
+	return isCycle
+}
+
+type nodeShared struct {
+	eval *Evaluator
+	ctx  *adt.OpContext
+	sub  []*adt.Environment // Environment cache
+	node *adt.Vertex
+
+	// Disjunction handling
+	disjunct   *adt.Disjunction
+	resultNode *nodeContext
+	result_    adt.Vertex
+	stack      []int
+}
+
+func (n *nodeShared) result() *adt.Vertex {
+	return &n.result_
+}
+
+func (n *nodeShared) setResult(v *adt.Vertex) {
+	n.result_ = *v
+}
+
+func (n *nodeShared) hasResult() bool {
+	return n.resultNode != nil //|| n.hasResult_
+	// return n.resultNode != nil || n.hasResult_
+}
+
+func (n *nodeShared) done() bool {
+	// if d := n.disjunct; d == nil || len(n.disjunct.Values) == 0 {
+	// 	return false
+	// }
+	if n.resultNode == nil {
+		return false
+	}
+	return n.resultNode.done()
+}
+
+func (n *nodeShared) hasDisjunction() bool {
+	if n.resultNode == nil {
+		return false
+	}
+	return len(n.resultNode.disjunctions) > 0
+}
+
+func (n *nodeShared) isDefault() bool {
+	if n.resultNode == nil {
+		return false
+	}
+	return n.resultNode.defaultMode == isDefault
+}
+
+// A nodeContext is used to collate all conjuncts of a value to facilitate
+// unification. Conceptually, the order of unification does not matter. However,
+// order has relevance when performing checks of non-monotonic properties. Such
+// checks should only be performed once the full value is known.
+type nodeContext struct {
+	*nodeShared
+
+	// TODO:
+	// filter *adt.Vertex a subset of composite with concrete fields for
+	// bloom-like filtering of disjuncts. We should first verify, however,
+	// whether some breadth-first search gives sufficient performance, as this
+	// should already ensure a quick-fail for struct disjunctions with
+	// discriminators.
+
+	// Current value (may be under construction)
+	scalar adt.Value // TODO: use Value in node.
+
+	// Concrete conjuncts
+	kind       adt.Kind
+	lowerBound *adt.BoundValue // > or >=
+	upperBound *adt.BoundValue // < or <=
+	checks     []adt.Validator // BuiltinValidator, other bound values.
+	errs       *adt.Bottom
+	incomplete *adt.Bottom
+
+	// Struct information
+	dynamicFields []envDynamic
+	ifClauses     []envYield
+	forClauses    []envYield
+	optionals     []fieldSet // env + field
+	// NeedClose:
+	// - node starts definition
+	// - embeds a definition
+	// - parent node is closing
+	needClose bool
+	openList  bool
+	isStruct  bool
+	hasTop    bool
+	newClose  *CloseDef
+	// closeID   uint32 // from parent, or if not exist, new if introducing a def.
+	replace map[uint32]*CloseDef
+
+	// Expression conjuncts
+	lists  []envList
+	vLists []*adt.Vertex
+	exprs  []conjunct
+
+	// Disjunction handling
+	disjunctions []envDisjunct
+	defaultMode  defaultMode
+	isFinal      bool
+}
+
+func (n *nodeContext) done() bool {
+	return len(n.dynamicFields) == 0 &&
+		len(n.ifClauses) == 0 &&
+		len(n.forClauses) == 0 &&
+		len(n.exprs) == 0
+}
+
+// hasErr is used to determine if an evaluation path, for instance a single
+// path after expanding all disjunctions, has an error.
+func (n *nodeContext) hasErr() bool {
+	if n.node.ChildErrors != nil {
+		return true
+	}
+	if n.node.Status() > adt.Evaluating && n.node.IsErr() {
+		return true
+	}
+	return n.ctx.HasErr() || n.errs != nil
+}
+
+func (n *nodeContext) getErr() *adt.Bottom {
+	n.errs = adt.CombineErrors(nil, n.errs, n.ctx.Err())
+	return n.errs
+}
+
+// getValidators returns the value to use for the vertex' Value in case there
+// was no concrete value.
+func (n *nodeContext) getValidators() adt.Value {
+	ctx := n.ctx
+
+	a := []adt.Value{}
+	// if n.node.Value != nil {
+	// 	a = append(a, n.node.Value)
+	// }
+	kind := adt.TopKind
+	if n.lowerBound != nil {
+		a = append(a, n.lowerBound)
+		kind &= n.lowerBound.Kind()
+	}
+	if n.upperBound != nil {
+		a = append(a, n.upperBound)
+		kind &= n.upperBound.Kind()
+	}
+	for _, c := range n.checks {
+		// Drop !=x if x is out of bounds with another bound.
+		if b, _ := c.(*adt.BoundValue); b != nil && b.Op == adt.NotEqualOp {
+			if n.upperBound != nil &&
+				adt.SimplifyBounds(ctx, n.kind, n.upperBound, b) != nil {
+				continue
+			}
+			if n.lowerBound != nil &&
+				adt.SimplifyBounds(ctx, n.kind, n.lowerBound, b) != nil {
+				continue
+			}
+		}
+		a = append(a, c)
+		kind &= c.Kind()
+	}
+	if kind&^n.kind != 0 {
+		a = append(a, &adt.BasicType{K: n.kind})
+	}
+
+	var v adt.Value
+	switch len(a) {
+	case 0:
+		// Src is the combined input.
+		v = &adt.BasicType{K: n.kind}
+
+		// TODO: Change to isStruct?
+		if len(n.node.Structs) > 0 {
+			// n.isStruct = true
+			v = structSentinel
+
+		}
+
+	case 1:
+		v = a[0].(adt.Value)
+
+	default:
+		v = &adt.Conjunction{Values: a}
+	}
+
+	return v
+}
+
+func (n *nodeContext) maybeSetCache() {
+	if n.node.Status() > adt.Evaluating { // n.node.Value != nil
+		return
+	}
+	if n.scalar != nil {
+		n.node.SetValue(n.ctx, adt.Partial, n.scalar)
+	}
+	if n.errs != nil {
+		n.node.SetValue(n.ctx, adt.Partial, n.errs)
+	}
+}
+
+type conjunct struct {
+	adt.Conjunct
+	closeID uint32
+	top     bool
+}
+
+type envDynamic struct {
+	env   *adt.Environment
+	field *adt.DynamicField
+}
+
+type envYield struct {
+	env   *adt.Environment
+	yield adt.Yielder
+}
+
+type envList struct {
+	env     *adt.Environment
+	list    *adt.ListLit
+	n       int64 // recorded length after evaluator
+	elipsis *adt.Ellipsis
+}
+
+func (n *nodeContext) addBottom(b *adt.Bottom) {
+	n.errs = adt.CombineErrors(nil, n.errs, b)
+}
+
+func (n *nodeContext) addErr(err errors.Error) {
+	if err != nil {
+		n.errs = adt.CombineErrors(nil, n.errs, &adt.Bottom{
+			Err: err,
+		})
+	}
+}
+
+// addExprConjunct attempts to evaluate an adt.Expr and insert the value
+// into the nodeContext if successful, or queues it for later evaluation if it
+// is incomplete or is not a value.
+func (n *nodeContext) addExprConjunct(v adt.Conjunct, def uint32, top bool) {
+	env := v.Env
+	if env != nil && env.CloseID != def {
+		e := *env
+		e.CloseID = def
+		env = &e
+	}
+	switch x := v.Expr().(type) {
+	case adt.Value:
+		n.addValueConjunct(env, x)
+
+	case *adt.BinaryExpr:
+		if x.Op == adt.AndOp {
+			n.addExprConjunct(adt.MakeConjunct(env, x.X), def, false)
+			n.addExprConjunct(adt.MakeConjunct(env, x.Y), def, false)
+		} else {
+			n.evalExpr(v, def, top)
+		}
+
+	case *adt.StructLit:
+		n.addStruct(env, x, def, top)
+
+	case *adt.ListLit:
+		n.lists = append(n.lists, envList{env: env, list: x})
+
+	case *adt.DisjunctionExpr:
+		if n.disjunctions != nil {
+			_ = n.disjunctions
+		}
+		n.addDisjunction(env, x, def, top)
+
+	default:
+		// Must be Resolver or Evaluator.
+		n.evalExpr(v, def, top)
+	}
+
+	if top {
+		n.updateReplace(v.Env)
+	}
+}
+
+// evalExpr is only called by addExprConjunct.
+func (n *nodeContext) evalExpr(v adt.Conjunct, closeID uint32, top bool) {
+	// Require an Environment.
+	ctx := n.ctx
+
+	switch x := v.Expr().(type) {
+	case adt.Resolver:
+		arc, err := ctx.Resolve(v.Env, x)
+		if err != nil {
+			if err.IsIncomplete() {
+				n.incomplete = adt.CombineErrors(nil, n.incomplete, err)
+			} else {
+				n.addBottom(err)
+				break
+			}
+		}
+		if arc == nil {
+			n.exprs = append(n.exprs, conjunct{v, closeID, top})
+			break
+		}
+
+		// If this is a cycle error, we have reached a fixed point and adding
+		// conjuncts at this point will not change the value. Also, continuing
+		// to pursue this value will result in an infinite loop.
+		//
+		// TODO: add a mechanism so that the computation will only have to be
+		// done once?
+		if isEvaluating(arc) {
+			break
+		}
+
+		// TODO: detect structural cycles here. A structural cycle can occur
+		// if it is not a reference cycle, but refers to a parent node.
+		// This should only be allowed if it is unified with a finite structure.
+
+		if arc.Label.IsDef() {
+			n.insertClosed(arc)
+		} else {
+			for _, a := range arc.Conjuncts {
+				n.addExprConjunct(a, closeID, top)
+			}
+		}
+
+	case adt.Evaluator:
+		// adt.Interpolation, adt.UnaryExpr, adt.BinaryExpr, adt.CallExpr
+		val, complete := ctx.Evaluate(v.Env, v.Expr())
+		if !complete {
+			n.exprs = append(n.exprs, conjunct{v, closeID, top})
+			break
+		}
+
+		if v, ok := val.(*adt.Vertex); ok {
+			// Handle generated disjunctions (as in the 'or' builtin).
+			// These come as a Vertex, but should not be added as a value.
+			b, ok := v.Value.(*adt.Bottom)
+			if ok && b.IsIncomplete() && len(v.Conjuncts) > 0 {
+				for _, c := range v.Conjuncts {
+					n.addExprConjunct(c, closeID, top)
+				}
+				break
+			}
+		}
+
+		// TODO: insert in vertex as well
+		n.addValueConjunct(v.Env, val)
+
+	default:
+		panic(fmt.Sprintf("unknown expression of type %T", x))
+	}
+}
+
+func (n *nodeContext) insertClosed(arc *adt.Vertex) {
+	id := n.eval.nextID()
+	n.needClose = true
+
+	current := n.newClose
+	n.newClose = nil
+
+	for _, a := range arc.Conjuncts {
+		n.addExprConjunct(a, id, false)
+	}
+
+	current, n.newClose = n.newClose, current
+
+	if current == nil {
+		current = &CloseDef{ID: id}
+	}
+	n.addAnd(current)
+}
+
+func (n *nodeContext) addValueConjunct(env *adt.Environment, v adt.Value) {
+	if x, ok := v.(*adt.Vertex); ok {
+		needClose := false
+		if isStruct(x) {
+			n.isStruct = true
+			// TODO: find better way to mark as struct.
+			// For instance, we may want to add a faux
+			// Structlit for topological sort.
+			// return
+
+			if x.IsClosed(n.ctx) {
+				needClose = true
+			}
+
+			n.node.AddStructs(x.Structs...)
+		}
+
+		if len(x.Conjuncts) > 0 {
+			if needClose {
+				n.insertClosed(x)
+				return
+			}
+			for _, c := range x.Conjuncts {
+				n.addExprConjunct(c, 0, false) // Pass from eval
+			}
+			return
+		}
+
+		if x.IsList() {
+			n.vLists = append(n.vLists, x)
+			return
+		}
+
+		// TODO: evaluate value?
+		switch v := x.Value.(type) {
+		case *adt.ListMarker:
+			panic("unreachable")
+
+		case *adt.StructMarker:
+			for _, a := range x.Arcs {
+				// TODO, insert here as
+				n.insertField(a.Label, adt.MakeConjunct(nil, a))
+				// sub, _ := n.node.GetArc(a.Label)
+				// sub.Add(a)
+			}
+
+		default:
+			n.addValueConjunct(env, v)
+
+			for _, a := range x.Arcs {
+				// TODO, insert here as
+				n.insertField(a.Label, adt.MakeConjunct(nil, a))
+				// sub, _ := n.node.GetArc(a.Label)
+				// sub.Add(a)
+			}
+		}
+
+		return
+		// TODO: Use the Closer to close other fields as well?
+	}
+
+	if b, ok := v.(*adt.Bottom); ok {
+		n.addBottom(b)
+		return
+	}
+
+	ctx := n.ctx
+	kind := n.kind & v.Kind()
+	if kind == adt.BottomKind {
+		// TODO: how to get other conflicting values?
+		n.addErr(errors.Newf(token.NoPos,
+			"invalid value %s (mismatched types %s and %s)",
+			ctx.Str(v), v.Kind(), n.kind))
+		return
+	}
+	n.kind = kind
+
+	switch x := v.(type) {
+	case *adt.Disjunction:
+		n.addDisjunctionValue(env, x, 0, true)
+
+	case *adt.Conjunction:
+		for _, x := range x.Values {
+			n.addValueConjunct(env, x)
+		}
+
+	case *adt.Top:
+		n.hasTop = true
+		// TODO: Is this correct? Needed for ellipsis, but not sure for others.
+		n.optionals = append(n.optionals, fieldSet{env: env, isOpen: true})
+
+	case *adt.BasicType:
+
+	case *adt.BoundValue:
+		switch x.Op {
+		case adt.LessThanOp, adt.LessEqualOp:
+			if y := n.upperBound; y != nil {
+				n.upperBound = nil
+				n.addValueConjunct(env, adt.SimplifyBounds(ctx, n.kind, x, y))
+				return
+			}
+			n.upperBound = x
+
+		case adt.GreaterThanOp, adt.GreaterEqualOp:
+			if y := n.lowerBound; y != nil {
+				n.lowerBound = nil
+				n.addValueConjunct(env, adt.SimplifyBounds(ctx, n.kind, x, y))
+				return
+			}
+			n.lowerBound = x
+
+		case adt.EqualOp, adt.NotEqualOp, adt.MatchOp, adt.NotMatchOp:
+			n.checks = append(n.checks, x)
+			return
+		}
+
+	case adt.Validator:
+		n.checks = append(n.checks, x)
+
+	case *adt.Vertex:
+	// handled above.
+
+	case adt.Value: // *NullLit, *BoolLit, *NumLit, *StringLit, *BytesLit
+		if y := n.scalar; y != nil {
+			if b, ok := adt.BinOp(ctx, adt.EqualOp, x, y).(*adt.Bool); !ok || !b.B {
+				n.addErr(errors.Newf(ctx.Pos(), "incompatible values %s and %s", ctx.Str(x), ctx.Str(y)))
+			}
+			// TODO: do we need to explicitly add again?
+			// n.scalar = nil
+			// n.addValueConjunct(c, adt.BinOp(c, adt.EqualOp, x, y))
+			break
+		}
+		n.scalar = x
+
+	default:
+		panic(fmt.Sprintf("unknown value type %T", x))
+	}
+
+	if n.lowerBound != nil && n.upperBound != nil {
+		if u := adt.SimplifyBounds(ctx, n.kind, n.lowerBound, n.upperBound); u != nil {
+			n.lowerBound = nil
+			n.upperBound = nil
+			n.addValueConjunct(env, u)
+		}
+	}
+}
+
+// addStruct collates the declarations of a struct.
+//
+// addStruct fulfills two additional pivotal functions:
+//   1) Implement vertex unification (this happens through De Bruijn indices
+//      combined with proper setup of Environments).
+//   2) Implied closedness for definitions.
+//
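+// As a rough illustration of the second point, in
+//
+//    #D: {a: int}
+//    d: #D & {b: int}
+//
+// the struct of #D is added with a newly allocated closeID, so that the field
+// b of the second operand can later be rejected as not allowed by the closed
+// definition.
+//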
+func (n *nodeContext) addStruct(
+	env *adt.Environment,
+	s *adt.StructLit,
+	newDef uint32,
+	top bool) {
+
+	ctx := n.ctx
+	n.node.AddStructs(s)
+
+	// Inherit closeID from environment, unless this is a new definition.
+	closeID := newDef
+	if closeID == 0 && env != nil {
+		closeID = env.CloseID
+	}
+
+	// NOTE: This is a crucial point in the code:
+	// Unification dereferencing happens here. The child nodes are set to
+	// an Environment linked to the current node. Together with the De Bruijn
+	// indices, this determines to which Vertex a reference resolves.
+
+	// TODO(perf): consider using environment cache:
+	// var childEnv *adt.Environment
+	// for _, s := range n.nodeCache.sub {
+	// 	if s.Up == env {
+	// 		childEnv = s
+	// 	}
+	// }
+	childEnv := &adt.Environment{
+		Up:      env,
+		Vertex:  n.node,
+		CloseID: closeID,
+	}
+
+	var hasOther, hasBulk adt.Node
+
+	opt := fieldSet{env: childEnv}
+
+	for _, d := range s.Decls {
+		switch x := d.(type) {
+		case *adt.Field:
+			opt.MarkField(ctx, x)
+			// handle in next iteration.
+
+		case *adt.OptionalField:
+			opt.AddOptional(ctx, x)
+
+		case *adt.DynamicField:
+			hasOther = x
+			n.dynamicFields = append(n.dynamicFields, envDynamic{childEnv, x})
+			opt.AddDynamic(ctx, childEnv, x)
+
+		case *adt.ForClause:
+			hasOther = x
+			n.forClauses = append(n.forClauses, envYield{childEnv, x})
+
+		case adt.Yielder:
+			hasOther = x
+			n.ifClauses = append(n.ifClauses, envYield{childEnv, x})
+
+		case adt.Expr:
+			// Create a new close ID for this embedding.
+			id := n.eval.nextID()
+
+			current := n.newClose
+			n.newClose = nil
+
+			hasOther = x
+			n.addExprConjunct(adt.MakeConjunct(childEnv, x), id, false)
+
+			current, n.newClose = n.newClose, current
+
+			if current == nil {
+				current = &CloseDef{ID: id} // TODO: isClosed?
+			} else {
+				// n.needClose = true
+			}
+			n.addOr(closeID, current)
+
+		case *adt.BulkOptionalField:
+			hasBulk = x
+			opt.AddBulk(ctx, x)
+
+		case *adt.Ellipsis:
+			hasBulk = x
+			opt.AddEllipsis(ctx, x)
+
+		default:
+			panic("unreachable")
+		}
+	}
+
+	if hasBulk != nil && hasOther != nil {
+		n.addErr(errors.Newf(token.NoPos, "cannot mix bulk optional fields with dynamic fields, embeddings, or comprehensions within the same struct"))
+	}
+
+	// Apply the collected optional constraints to fields that already exist.
+	for _, arc := range n.node.Arcs {
+		// Reuse adt.Acceptor interface.
+		opt.MatchAndInsert(ctx, arc)
+	}
+
+	n.optionals = append(n.optionals, opt)
+
+	for _, d := range s.Decls {
+		switch x := d.(type) {
+		case *adt.Field:
+			n.insertField(x.Label, adt.MakeConjunct(childEnv, x))
+		}
+	}
+}
+
+func (n *nodeContext) insertField(f adt.Feature, x adt.Conjunct) *adt.Vertex {
+	ctx := n.ctx
+	arc, isNew := n.node.GetArc(f)
+
+	if f.IsString() {
+		n.isStruct = true
+	}
+
+	// TODO: disallow adding conjuncts when cache set?
+	arc.AddConjunct(x)
+
+	if isNew {
+		for _, o := range n.optionals {
+			o.MatchAndInsert(ctx, arc)
+		}
+	}
+	return arc
+}
+
+// expandOne adds dynamic fields to a node until a fixed point is reached.
+// On each iteration, dynamic fields that cannot resolve due to incomplete
+// values are skipped. They will be retried on the next iteration until no
+// progress can be made. Note that a dynamic field may add more dynamic fields.
+//
+// forClauses are processed after all other clauses. A struct may be referenced
+// before it is complete, meaning that fields added by other forms of injection
+// may influence the result of a for clause _after_ it has already been
+// processed. We could instead detect such insertion and feed it to the
+// ForClause to generate another entry or have the for clause be recomputed.
+// This seems too complicated and would lead to iffy edge cases.
+// TODO(error): detect when a field is added to a struct that is already used
+// in a for clause.
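+//
+// For example (illustrative only), if a for clause iterates over b,
+//
+//    a: {for k, v in b {(k): v}}
+//    b: x: 1
+//
+// then fields injected into b by a dynamic field or another comprehension
+// after the clause has run are exactly the problematic case described above.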
+func (n *nodeContext) expandOne() (done bool) {
+	if n.done() {
+		return false
+	}
+
+	var progress bool
+
+	if progress = n.injectDynamic(); progress {
+		return true
+	}
+
+	if n.ifClauses, progress = n.injectEmbedded(n.ifClauses); progress {
+		return true
+	}
+
+	if n.forClauses, progress = n.injectEmbedded(n.forClauses); progress {
+		return true
+	}
+
+	// Do expressions after comprehensions, as comprehensions can never
+	// refer to embedded scalars, whereas expressions may refer to generated
+	// fields if we were to allow attributes to be defined alongside
+	// scalars.
+	exprs := n.exprs
+	n.exprs = n.exprs[:0]
+	for _, x := range exprs {
+		n.addExprConjunct(x.Conjunct, x.closeID, x.top)
+
+		// collect conjunctions (and) and disjunctions (or).
+	}
+	if len(n.exprs) < len(exprs) {
+		return true
+	}
+
+	// No progress; report an error later if needed: unification with
+	// disjuncts may resolve this later on.
+	return false
+}
+
+// injectDynamic evaluates and inserts dynamic declarations.
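+//
+// A dynamic declaration is a field whose name is computed, for instance
+// (a hedged illustration):
+//
+//    key: "foo"
+//    (key): 3   // equivalent to foo: 3 once key resolves
+//
+// The key expression may initially be incomplete, in which case the field
+// is kept and retried on a later call.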
+func (n *nodeContext) injectDynamic() (progress bool) {
+	ctx := n.ctx
+	k := 0
+
+	a := n.dynamicFields
+	for _, d := range n.dynamicFields {
+		var f adt.Feature
+		v, complete := ctx.Evaluate(d.env, d.field.Key)
+		if !complete {
+			a[k] = d
+			k++
+			continue
+		}
+		if b, _ := v.(*adt.Bottom); b != nil {
+			n.addValueConjunct(nil, b)
+			continue
+		}
+		f = ctx.Label(v)
+		n.insertField(f, adt.MakeConjunct(d.env, d.field))
+	}
+
+	progress = k < len(n.dynamicFields)
+
+	n.dynamicFields = a[:k]
+
+	return progress
+}
+
+// injectEmbedded evaluates and inserts embeddings. It first evaluates all
+// embeddings before inserting the results to ensure that the order of
+// evaluation does not matter.
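+//
+// For instance (an informal example), in
+//
+//    for k, v in {a: 1, b: 2} {(k): v}
+//
+// both yielded structs {a: 1} and {b: 2} are collected before either is
+// inserted, so a later iteration cannot observe fields inserted by an
+// earlier one within the same pass.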
+func (n *nodeContext) injectEmbedded(all []envYield) (a []envYield, progress bool) {
+	ctx := n.ctx
+	type envStruct struct {
+		env *adt.Environment
+		s   *adt.StructLit
+	}
+	var sa []envStruct
+	f := func(env *adt.Environment, st *adt.StructLit) {
+		sa = append(sa, envStruct{env, st})
+	}
+
+	k := 0
+	for _, d := range all {
+		sa = sa[:0]
+
+		if err := ctx.Yield(d.env, d.yield, f); err != nil {
+			if err.IsIncomplete() {
+				all[k] = d
+				k++
+			} else {
+				// continue to collect other errors.
+				n.addBottom(err)
+			}
+			continue
+		}
+
+		for _, st := range sa {
+			n.addStruct(st.env, st.s, 0, true)
+		}
+	}
+
+	return all[:k], k < len(all)
+}
+
+// addLists
+//
+// TODO: association arrays:
+// If an association array marker was present in a struct, create a struct node
+// instead of a list node. In either case, a node may only have list fields
+// or struct fields and not both.
+//
+// addLists should be run after the fixpoint expansion:
+//    - it enforces that comprehensions may not refer to the list itself
+//    - there may be no other fields within the list.
+//
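+// As a small illustration (hedged, not an exhaustive description): list
+// conjuncts unify element-wise and an ellipsis keeps a list open:
+//
+//    l: [1, 2, ...int]
+//    l: [1, 2, 3]    // ok: 3 is checked against ...int
+//
+//    m: [1, 2]
+//    m: [1, 2, 3]    // error: incompatible list lengths (2 and 3)
+//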
+// TODO(embeddedScalars): for embedded scalars, there should be another pass
+// of expression evaluation after expanding lists.
+func (n *nodeContext) addLists(c *adt.OpContext) {
+	if len(n.lists) == 0 && len(n.vLists) == 0 {
+		return
+	}
+
+	for _, a := range n.node.Arcs {
+		if t := a.Label.Typ(); t == adt.StringLabel {
+			n.addErr(errors.Newf(token.NoPos, "conflicting types list and struct"))
+		}
+	}
+
+	isOpen := true
+	max := 0
+	var maxNode adt.Expr
+
+	for _, l := range n.vLists {
+		elems := l.Elems()
+		isClosed := l.IsClosed(c)
+
+		switch {
+		case len(elems) < max:
+			if isClosed {
+				n.invalidListLength(len(elems), max, l, maxNode)
+				continue
+			}
+
+		case len(elems) > max:
+			if !isOpen {
+				n.invalidListLength(max, len(elems), maxNode, l)
+				continue
+			}
+			isOpen = !isClosed
+			max = len(elems)
+			maxNode = l
+
+		case isClosed:
+			isOpen = false
+			maxNode = l
+		}
+
+		for _, a := range elems {
+			if a.Conjuncts == nil {
+				n.insertField(a.Label, adt.MakeConjunct(nil, a.Value))
+				continue
+			}
+			for _, c := range a.Conjuncts {
+				n.insertField(a.Label, c)
+			}
+		}
+	}
+
+outer:
+	for i, l := range n.lists {
+		index := int64(0)
+		hasComprehension := false
+		for j, elem := range l.list.Elems {
+			switch x := elem.(type) {
+			case adt.Yielder:
+				err := c.Yield(l.env, x, func(e *adt.Environment, st *adt.StructLit) {
+					label, err := adt.MakeLabel(x.Source(), index, adt.IntLabel)
+					n.addErr(err)
+					index++
+					n.insertField(label, adt.MakeConjunct(e, st))
+				})
+				hasComprehension = true
+				if err.IsIncomplete() {
+					// TODO: handle incomplete comprehension errors.
+				}
+
+			case *adt.Ellipsis:
+				if j != len(l.list.Elems)-1 {
+					n.addErr(errors.Newf(token.NoPos,
+						"ellipsis must be last element in list"))
+				}
+
+				n.lists[i].elipsis = x
+
+			default:
+				label, err := adt.MakeLabel(x.Source(), index, adt.IntLabel)
+				n.addErr(err)
+				index++ // TODO: don't use insertField.
+				n.insertField(label, adt.MakeConjunct(l.env, x))
+			}
+
+			// Terminate early in case of a runaway comprehension.
+			if !isOpen && int(index) > max {
+				n.invalidListLength(max, int(index), maxNode, l.list)
+				continue outer
+			}
+		}
+
+		switch closed := n.lists[i].elipsis == nil; {
+		case int(index) < max:
+			if closed {
+				n.invalidListLength(int(index), max, l.list, maxNode)
+				continue
+			}
+
+		case int(index) > max,
+			closed && isOpen,
+			(!closed == isOpen) && !hasComprehension:
+			max = int(index)
+			maxNode = l.list
+			isOpen = !closed
+		}
+
+		n.lists[i].n = index
+	}
+
+	// add additionalItem values to list and construct optionals.
+	elems := n.node.Elems()
+	for _, l := range n.vLists {
+		a, _ := l.Closed.(*acceptor)
+		if a == nil {
+			continue
+		}
+
+		newElems := l.Elems()
+		if len(newElems) >= len(elems) {
+			continue // error generated earlier, if applicable.
+		}
+
+		n.optionals = append(n.optionals, a.fields...)
+
+		for _, arc := range elems[len(newElems):] {
+			l.MatchAndInsert(c, arc)
+		}
+	}
+
+	for _, l := range n.lists {
+		if l.elipsis == nil {
+			continue
+		}
+
+		f := fieldSet{env: l.env}
+		f.AddEllipsis(c, l.elipsis)
+
+		n.optionals = append(n.optionals, f)
+
+		for _, arc := range elems[l.n:] {
+			f.MatchAndInsert(c, arc)
+		}
+	}
+
+	sources := []ast.Expr{}
+	// Add conjuncts for additional items.
+	for _, l := range n.lists {
+		if l.elipsis == nil {
+			continue
+		}
+		if src, _ := l.elipsis.Source().(ast.Expr); src != nil {
+			sources = append(sources, src)
+		}
+	}
+
+	n.openList = isOpen
+
+	n.node.SetValue(c, adt.Partial, &adt.ListMarker{
+		Src:    ast.NewBinExpr(token.AND, sources...),
+		IsOpen: isOpen,
+	})
+}
+
+func (n *nodeContext) invalidListLength(na, nb int, a, b adt.Expr) {
+	n.addErr(errors.Newf(n.ctx.Pos(),
+		"incompatible list lengths (%d and %d)", na, nb))
+}
diff --git a/internal/core/eval/eval_test.go b/internal/core/eval/eval_test.go
new file mode 100644
index 0000000..9dea9a5
--- /dev/null
+++ b/internal/core/eval/eval_test.go
@@ -0,0 +1,156 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+import (
+	"flag"
+	"fmt"
+	"testing"
+
+	"cuelang.org/go/cue/format"
+	"cuelang.org/go/cue/parser"
+	"cuelang.org/go/internal/core/adt"
+	"cuelang.org/go/internal/core/compile"
+	"cuelang.org/go/internal/core/debug"
+	"cuelang.org/go/internal/core/runtime"
+	"cuelang.org/go/internal/cuetxtar"
+	"cuelang.org/go/pkg/strings"
+)
+
+var (
+	update = flag.Bool("update", false, "update the test files")
+	todo   = flag.Bool("todo", false, "run tests marked with #todo-compile")
+)
+
+func TestEval(t *testing.T) {
+	test := cuetxtar.TxTarTest{
+		Root:   "../../../cue/testdata",
+		Name:   "eval",
+		Update: *update,
+		Skip:   alwaysSkip,
+		ToDo:   needFix,
+	}
+
+	if *todo {
+		test.ToDo = nil
+	}
+
+	r := runtime.New()
+
+	test.Run(t, func(t *cuetxtar.Test) {
+		a := t.ValidInstances()
+
+		v, err := compile.Files(nil, r, a[0].Files...)
+		if err != nil {
+			t.Fatal(err)
+		}
+
+		e := Evaluator{
+			r:     r,
+			index: r,
+		}
+
+		err = e.Eval(v)
+		t.WriteErrors(err)
+
+		if v == nil {
+			return
+		}
+
+		debug.WriteNode(t, r, v, &debug.Config{Cwd: t.Dir})
+		fmt.Fprintln(t)
+	})
+}
+
+var alwaysSkip = map[string]string{
+	"compile/erralias": "compile error",
+}
+
+var needFix = map[string]string{
+	"fulleval/048_dont_pass_incomplete_values_to_builtins": "import",
+	"fulleval/050_json_Marshaling_detects_incomplete":      "import",
+	"fulleval/051_detectIncompleteYAML":                    "import",
+	"fulleval/052_detectIncompleteJSON":                    "import",
+	"fulleval/056_issue314":                                "import",
+	"resolve/013_custom_validators":                        "import",
+
+	"export/027": "cycle",
+	"export/028": "cycle",
+	"export/030": "cycle",
+
+	"cycle/025_cannot_resolve_references_that_would_be_ambiguous": "cycle",
+
+	"export/020":                  "builtin",
+	"resolve/034_closing_structs": "builtin",
+	"resolve/048_builtins":        "builtin",
+
+	"fulleval/027_len_of_incomplete_types": "builtin",
+
+	"fulleval/032_or_builtin_should_not_fail_on_non-concrete_empty_list": "builtin",
+
+	"fulleval/049_alias_reuse_in_nested_scope": "builtin",
+	"fulleval/053_issue312":                    "builtin",
+}
+
+// TestX is for debugging. Do not delete.
+func TestX(t *testing.T) {
+	t.Skip()
+	in := `
+	// max: >99 | *((5|*1) & 5)
+	// *( 5 | *_|_ )
+	// 1 | *((5|*1) & 5)
+
+
+	max: >= (num+0) | * (num+0)
+	res: !=4 | * 1
+	num:  *(1+(res+0)) | >(res+0)
+
+    // (1 | *2 | 3) & (1 | 2 | *3)
+
+	// m1: (*1 | (*2 | 3)) & (>=2 & <=3)
+	// m2: (*1 | (*2 | 3)) & (2 | 3)
+	// m3: (*1 | *(*2 | 3)) & (2 | 3)
+	// b: (*"a" | "b") | "c"
+	// {a: 1} | {b: 2}
+	`
+
+	if strings.TrimSpace(in) == "" {
+		t.Skip()
+	}
+
+	file, err := parser.ParseFile("TestX", in)
+	if err != nil {
+		t.Fatal(err)
+	}
+	r := runtime.New()
+
+	b, err := format.Node(file)
+	_, _ = b, err
+	// fmt.Println(string(b), err)
+
+	v, err := compile.Files(nil, r, file)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	ctx := NewContext(r, v)
+
+	ctx.Unify(ctx, v, adt.Finalized)
+	// if err != nil {
+	// 	t.Fatal(err)
+	// }
+
+	t.Error(debug.NodeString(r, v, nil))
+}
diff --git a/internal/core/eval/optionals.go b/internal/core/eval/optionals.go
new file mode 100644
index 0000000..1dbd376
--- /dev/null
+++ b/internal/core/eval/optionals.go
@@ -0,0 +1,236 @@
+// Copyright 2020 CUE Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package eval
+
+// TODO: rename this file to fieldset.go
+
+import "cuelang.org/go/internal/core/adt"
+
+// fieldSet represents the fields for a single struct literal, along with
+// the constraints on fields that may be added.
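+//
+// Roughly (a hedged mapping, not normative), for a struct such as
+//
+//    {
+//        a:  1          // explicit field: recorded via MarkField
+//        a?: int        // optional conjunct: recorded via AddOptional
+//        [string]: int  // pattern constraint: recorded via AddBulk
+//        ...            // open struct: recorded via AddEllipsis
+//    }
+//
+// the declarations end up in fields, fields[i].optional, bulk, and isOpen
+// respectively.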
+type fieldSet struct {
+	// TODO: look at consecutive identical environments to figure out
+	// what belongs to same definition?
+	env *adt.Environment
+
+	// fields holds the optional conjuncts of all explicit fields.
+	// Required fields are recorded with an empty list of optional conjuncts.
+	fields []field
+
+	// literal map[adt.Feature][]adt.Node
+
+	// excluded are all literal fields that already exist.
+	bulk       []bulkField
+	additional []adt.Expr
+	isOpen     bool // has a ...
+}
+
+type field struct {
+	label    adt.Feature
+	optional []adt.Node
+}
+
+type bulkField struct {
+	check fieldMatcher
+	expr  adt.Node // *adt.BulkOptionalField // Conjunct
+}
+
+func (o *fieldSet) Accept(c *adt.OpContext, f adt.Feature) bool {
+	if len(o.additional) > 0 {
+		return true
+	}
+	if o.fieldIndex(f) >= 0 {
+		return true
+	}
+	for _, b := range o.bulk {
+		if b.check.Match(c, f) {
+			return true
+		}
+	}
+	return false
+}
+
+// MatchAndInsert finds the matching optional parts for a given Arc and adds
+// their conjuncts. Bulk fields are only applied if no explicit fields match,
+// and additional constraints are only added if neither regular nor bulk
+// fields match.
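+//
+// Schematically (a hedged sketch of the precedence):
+//
+//    x: {
+//        a?: int       // 1. explicit conjuncts, applied when the arc is field a
+//        [string]: <10 // 2. bulk constraints, applied if no explicit field matched
+//        ...           // 3. additional constraints, applied if nothing above matched
+//    }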
+func (o *fieldSet) MatchAndInsert(c *adt.OpContext, arc *adt.Vertex) {
+	env := o.env
+
+	// Match normal fields
+	p := 0
+	for ; p < len(o.fields); p++ {
+		if o.fields[p].label == arc.Label {
+			break
+		}
+	}
+	if p < len(o.fields) {
+		for _, e := range o.fields[p].optional {
+			arc.AddConjunct(adt.MakeConjunct(env, e))
+		}
+		return
+	}
+
+	if !arc.Label.IsRegular() {
+		return
+	}
+
+	bulkEnv := *env
+	bulkEnv.DynamicLabel = arc.Label
+
+	// match bulk optional fields / pattern properties
+	matched := false
+	for _, f := range o.bulk {
+		if f.check.Match(c, arc.Label) {
+			matched = true
+			if f.expr != nil {
+				arc.AddConjunct(adt.MakeConjunct(&bulkEnv, f.expr))
+			}
+		}
+	}
+	if matched {
+		return
+	}
+
+	// match others
+	for _, x := range o.additional {
+		arc.AddConjunct(adt.MakeConjunct(env, x))
+	}
+}
+
+func (o *fieldSet) fieldIndex(f adt.Feature) int {
+	for i := range o.fields {
+		if o.fields[i].label == f {
+			return i
+		}
+	}
+	return -1
+}
+
+func (o *fieldSet) MarkField(c *adt.OpContext, x *adt.Field) {
+	if o.fieldIndex(x.Label) < 0 {
+		o.fields = append(o.fields, field{label: x.Label})
+	}
+}
+
+func (o *fieldSet) AddOptional(c *adt.OpContext, x *adt.OptionalField) {
+	p := o.fieldIndex(x.Label)
+	if p < 0 {
+		p = len(o.fields)
+		o.fields = append(o.fields, field{label: x.Label})
+	}
+	o.fields[p].optional = append(o.fields[p].optional, x)
+}
+
+func (o *fieldSet) AddDynamic(c *adt.OpContext, env *adt.Environment, x *adt.DynamicField) {
+	// not in bulk: count as regular field?
+	o.bulk = append(o.bulk, bulkField{dynamicMatcher{env, x.Key}, nil})
+}
+
+func (o *fieldSet) AddBulk(c *adt.OpContext, x *adt.BulkOptionalField) {
+	v, ok := c.Evaluate(o.env, x.Filter)
+	if !ok {
+		// TODO: handle dynamically
+		return
+	}
+	switch f := v.(type) {
+	case *adt.Num:
+		// Just assert an error. Lists have not been expanded yet at
+		// this point, so there is no need to check for existing
+		// fields.
+		l, err := adt.MakeLabel(x.Src, c.Int64(f), adt.IntLabel)
+		if err != nil {
+			c.AddErr(err)
+			return
+		}
+		o.bulk = append(o.bulk, bulkField{labelMatcher(l), x})
+
+	case *adt.Top:
+		o.bulk = append(o.bulk, bulkField{typeMatcher(adt.TopKind), x})
+
+	case *adt.BasicType:
+		o.bulk = append(o.bulk, bulkField{typeMatcher(f.K), x})
+
+	case *adt.String:
+		l := c.Label(f)
+		o.bulk = append(o.bulk, bulkField{labelMatcher(l), x})
+
+	case adt.Validator:
+		o.bulk = append(o.bulk, bulkField{validateMatcher{f}, x})
+
+	default:
+		// TODO(err): not allowed type
+	}
+}
+
+func (o *fieldSet) AddEllipsis(c *adt.OpContext, x *adt.Ellipsis) {
+	expr := x.Value
+	if x.Value == nil {
+		o.isOpen = true
+		expr = &adt.Top{}
+	}
+	o.additional = append(o.additional, expr)
+}
+
+type fieldMatcher interface {
+	Match(c *adt.OpContext, f adt.Feature) bool
+}
+
+type labelMatcher adt.Feature
+
+func (m labelMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+	return adt.Feature(m) == f
+}
+
+type typeMatcher adt.Kind
+
+func (m typeMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+	switch f.Typ() {
+	case adt.StringLabel:
+		return adt.Kind(m)&adt.StringKind != 0
+
+	case adt.IntLabel:
+		return adt.Kind(m)&adt.IntKind != 0
+	}
+	return false
+}
+
+type validateMatcher struct {
+	adt.Validator
+}
+
+func (m validateMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+	v := f.ToValue(c)
+	return c.Validate(m.Validator, v) == nil
+}
+
+type dynamicMatcher struct {
+	env  *adt.Environment
+	expr adt.Expr
+}
+
+func (m dynamicMatcher) Match(c *adt.OpContext, f adt.Feature) bool {
+	if !f.IsRegular() || !f.IsString() {
+		return false
+	}
+	v, ok := c.Evaluate(m.env, m.expr)
+	if !ok {
+		return false
+	}
+	s, ok := v.(*adt.String)
+	if !ok {
+		return false
+	}
+	return f.SelectorString(c) == s.Str
+}