internal/core/eval: rewrite of closedness algorithm

The spec did not change, but the algorithm is simplified.
Fixes many bugs.

This also adds a new compaction algorithm. This will be
enabled in a follow-up CL.

Also:
- "not allowed" failure is now registered with the child. This
allows multiple failures to be recorded naturally, and
also retains more of the correct structure. But more
importantly, it prevents descending into the substructure,
preventing spurious errors.
- Position information has changed slightly, mostly for
the better.

This introduces a regression on trim. See tools/trim/trim_test.go

Fixes #271
Fixes #320
Fixes #370
Fixes #471
Fixes #476
Fixes #483
Fixes #490
Fixes #491
Fixes #493
Fixes #494
Fixes #496
Fixes #497

Change-Id: Ia90ebbbafd8cfc768188d33f109a2e33a4cee922
Reviewed-on: https://cue-review.googlesource.com/c/cue/+/7002
Reviewed-by: Marcel van Lohuizen <mpvl@golang.org>
diff --git a/cmd/cue/cmd/testdata/script/cmd_closed.txt b/cmd/cue/cmd/testdata/script/cmd_closed.txt
new file mode 100644
index 0000000..ba7ab27
--- /dev/null
+++ b/cmd/cue/cmd/testdata/script/cmd_closed.txt
@@ -0,0 +1,20 @@
+cue cmd run
+
+-- task.cue --
+package ci
+
+// Must have
+// - indirection through definition
+// - unification of two list elements
+// - one of those elements must be _
+// - Must use merge and unify tool file
+
+workflows: #Workflow
+
+#Workflow: ["a"] & [_]
+
+-- task_tool.cue --
+package ci
+
+command: run: {
+}
diff --git a/cmd/cue/cmd/testdata/script/issue476.txt b/cmd/cue/cmd/testdata/script/issue476.txt
new file mode 100644
index 0000000..de5fccb
--- /dev/null
+++ b/cmd/cue/cmd/testdata/script/issue476.txt
@@ -0,0 +1,38 @@
+cue cmd dostuffloop
+cmp stdout expect-stdout
+
+-- x_tool.cue --
+package x
+
+import (
+        "tool/cli"
+        "encoding/yaml"
+)
+
+command: dostuff: {
+        write: cli.Print & {
+                text: "yaml is " + yaml.Marshal(w)
+        }
+}
+
+command: dostuffloop: {
+        for w in l {
+                write: cli.Print & {
+                        text: "yaml is " + yaml.Marshal(w)
+                }
+        }
+}
+-- y.cue --
+package x
+
+#Workflow: {
+  #: "working-directory": string
+}
+
+l: [w]
+w: #Workflow & {
+}
+
+-- expect-stdout --
+yaml is {}
+
diff --git a/cmd/cue/cmd/testdata/script/trim.txt b/cmd/cue/cmd/testdata/script/trim.txt
index f4688ef..b5f0cd5 100644
--- a/cmd/cue/cmd/testdata/script/trim.txt
+++ b/cmd/cue/cmd/testdata/script/trim.txt
@@ -44,6 +44,7 @@
 	}
 
 	t: u: {
+		x: 5
 	}
 }
 
diff --git a/cue/testdata/builtins/closed.txtar b/cue/testdata/builtins/closed.txtar
new file mode 100644
index 0000000..19f3dc7
--- /dev/null
+++ b/cue/testdata/builtins/closed.txtar
@@ -0,0 +1,57 @@
+-- in.cue --
+a: close({
+    a: b: int
+})
+
+b: a & { x: int } // err
+c: a & { a: c: int } // okay (non-recursive close)
+
+-- out/eval --
+Errors:
+b: field `x` not allowed:
+    ./in.cue:1:10
+    ./in.cue:5:10
+
+Result:
+(_|_){
+  // [eval]
+  a: (#struct){
+    a: (#struct){
+      b: (int){ int }
+    }
+  }
+  b: (_|_){
+    // [eval]
+    a: (#struct){
+      b: (int){ int }
+    }
+    x: (_|_){
+      // [eval] b: field `x` not allowed:
+      //     ./in.cue:1:10
+      //     ./in.cue:5:10
+    }
+  }
+  c: (#struct){
+    a: (#struct){
+      b: (int){ int }
+      c: (int){ int }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  a: close({
+    a: {
+      b: int
+    }
+  })
+  b: (〈0;a〉 & {
+    x: int
+  })
+  c: (〈0;a〉 & {
+    a: {
+      c: int
+    }
+  })
+}
diff --git a/cue/testdata/builtins/issue490.txtar b/cue/testdata/builtins/issue490.txtar
new file mode 100644
index 0000000..c415f83
--- /dev/null
+++ b/cue/testdata/builtins/issue490.txtar
@@ -0,0 +1,44 @@
+-- test.cue --
+A: close({
+	a: 1
+	b: 2
+})
+
+B: A & {
+	c: 3
+}
+-- out/eval --
+Errors:
+B: field `c` not allowed:
+    ./test.cue:1:10
+    ./test.cue:7:2
+
+Result:
+(_|_){
+  // [eval]
+  A: (#struct){
+    a: (int){ 1 }
+    b: (int){ 2 }
+  }
+  B: (_|_){
+    // [eval]
+    a: (int){ 1 }
+    b: (int){ 2 }
+    c: (_|_){
+      // [eval] B: field `c` not allowed:
+      //     ./test.cue:1:10
+      //     ./test.cue:7:2
+    }
+  }
+}
+-- out/compile --
+--- test.cue
+{
+  A: close({
+    a: 1
+    b: 2
+  })
+  B: (〈0;A〉 & {
+    c: 3
+  })
+}
diff --git a/cue/testdata/definitions/026_combined_definitions.txtar b/cue/testdata/definitions/026_combined_definitions.txtar
index 5da4669..a5e9bc2 100644
--- a/cue/testdata/definitions/026_combined_definitions.txtar
+++ b/cue/testdata/definitions/026_combined_definitions.txtar
@@ -119,13 +119,13 @@
 -- out/eval --
 Errors:
 #D4.env: field `b` not allowed:
+    ./in.cue:26:7
     ./in.cue:27:7
-    ./in.cue:30:1
     ./in.cue:30:6
 d1.env: field `c` not allowed:
-    ./in.cue:2:1
     ./in.cue:3:7
     ./in.cue:4:7
+    ./in.cue:9:5
     ./in.cue:9:17
 
 Result:
@@ -144,14 +144,16 @@
   d1: (_|_){
     // [eval]
     env: (_|_){
-      // [eval] d1.env: field `c` not allowed:
-      //     ./in.cue:2:1
-      //     ./in.cue:3:7
-      //     ./in.cue:4:7
-      //     ./in.cue:9:17
+      // [eval]
       a: (string){ "A" }
       b: (string){ "B" }
-      c: (string){ "C" }
+      c: (_|_){
+        // [eval] d1.env: field `c` not allowed:
+        //     ./in.cue:3:7
+        //     ./in.cue:4:7
+        //     ./in.cue:9:5
+        //     ./in.cue:9:17
+      }
     }
     #def: (#struct){
       a: (string){ "A" }
@@ -171,12 +173,14 @@
   #D4: (_|_){
     // [eval]
     env: (_|_){
-      // [eval] #D4.env: field `b` not allowed:
-      //     ./in.cue:27:7
-      //     ./in.cue:30:1
-      //     ./in.cue:30:6
+      // [eval]
       a: (int){ int }
-      b: (int){ int }
+      b: (_|_){
+        // [eval] #D4.env: field `b` not allowed:
+        //     ./in.cue:26:7
+        //     ./in.cue:27:7
+        //     ./in.cue:30:6
+      }
     }
   }
   #DC: (#struct){
diff --git a/cue/testdata/definitions/032_definitions_with_embedding.txtar b/cue/testdata/definitions/032_definitions_with_embedding.txtar
index 8727fdf..73714a4 100644
--- a/cue/testdata/definitions/032_definitions_with_embedding.txtar
+++ b/cue/testdata/definitions/032_definitions_with_embedding.txtar
@@ -82,9 +82,10 @@
 -- out/eval --
 Errors:
 #e1.a: field `d` not allowed:
-    ./in.cue:1:1
     ./in.cue:2:5
+    ./in.cue:6:2
     ./in.cue:7:5
+    ./in.cue:12:6
     ./in.cue:12:15
 
 Result:
@@ -105,14 +106,17 @@
   #e1: (_|_){
     // [eval]
     a: (_|_){
-      // [eval] #e1.a: field `d` not allowed:
-      //     ./in.cue:1:1
-      //     ./in.cue:2:5
-      //     ./in.cue:7:5
-      //     ./in.cue:12:15
+      // [eval]
       b: (int){ int }
       c: (int){ int }
-      d: (int){ 4 }
+      d: (_|_){
+        // [eval] #e1.a: field `d` not allowed:
+        //     ./in.cue:2:5
+        //     ./in.cue:6:2
+        //     ./in.cue:7:5
+        //     ./in.cue:12:6
+        //     ./in.cue:12:15
+      }
     }
     b: (int){ 3 }
   }
diff --git "a/cue/testdata/definitions/033_Issue_\043153.txtar" "b/cue/testdata/definitions/033_Issue_\043153.txtar"
index d1bee19..c0bb19c 100644
--- "a/cue/testdata/definitions/033_Issue_\043153.txtar"
+++ "b/cue/testdata/definitions/033_Issue_\043153.txtar"
@@ -63,10 +63,10 @@
 -- out/eval --
 Errors:
 listOfCloseds.0: field `b` not allowed:
-    ./in.cue:5:1
+    ./in.cue:2:21
     ./in.cue:5:10
-    ./in.cue:14:18
-    ./in.cue:15:20
+    ./in.cue:13:1
+    ./in.cue:16:4
 
 Result:
 (_|_){
@@ -74,13 +74,15 @@
   listOfCloseds: (_|_){
     // [eval]
     0: (_|_){
-      // [eval] listOfCloseds.0: field `b` not allowed:
-      //     ./in.cue:5:1
-      //     ./in.cue:5:10
-      //     ./in.cue:14:18
-      //     ./in.cue:15:20
+      // [eval]
       a: (int){ |(*(int){ 0 }, (int){ int }) }
-      b: (int){ 2 }
+      b: (_|_){
+        // [eval] listOfCloseds.0: field `b` not allowed:
+        //     ./in.cue:2:21
+        //     ./in.cue:5:10
+        //     ./in.cue:13:1
+        //     ./in.cue:16:4
+      }
     }
   }
   Foo: (struct){
diff --git a/cue/testdata/definitions/037_closing_with_comprehensions.txtar b/cue/testdata/definitions/037_closing_with_comprehensions.txtar
index e71e890..bbc6641 100644
--- a/cue/testdata/definitions/037_closing_with_comprehensions.txtar
+++ b/cue/testdata/definitions/037_closing_with_comprehensions.txtar
@@ -106,14 +106,13 @@
 Errors:
 #D: cannot mix bulk optional fields with dynamic fields, embeddings, or comprehensions within the same struct
 #E: field `f3` not allowed:
-    ./in.cue:1:1
     ./in.cue:1:5
-    ./in.cue:27:10
-    ./in.cue:28:24
+    ./in.cue:27:5
+    ./in.cue:29:3
 a: field `f3` not allowed:
-    ./in.cue:1:1
     ./in.cue:1:5
-    ./in.cue:4:10
+    ./in.cue:4:5
+    ./in.cue:4:11
 
 Result:
 (_|_){
@@ -133,22 +132,25 @@
     f1: (int){ int }
   }
   #E: (_|_){
-    // [eval] #E: field `f3` not allowed:
-    //     ./in.cue:1:1
-    //     ./in.cue:1:5
-    //     ./in.cue:27:10
-    //     ./in.cue:28:24
+    // [eval]
     f1: (int){ int }
     f2: (int){ int }
-    f3: (int){ int }
+    f3: (_|_){
+      // [eval] #E: field `f3` not allowed:
+      //     ./in.cue:1:5
+      //     ./in.cue:27:5
+      //     ./in.cue:29:3
+    }
   }
   a: (_|_){
-    // [eval] a: field `f3` not allowed:
-    //     ./in.cue:1:1
-    //     ./in.cue:1:5
-    //     ./in.cue:4:10
+    // [eval]
     f1: (int){ int }
     f2: (int){ int }
-    f3: (int){ int }
+    f3: (_|_){
+      // [eval] a: field `f3` not allowed:
+      //     ./in.cue:1:5
+      //     ./in.cue:4:5
+      //     ./in.cue:4:11
+    }
   }
 }
diff --git a/cue/testdata/definitions/037_conjunction_of_optional_sets.txtar b/cue/testdata/definitions/037_conjunction_of_optional_sets.txtar
index db72e06..5f98c63 100644
--- a/cue/testdata/definitions/037_conjunction_of_optional_sets.txtar
+++ b/cue/testdata/definitions/037_conjunction_of_optional_sets.txtar
@@ -53,18 +53,20 @@
 -- out/eval --
 Errors:
 c: field `aaa` not allowed:
-    ./in.cue:1:1
-    ./in.cue:1:5
-    ./in.cue:4:1
+    ./in.cue:2:2
     ./in.cue:4:5
-    ./in.cue:9:10
+    ./in.cue:8:5
+    ./in.cue:8:10
+    ./in.cue:9:5
+    ./in.cue:9:11
 d: field `aaa` not allowed:
-    ./in.cue:1:1
-    ./in.cue:1:5
-    ./in.cue:4:1
+    ./in.cue:2:2
     ./in.cue:4:5
     ./in.cue:11:5
-    ./in.cue:12:9
+    ./in.cue:11:6
+    ./in.cue:11:11
+    ./in.cue:12:4
+    ./in.cue:12:10
 
 Result:
 (_|_){
@@ -76,24 +78,30 @@
   #C: (#struct){
   }
   c: (_|_){
-    // [eval] c: field `aaa` not allowed:
-    //     ./in.cue:1:1
-    //     ./in.cue:1:5
-    //     ./in.cue:4:1
-    //     ./in.cue:4:5
-    //     ./in.cue:9:10
-    aaa: (int){ 3 }
+    // [eval]
+    aaa: (_|_){
+      // [eval] c: field `aaa` not allowed:
+      //     ./in.cue:2:2
+      //     ./in.cue:4:5
+      //     ./in.cue:8:5
+      //     ./in.cue:8:10
+      //     ./in.cue:9:5
+      //     ./in.cue:9:11
+    }
   }
   #D: (#struct){
   }
   d: (_|_){
-    // [eval] d: field `aaa` not allowed:
-    //     ./in.cue:1:1
-    //     ./in.cue:1:5
-    //     ./in.cue:4:1
-    //     ./in.cue:4:5
-    //     ./in.cue:11:5
-    //     ./in.cue:12:9
-    aaa: (int){ 3 }
+    // [eval]
+    aaa: (_|_){
+      // [eval] d: field `aaa` not allowed:
+      //     ./in.cue:2:2
+      //     ./in.cue:4:5
+      //     ./in.cue:11:5
+      //     ./in.cue:11:6
+      //     ./in.cue:11:11
+      //     ./in.cue:12:4
+      //     ./in.cue:12:10
+    }
   }
 }
diff --git a/cue/testdata/definitions/038_continue_recursive_closing_for_optionals.txtar b/cue/testdata/definitions/038_continue_recursive_closing_for_optionals.txtar
index af888a7..44e2052 100644
--- a/cue/testdata/definitions/038_continue_recursive_closing_for_optionals.txtar
+++ b/cue/testdata/definitions/038_continue_recursive_closing_for_optionals.txtar
@@ -39,9 +39,9 @@
 -- out/eval --
 Errors:
 a.v: field `b` not allowed:
-    ./in.cue:1:1
     ./in.cue:2:12
-    ./in.cue:5:5
+    ./in.cue:4:4
+    ./in.cue:5:6
 
 Result:
 (_|_){
@@ -51,11 +51,13 @@
   a: (_|_){
     // [eval]
     v: (_|_){
-      // [eval] a.v: field `b` not allowed:
-      //     ./in.cue:1:1
-      //     ./in.cue:2:12
-      //     ./in.cue:5:5
-      b: (int){ int }
+      // [eval]
+      b: (_|_){
+        // [eval] a.v: field `b` not allowed:
+        //     ./in.cue:2:12
+        //     ./in.cue:4:4
+        //     ./in.cue:5:6
+      }
       a: (int){ int }
     }
   }
diff --git a/cue/testdata/definitions/embed.txtar b/cue/testdata/definitions/embed.txtar
new file mode 100644
index 0000000..42779aa
--- /dev/null
+++ b/cue/testdata/definitions/embed.txtar
@@ -0,0 +1,56 @@
+-- in.cue --
+deployment: [string]: #Deployment
+
+deployment: foo: spec: replicas: 1
+
+#Deployment: {
+	#TypeMeta
+
+	spec: #Spec
+}
+
+#Spec: replicas: int
+
+#TypeMeta: {}
+-- out/eval --
+(struct){
+  deployment: (struct){
+    foo: (#struct){
+      spec: (#struct){
+        replicas: (int){ 1 }
+      }
+    }
+  }
+  #Deployment: (#struct){
+    spec: (#struct){
+      replicas: (int){ int }
+    }
+  }
+  #Spec: (#struct){
+    replicas: (int){ int }
+  }
+  #TypeMeta: (#struct){
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  deployment: {
+    [string]: 〈1;#Deployment〉
+  }
+  deployment: {
+    foo: {
+      spec: {
+        replicas: 1
+      }
+    }
+  }
+  #Deployment: {
+    〈1;#TypeMeta〉
+    spec: 〈1;#Spec〉
+  }
+  #Spec: {
+    replicas: int
+  }
+  #TypeMeta: {}
+}
diff --git a/cue/testdata/definitions/hidden.txtar b/cue/testdata/definitions/hidden.txtar
new file mode 100644
index 0000000..5f900ee
--- /dev/null
+++ b/cue/testdata/definitions/hidden.txtar
@@ -0,0 +1,52 @@
+cue eval ./pkg:foo
+
+-- cue.mod/module.cue --
+module: "example.com"
+
+-- in.cue --
+package foo
+
+import "example.com/pkg"
+
+#def: {
+  _name: d: int
+}
+
+d: pkg.#D & { _name: d: int }
+
+// TODO: this should fail, as the _name restricting it is in this
+// package.
+// e: pkg.#D & #def & { _name: e: int }
+
+-- pkg/bar.cue --
+package pkg
+
+#D: {}
+
+-- out/eval --
+(struct){
+  #def: (#struct){
+    _name: (#struct){
+      d: (int){ int }
+    }
+  }
+  d: (#struct){
+    _name: (#struct){
+      d: (int){ int }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #def: {
+    _name: {
+      d: int
+    }
+  }
+  d: (〈import;"example.com/pkg"〉.#D & {
+    _name: {
+      d: int
+    }
+  })
+}
diff --git a/cue/testdata/definitions/issue271.txtar b/cue/testdata/definitions/issue271.txtar
new file mode 100644
index 0000000..0d2f296
--- /dev/null
+++ b/cue/testdata/definitions/issue271.txtar
@@ -0,0 +1,49 @@
+-- in.cue --
+#T: [_]: _
+#T: close({"a": string})
+x: #T
+x: {
+	a: "hello"
+	b: "foo"
+}
+-- out/eval --
+Errors:
+x: field `b` not allowed:
+    ./in.cue:1:5
+    ./in.cue:2:11
+    ./in.cue:3:4
+    ./in.cue:6:2
+
+Result:
+(_|_){
+  // [eval]
+  #T: (#struct){
+    a: (string){ string }
+  }
+  x: (_|_){
+    // [eval]
+    a: (string){ "hello" }
+    b: (_|_){
+      // [eval] x: field `b` not allowed:
+      //     ./in.cue:1:5
+      //     ./in.cue:2:11
+      //     ./in.cue:3:4
+      //     ./in.cue:6:2
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #T: {
+    [_]: _
+  }
+  #T: close({
+    a: string
+  })
+  x: 〈0;#T〉
+  x: {
+    a: "hello"
+    b: "foo"
+  }
+}
diff --git a/cue/testdata/definitions/issue320.txtar b/cue/testdata/definitions/issue320.txtar
new file mode 100644
index 0000000..be99ac6
--- /dev/null
+++ b/cue/testdata/definitions/issue320.txtar
@@ -0,0 +1,57 @@
+-- in.cue --
+#Foo: {
+	x: string
+	#More
+}
+
+#More: [=~ "^x-"]: _
+
+foo: #Foo & {
+	x: "hello"
+	y: "goodbye"
+}
+-- out/eval --
+Errors:
+foo: field `y` not allowed:
+    ./in.cue:1:7
+    ./in.cue:3:2
+    ./in.cue:6:8
+    ./in.cue:8:6
+    ./in.cue:10:2
+
+Result:
+(_|_){
+  // [eval]
+  #Foo: (#struct){
+    x: (string){ string }
+  }
+  #More: (#struct){
+  }
+  foo: (_|_){
+    // [eval]
+    x: (string){ "hello" }
+    y: (_|_){
+      // [eval] foo: field `y` not allowed:
+      //     ./in.cue:1:7
+      //     ./in.cue:3:2
+      //     ./in.cue:6:8
+      //     ./in.cue:8:6
+      //     ./in.cue:10:2
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #Foo: {
+    x: string
+    〈1;#More〉
+  }
+  #More: {
+    [=~"^x-"]: _
+  }
+  foo: (〈0;#Foo〉 & {
+    x: "hello"
+    y: "goodbye"
+  })
+}
diff --git a/cue/testdata/definitions/issue370.txtar b/cue/testdata/definitions/issue370.txtar
index 17bb590..a4a962d 100644
--- a/cue/testdata/definitions/issue370.txtar
+++ b/cue/testdata/definitions/issue370.txtar
@@ -17,7 +17,7 @@
 
 c2: #C2 & {
   c1
-  //age: 5
+  age: 5
 }
 -- out/eval --
 (struct){
@@ -33,7 +33,7 @@
   }
   c2: (#struct){
     name: (string){ "cueckoo" }
-    age: (int){ int }
+    age: (int){ 5 }
   }
 }
 -- out/compile --
@@ -51,5 +51,6 @@
   })
   c2: (〈0;#C2〉 & {
     〈1;c1〉
+    age: 5
   })
 }
diff --git a/cue/testdata/definitions/issue419.txtar b/cue/testdata/definitions/issue419.txtar
index a620500..e23c6f0 100644
--- a/cue/testdata/definitions/issue419.txtar
+++ b/cue/testdata/definitions/issue419.txtar
@@ -4,7 +4,7 @@
 }
 
 #B: {
-	_b: string
+	b: string
 }
 
 #X: #A | #B
@@ -12,7 +12,7 @@
 l: [...#X]
 
 l: [
-	{_b: "bar"}
+	{b: "bar"}
 ]
 -- out/eval --
 (struct){
@@ -20,16 +20,16 @@
     a: (string){ string }
   }
   #B: (#struct){
-    _b: (string){ string }
+    b: (string){ string }
   }
   #X: (struct){ |((#struct){
       a: (string){ string }
     }, (#struct){
-      _b: (string){ string }
+      b: (string){ string }
     }) }
   l: (#list){
     0: (#struct){
-      _b: (string){ "bar" }
+      b: (string){ "bar" }
     }
   }
 }
@@ -40,7 +40,7 @@
     a: string
   }
   #B: {
-    _b: string
+    b: string
   }
   #X: (〈0;#A〉|〈0;#B〉)
   l: [
@@ -48,7 +48,7 @@
   ]
   l: [
     {
-      _b: "bar"
+      b: "bar"
     },
   ]
 }
diff --git a/cue/testdata/definitions/issue471.txtar b/cue/testdata/definitions/issue471.txtar
new file mode 100644
index 0000000..2baf609
--- /dev/null
+++ b/cue/testdata/definitions/issue471.txtar
@@ -0,0 +1,71 @@
+-- in.cue --
+package x
+
+#a: (#c | #d) & {
+	name: string
+}
+
+#a1: #c & {
+	name: string
+}
+
+#a2: #d & {
+	name: string
+}
+
+#c: {
+	name: string
+	age:  int
+}
+
+#d: {
+	name:    string
+	address: string
+}
+-- out/eval --
+(struct){
+  #a: (struct){ |((#struct){
+      name: (string){ string }
+      age: (int){ int }
+    }, (#struct){
+      name: (string){ string }
+      address: (string){ string }
+    }) }
+  #a1: (#struct){
+    name: (string){ string }
+    age: (int){ int }
+  }
+  #a2: (#struct){
+    name: (string){ string }
+    address: (string){ string }
+  }
+  #c: (#struct){
+    name: (string){ string }
+    age: (int){ int }
+  }
+  #d: (#struct){
+    name: (string){ string }
+    address: (string){ string }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #a: ((〈0;#c〉|〈0;#d〉) & {
+    name: string
+  })
+  #a1: (〈0;#c〉 & {
+    name: string
+  })
+  #a2: (〈0;#d〉 & {
+    name: string
+  })
+  #c: {
+    name: string
+    age: int
+  }
+  #d: {
+    name: string
+    address: string
+  }
+}
diff --git a/cue/testdata/definitions/issue483.txtar b/cue/testdata/definitions/issue483.txtar
new file mode 100644
index 0000000..769980a
--- /dev/null
+++ b/cue/testdata/definitions/issue483.txtar
@@ -0,0 +1,56 @@
+-- in.cue --
+out: {
+        instance
+}
+instance: #Type & {
+        alpha: bravo: charlie: true
+}
+#Type: #Root & {
+        alpha?: bravo?: charlie?: bool
+}
+#Root: { ... }
+-- out/eval --
+(struct){
+  out: (#struct){
+    alpha: (#struct){
+      bravo: (#struct){
+        charlie: (bool){ true }
+      }
+    }
+  }
+  instance: (#struct){
+    alpha: (#struct){
+      bravo: (#struct){
+        charlie: (bool){ true }
+      }
+    }
+  }
+  #Type: (#struct){
+  }
+  #Root: (#struct){
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  out: {
+    〈1;instance〉
+  }
+  instance: (〈0;#Type〉 & {
+    alpha: {
+      bravo: {
+        charlie: true
+      }
+    }
+  })
+  #Type: (〈0;#Root〉 & {
+    alpha?: {
+      bravo?: {
+        charlie?: bool
+      }
+    }
+  })
+  #Root: {
+    ...
+  }
+}
diff --git a/cue/testdata/definitions/issue491.txtar b/cue/testdata/definitions/issue491.txtar
new file mode 100644
index 0000000..9c4d675
--- /dev/null
+++ b/cue/testdata/definitions/issue491.txtar
@@ -0,0 +1,115 @@
+-- in.cue --
+package x
+
+#Prestep: {
+	Args: *null | _
+}
+
+#PrestepNewUser: #Prestep & {
+	Args: #NewUser
+}
+
+#NewUser: {
+	Repos: [...#Repo]
+}
+
+#Repo: {
+	Var:     string
+	Pattern: *"*" | string
+}
+
+x: [...#Repo]
+x: [{
+	Var: "REPO1"
+}]
+
+y: #Repo & {
+	Var: "REPO1"
+}
+
+z: #PrestepNewUser & {
+	Args: {
+		Repos: [ {
+			Var: "REPO1"
+		}]
+	}
+}
+-- out/eval --
+(struct){
+  #Prestep: (#struct){
+    Args: (_){ |(*(null){ null }, (_){ _ }) }
+  }
+  #PrestepNewUser: (#struct){
+    Args: (#struct){
+      Repos: (list){
+      }
+    }
+  }
+  #NewUser: (#struct){
+    Repos: (list){
+    }
+  }
+  #Repo: (#struct){
+    Var: (string){ string }
+    Pattern: (string){ |(*(string){ "*" }, (string){ string }) }
+  }
+  x: (#list){
+    0: (#struct){
+      Var: (string){ "REPO1" }
+      Pattern: (string){ |(*(string){ "*" }, (string){ string }) }
+    }
+  }
+  y: (#struct){
+    Var: (string){ "REPO1" }
+    Pattern: (string){ |(*(string){ "*" }, (string){ string }) }
+  }
+  z: (#struct){
+    Args: (#struct){
+      Repos: (#list){
+        0: (#struct){
+          Var: (string){ "REPO1" }
+          Pattern: (string){ |(*(string){ "*" }, (string){ string }) }
+        }
+      }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #Prestep: {
+    Args: (*null|_)
+  }
+  #PrestepNewUser: (〈0;#Prestep〉 & {
+    Args: 〈1;#NewUser〉
+  })
+  #NewUser: {
+    Repos: [
+      ...〈1;#Repo〉,
+    ]
+  }
+  #Repo: {
+    Var: string
+    Pattern: (*"*"|string)
+  }
+  x: [
+    ...〈0;#Repo〉,
+  ]
+  x: [
+    {
+      Var: "REPO1"
+    },
+  ]
+  y: (〈0;#Repo〉 & {
+    Var: "REPO1"
+  })
+  z: (〈0;#PrestepNewUser〉 & {
+    Args: {
+      Repos: [
+        {
+          Var: "REPO1"
+        },
+      ]
+    }
+  })
+}
diff --git a/cue/testdata/definitions/issue493.txtar b/cue/testdata/definitions/issue493.txtar
new file mode 100644
index 0000000..da75387
--- /dev/null
+++ b/cue/testdata/definitions/issue493.txtar
@@ -0,0 +1,56 @@
+-- in.cue --
+#Artifact: {
+  body: _
+  other: [string]: int
+}
+
+#App:  #Artifact
+#Atom: #Artifact
+
+#Both: #App | #Atom
+
+t1: #Both  & {body: 3}
+-- out/eval --
+(struct){
+  #Artifact: (#struct){
+    body: (_){ _ }
+    other: (#struct){
+    }
+  }
+  #App: (#struct){
+    body: (_){ _ }
+    other: (#struct){
+    }
+  }
+  #Atom: (#struct){
+    body: (_){ _ }
+    other: (#struct){
+    }
+  }
+  #Both: (#struct){
+    body: (_){ _ }
+    other: (#struct){
+    }
+  }
+  t1: (#struct){
+    body: (int){ 3 }
+    other: (#struct){
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #Artifact: {
+    body: _
+    other: {
+      [string]: int
+    }
+  }
+  #App: 〈0;#Artifact〉
+  #Atom: 〈0;#Artifact〉
+  #Both: (〈0;#App〉|〈0;#Atom〉)
+  t1: (〈0;#Both〉 & {
+    body: 3
+  })
+}
diff --git a/cue/testdata/definitions/issue496.txtar b/cue/testdata/definitions/issue496.txtar
new file mode 100644
index 0000000..aefa15e
--- /dev/null
+++ b/cue/testdata/definitions/issue496.txtar
@@ -0,0 +1,36 @@
+-- in.cue --
+#A : _
+
+#N: #A & {
+  _E: {
+    name: "hello"
+  }
+}
+
+l: #N
+
+-- out/eval --
+(struct){
+  #A: (_){ _ }
+  #N: (#struct){
+    _E: (#struct){
+      name: (string){ "hello" }
+    }
+  }
+  l: (#struct){
+    _E: (#struct){
+      name: (string){ "hello" }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #A: _
+  #N: (〈0;#A〉 & {
+    _E: {
+      name: "hello"
+    }
+  })
+  l: 〈0;#N〉
+}
diff --git a/cue/testdata/definitions/issue497.txtar b/cue/testdata/definitions/issue497.txtar
new file mode 100644
index 0000000..8cae784
--- /dev/null
+++ b/cue/testdata/definitions/issue497.txtar
@@ -0,0 +1,42 @@
+-- in.cue --
+#A : _
+
+#N: #A & {
+  f: j: {
+    n: "hi"
+  }
+}
+
+l: #N
+
+-- out/eval --
+(struct){
+  #A: (_){ _ }
+  #N: (#struct){
+    f: (#struct){
+      j: (#struct){
+        n: (string){ "hi" }
+      }
+    }
+  }
+  l: (#struct){
+    f: (#struct){
+      j: (#struct){
+        n: (string){ "hi" }
+      }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #A: _
+  #N: (〈0;#A〉 & {
+    f: {
+      j: {
+        n: "hi"
+      }
+    }
+  })
+  l: 〈0;#N〉
+}
diff --git a/cue/testdata/definitions/visibility.txtar b/cue/testdata/definitions/visibility.txtar
new file mode 100644
index 0000000..3ce9b7d
--- /dev/null
+++ b/cue/testdata/definitions/visibility.txtar
@@ -0,0 +1,35 @@
+-- in.cue --
+#foo: {
+	name: string
+}
+foo: #foo & {
+    // These should all be allowed.
+    _name: "foo"
+    _#name: "bar"
+
+    #name: "baz" // TODO: this should not be allowed.
+}
+-- out/eval --
+(struct){
+  #foo: (#struct){
+    name: (string){ string }
+  }
+  foo: (#struct){
+    name: (string){ string }
+    _name: (string){ "foo" }
+    _#name: (string){ "bar" }
+    #name: (string){ "baz" }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  #foo: {
+    name: string
+  }
+  foo: (〈0;#foo〉 & {
+    _name: "foo"
+    _#name: "bar"
+    #name: "baz"
+  })
+}
diff --git a/cue/testdata/eval/bulk.txtar b/cue/testdata/eval/bulk.txtar
index dd0a305..df71f2b 100644
--- a/cue/testdata/eval/bulk.txtar
+++ b/cue/testdata/eval/bulk.txtar
@@ -31,13 +31,13 @@
 -- out/eval --
 Errors:
 t1.c: field `z` not allowed:
-    ./in.cue:15:3
     ./in.cue:15:7
-    ./in.cue:19:11
+    ./in.cue:19:6
+    ./in.cue:19:13
 t2.c: field `z` not allowed:
-    ./in.cue:23:3
     ./in.cue:23:7
-    ./in.cue:27:11
+    ./in.cue:27:6
+    ./in.cue:27:13
 
 Result:
 (_|_){
@@ -65,11 +65,13 @@
       f: (int){ 4 }
     }
     c: (_|_){
-      // [eval] t1.c: field `z` not allowed:
-      //     ./in.cue:15:3
-      //     ./in.cue:15:7
-      //     ./in.cue:19:11
-      z: (int){ 4 }
+      // [eval]
+      z: (_|_){
+        // [eval] t1.c: field `z` not allowed:
+        //     ./in.cue:15:7
+        //     ./in.cue:19:6
+        //     ./in.cue:19:13
+      }
     }
   }
   t2: (_|_){
@@ -80,11 +82,13 @@
       x: (int){ 4 }
     }
     c: (_|_){
-      // [eval] t2.c: field `z` not allowed:
-      //     ./in.cue:23:3
-      //     ./in.cue:23:7
-      //     ./in.cue:27:11
-      z: (int){ 4 }
+      // [eval]
+      z: (_|_){
+        // [eval] t2.c: field `z` not allowed:
+        //     ./in.cue:23:7
+        //     ./in.cue:27:6
+        //     ./in.cue:27:13
+      }
     }
   }
 }
diff --git a/cue/testdata/eval/closed_disjunction.txtar b/cue/testdata/eval/closed_disjunction.txtar
index 78e6872..5d8c552 100644
--- a/cue/testdata/eval/closed_disjunction.txtar
+++ b/cue/testdata/eval/closed_disjunction.txtar
@@ -15,10 +15,18 @@
 }
 -- out/eval --
 Errors:
+b: field `c` not allowed:
+    ./in.cue:1:5
+    ./in.cue:3:5
+    ./in.cue:3:35
+    ./in.cue:11:4
+    ./in.cue:12:5
 b: field `d` not allowed:
     ./in.cue:1:5
+    ./in.cue:3:5
     ./in.cue:3:35
-    ./in.cue:11:9
+    ./in.cue:11:4
+    ./in.cue:13:5
 
 Result:
 (_|_){
@@ -30,12 +38,23 @@
     c: (int){ 3 }
   }
   b: (_|_){
-    // [eval] b: field `d` not allowed:
-    //     ./in.cue:1:5
-    //     ./in.cue:3:35
-    //     ./in.cue:11:9
-    c: (int){ 3 }
-    d: (int){ 4 }
+    // [eval]
+    c: (_|_){
+      // [eval] b: field `c` not allowed:
+      //     ./in.cue:1:5
+      //     ./in.cue:3:5
+      //     ./in.cue:3:35
+      //     ./in.cue:11:4
+      //     ./in.cue:12:5
+    }
+    d: (_|_){
+      // [eval] b: field `d` not allowed:
+      //     ./in.cue:1:5
+      //     ./in.cue:3:5
+      //     ./in.cue:3:35
+      //     ./in.cue:11:4
+      //     ./in.cue:13:5
+    }
   }
 }
 -- out/compile --
diff --git a/cue/testdata/eval/closedness.txtar b/cue/testdata/eval/closedness.txtar
index 2023486..db75799 100644
--- a/cue/testdata/eval/closedness.txtar
+++ b/cue/testdata/eval/closedness.txtar
@@ -19,10 +19,11 @@
 -- out/eval --
 Errors:
 a.q: field `e` not allowed:
-    ./in.cue:1:1
     ./in.cue:1:5
     ./in.cue:6:8
-    ./in.cue:13:8
+    ./in.cue:7:9
+    ./in.cue:11:4
+    ./in.cue:15:9
 
 Result:
 (_|_){
@@ -41,14 +42,17 @@
     // [eval]
     b: (int){ 3 }
     q: (_|_){
-      // [eval] a.q: field `e` not allowed:
-      //     ./in.cue:1:1
-      //     ./in.cue:1:5
-      //     ./in.cue:6:8
-      //     ./in.cue:13:8
+      // [eval]
       c: (int){ 2 }
       d: (int){ int }
-      e: (int){ 43 }
+      e: (_|_){
+        // [eval] a.q: field `e` not allowed:
+        //     ./in.cue:1:5
+        //     ./in.cue:6:8
+        //     ./in.cue:7:9
+        //     ./in.cue:11:4
+        //     ./in.cue:15:9
+      }
     }
   }
 }
diff --git a/cue/testdata/eval/issue494.txtar b/cue/testdata/eval/issue494.txtar
new file mode 100644
index 0000000..45a174b
--- /dev/null
+++ b/cue/testdata/eval/issue494.txtar
@@ -0,0 +1,115 @@
+-- in.cue --
+_Q : [{pos: 0},{pos: 1}]
+
+a: [rn=string]:  _Q[0:len(a[rn])]
+a: ben: [{}]
+
+b: [rn=string]: _Q[0:1]
+b: ben: [{}]
+
+c: [rn=string]: [...{l: len(a[rn])}]
+c: ben: [{}]
+
+#d: [rn=string]:  [...{pos:uint}] & _Q[0:len(#d[rn])]
+#d: ben: [{}]
+
+d: #d
+-- out/eval --
+(struct){
+  _Q: (#list){
+    0: (struct){
+      pos: (int){ 0 }
+    }
+    1: (struct){
+      pos: (int){ 1 }
+    }
+  }
+  a: (struct){
+    ben: (#list){
+      0: (struct){
+        pos: (int){ 0 }
+      }
+    }
+  }
+  b: (struct){
+    ben: (#list){
+      0: (struct){
+        pos: (int){ 0 }
+      }
+    }
+  }
+  c: (struct){
+    ben: (#list){
+      0: (struct){
+        l: (int){ 1 }
+      }
+    }
+  }
+  #d: (#struct){
+    ben: (#list){
+      0: (#struct){
+        pos: (int){ 0 }
+      }
+    }
+  }
+  d: (#struct){
+    ben: (#list){
+      0: (#struct){
+        pos: (int){ 0 }
+      }
+    }
+  }
+}
+-- out/compile --
+--- in.cue
+{
+  _Q: [
+    {
+      pos: 0
+    },
+    {
+      pos: 1
+    },
+  ]
+  a: {
+    [string]: 〈1;_Q〉[0:len(〈1;a〉[〈0;-〉])]
+  }
+  a: {
+    ben: [
+      {},
+    ]
+  }
+  b: {
+    [string]: 〈1;_Q〉[0:1]
+  }
+  b: {
+    ben: [
+      {},
+    ]
+  }
+  c: {
+    [string]: [
+      ...{
+        l: len(〈2;a〉[〈1;-〉])
+      },
+    ]
+  }
+  c: {
+    ben: [
+      {},
+    ]
+  }
+  #d: {
+    [string]: ([
+      ...{
+        pos: &(int, >=0)
+      },
+    ] & 〈1;_Q〉[0:len(〈1;#d〉[〈0;-〉])])
+  }
+  #d: {
+    ben: [
+      {},
+    ]
+  }
+  d: 〈0;#d〉
+}
diff --git a/cue/testdata/export/007.txtar b/cue/testdata/export/007.txtar
index d69ae88..d1dd776 100644
--- a/cue/testdata/export/007.txtar
+++ b/cue/testdata/export/007.txtar
@@ -1,7 +1,11 @@
-# DO NOT EDIT; generated by go run testdata/gen.go
-#
 -- in.cue --
-{#a: {b: 2.0, s: "abc"}, b: #a.b, c: #a.c, d: #a["d"], e: #a.t[2:3]}
+{
+  #a: {b: 2.0, s: "abc"}
+  b: #a.b
+  c: #a.c
+  d: #a["d"]
+  e: #a.t[2:3]
+}
 -- out/def --
 #a: {
 	b: 2.0
@@ -28,11 +32,11 @@
 -- out/eval --
 Errors:
 c: undefined field c:
-    ./in.cue:1:41
+    ./in.cue:4:9
 d: undefined field d:
-    ./in.cue:1:50
+    ./in.cue:5:9
 e: undefined field t:
-    ./in.cue:1:62
+    ./in.cue:6:9
 
 Result:
 (_|_){
@@ -44,14 +48,14 @@
   b: (float){ 2.0 }
   c: (_|_){
     // [eval] c: undefined field c:
-    //     ./in.cue:1:41
+    //     ./in.cue:4:9
   }
   d: (_|_){
     // [eval] d: undefined field d:
-    //     ./in.cue:1:50
+    //     ./in.cue:5:9
   }
   e: (_|_){
     // [eval] e: undefined field t:
-    //     ./in.cue:1:62
+    //     ./in.cue:6:9
   }
 }
diff --git a/cue/testdata/fulleval/035_optionals_with_label_filters.txtar b/cue/testdata/fulleval/035_optionals_with_label_filters.txtar
index e9c1cf8..aae6258 100644
--- a/cue/testdata/fulleval/035_optionals_with_label_filters.txtar
+++ b/cue/testdata/fulleval/035_optionals_with_label_filters.txtar
@@ -106,11 +106,13 @@
     ./in.cue:6:8
     ./in.cue:7:2
     ./in.cue:9:2
+    ./in.cue:15:17
     ./in.cue:16:8
 jobs3: field `fooTest1` not allowed:
     ./in.cue:6:8
     ./in.cue:7:2
     ./in.cue:9:2
+    ./in.cue:21:8
     ./in.cue:22:8
     ./in.cue:23:8
 jobs2.fooTest.name: invalid value "badName" (out of bound =~"^test"):
@@ -132,12 +134,14 @@
     }
   }
   jobs1: (_|_){
-    // [eval] jobs1: field `foo1` not allowed:
-    //     ./in.cue:6:8
-    //     ./in.cue:7:2
-    //     ./in.cue:9:2
-    //     ./in.cue:16:8
-    foo1: (#struct){
+    // [eval]
+    foo1: (_|_){
+      // [eval] jobs1: field `foo1` not allowed:
+      //     ./in.cue:6:8
+      //     ./in.cue:7:2
+      //     ./in.cue:9:2
+      //     ./in.cue:15:17
+      //     ./in.cue:16:8
     }
   }
   jobs2: (_|_){
@@ -152,15 +156,15 @@
     }
   }
   jobs3: (_|_){
-    // [eval] jobs3: field `fooTest1` not allowed:
-    //     ./in.cue:6:8
-    //     ./in.cue:7:2
-    //     ./in.cue:9:2
-    //     ./in.cue:22:8
-    //     ./in.cue:23:8
-    fooTest1: (#struct){
-      name: (string){ "badName" }
-      cmd: (string){ string }
+    // [eval]
+    fooTest1: (_|_){
+      // [eval] jobs3: field `fooTest1` not allowed:
+      //     ./in.cue:6:8
+      //     ./in.cue:7:2
+      //     ./in.cue:9:2
+      //     ./in.cue:21:8
+      //     ./in.cue:22:8
+      //     ./in.cue:23:8
     }
   }
 }
diff --git a/cue/testdata/resolve/025_definitions.txtar b/cue/testdata/resolve/025_definitions.txtar
index 9bb0833..ad7ec04 100644
--- a/cue/testdata/resolve/025_definitions.txtar
+++ b/cue/testdata/resolve/025_definitions.txtar
@@ -120,13 +120,13 @@
 -- out/eval --
 Errors:
 foo: field `feild` not allowed:
-    ./in.cue:1:1
     ./in.cue:1:7
-    ./in.cue:13:6
+    ./in.cue:12:6
+    ./in.cue:13:7
 foo1.recursive: field `feild` not allowed:
-    ./in.cue:1:1
     ./in.cue:3:13
-    ./in.cue:18:13
+    ./in.cue:15:7
+    ./in.cue:19:3
 
 Result:
 (_|_){
@@ -142,26 +142,30 @@
     field2: (string){ string }
   }
   foo: (_|_){
-    // [eval] foo: field `feild` not allowed:
-    //     ./in.cue:1:1
-    //     ./in.cue:1:7
-    //     ./in.cue:13:6
+    // [eval]
     field: (int){ int }
     recursive: (#struct){
       field: (string){ string }
     }
-    feild: (int){ 2 }
+    feild: (_|_){
+      // [eval] foo: field `feild` not allowed:
+      //     ./in.cue:1:7
+      //     ./in.cue:12:6
+      //     ./in.cue:13:7
+    }
   }
   foo1: (_|_){
     // [eval]
     field: (int){ 2 }
     recursive: (_|_){
-      // [eval] foo1.recursive: field `feild` not allowed:
-      //     ./in.cue:1:1
-      //     ./in.cue:3:13
-      //     ./in.cue:18:13
+      // [eval]
       field: (string){ string }
-      feild: (int){ 2 }
+      feild: (_|_){
+        // [eval] foo1.recursive: field `feild` not allowed:
+        //     ./in.cue:3:13
+        //     ./in.cue:15:7
+        //     ./in.cue:19:3
+      }
     }
   }
   #Bar: (#struct){
diff --git a/cue/testdata/resolve/029_non-closed_definition_carries_over_closedness_to_enclosed_template.txtar b/cue/testdata/resolve/029_non-closed_definition_carries_over_closedness_to_enclosed_template.txtar
index 69be4d5..d613717 100644
--- a/cue/testdata/resolve/029_non-closed_definition_carries_over_closedness_to_enclosed_template.txtar
+++ b/cue/testdata/resolve/029_non-closed_definition_carries_over_closedness_to_enclosed_template.txtar
@@ -105,17 +105,17 @@
 -- out/eval --
 Errors:
 a.v: field `b` not allowed:
-    ./in.cue:1:1
     ./in.cue:2:12
-    ./in.cue:5:5
+    ./in.cue:4:4
+    ./in.cue:5:6
 b.w: field `c` not allowed:
-    ./in.cue:7:1
     ./in.cue:8:23
-    ./in.cue:11:5
+    ./in.cue:10:4
+    ./in.cue:11:6
 c.w.0: field `d` not allowed:
-    ./in.cue:13:1
     ./in.cue:14:13
-    ./in.cue:17:6
+    ./in.cue:16:4
+    ./in.cue:17:7
 
 Result:
 (_|_){
@@ -125,11 +125,13 @@
   a: (_|_){
     // [eval]
     v: (_|_){
-      // [eval] a.v: field `b` not allowed:
-      //     ./in.cue:1:1
-      //     ./in.cue:2:12
-      //     ./in.cue:5:5
-      b: (int){ int }
+      // [eval]
+      b: (_|_){
+        // [eval] a.v: field `b` not allowed:
+        //     ./in.cue:2:12
+        //     ./in.cue:4:4
+        //     ./in.cue:5:6
+      }
       a: (int){ int }
     }
   }
@@ -138,11 +140,13 @@
   b: (_|_){
     // [eval]
     w: (_|_){
-      // [eval] b.w: field `c` not allowed:
-      //     ./in.cue:7:1
-      //     ./in.cue:8:23
-      //     ./in.cue:11:5
-      c: (int){ int }
+      // [eval]
+      c: (_|_){
+        // [eval] b.w: field `c` not allowed:
+        //     ./in.cue:8:23
+        //     ./in.cue:10:4
+        //     ./in.cue:11:6
+      }
       b: (int){ int }
     }
   }
@@ -153,11 +157,13 @@
     w: (_|_){
       // [eval]
       0: (_|_){
-        // [eval] c.w.0: field `d` not allowed:
-        //     ./in.cue:13:1
-        //     ./in.cue:14:13
-        //     ./in.cue:17:6
-        d: (int){ int }
+        // [eval]
+        d: (_|_){
+          // [eval] c.w.0: field `d` not allowed:
+          //     ./in.cue:14:13
+          //     ./in.cue:16:4
+          //     ./in.cue:17:7
+        }
         a: (int){ int }
       }
       1: (#struct){
diff --git a/cue/testdata/resolve/030_definitions_with_disjunctions.txtar b/cue/testdata/resolve/030_definitions_with_disjunctions.txtar
index d21c4cd..b90ba43 100644
--- a/cue/testdata/resolve/030_definitions_with_disjunctions.txtar
+++ b/cue/testdata/resolve/030_definitions_with_disjunctions.txtar
@@ -63,8 +63,10 @@
 Errors:
 bar: field `c` not allowed:
     ./in.cue:1:7
+    ./in.cue:4:2
     ./in.cue:5:2
-    ./in.cue:12:6
+    ./in.cue:11:6
+    ./in.cue:12:7
 
 Result:
 (_|_){
@@ -81,12 +83,16 @@
     a: (int){ 1 }
   }
   bar: (_|_){
-    // [eval] bar: field `c` not allowed:
-    //     ./in.cue:1:7
-    //     ./in.cue:5:2
-    //     ./in.cue:12:6
+    // [eval]
     field: (int){ int }
-    c: (int){ 2 }
+    c: (_|_){
+      // [eval] bar: field `c` not allowed:
+      //     ./in.cue:1:7
+      //     ./in.cue:4:2
+      //     ./in.cue:5:2
+      //     ./in.cue:11:6
+      //     ./in.cue:12:7
+    }
     b: (int){ 2 }
   }
   baz: (#struct){
diff --git a/cue/testdata/resolve/035_excluded_embedding_from_closing.txtar b/cue/testdata/resolve/035_excluded_embedding_from_closing.txtar
index ebd92c4..ee23ad2 100644
--- a/cue/testdata/resolve/035_excluded_embedding_from_closing.txtar
+++ b/cue/testdata/resolve/035_excluded_embedding_from_closing.txtar
@@ -65,11 +65,14 @@
 -- out/eval --
 Errors:
 V.b: field `extra` not allowed:
+    ./in.cue:3:2
     ./in.cue:6:10
+    ./in.cue:9:4
     ./in.cue:11:5
 V.c: field `e` not allowed:
-    ./in.cue:1:5
+    ./in.cue:3:2
     ./in.cue:4:6
+    ./in.cue:9:4
     ./in.cue:10:5
 
 Result:
@@ -89,22 +92,29 @@
   V: (_|_){
     // [eval]
     c: (_|_){
-      // [eval] V.c: field `e` not allowed:
-      //     ./in.cue:1:5
-      //     ./in.cue:4:6
-      //     ./in.cue:10:5
+      // [eval]
       d: (int){ int }
-      e: (int){ int }
+      e: (_|_){
+        // [eval] V.c: field `e` not allowed:
+        //     ./in.cue:3:2
+        //     ./in.cue:4:6
+        //     ./in.cue:9:4
+        //     ./in.cue:10:5
+      }
     }
     a: (#struct){
       c: (int){ int }
     }
     b: (_|_){
-      // [eval] V.b: field `extra` not allowed:
-      //     ./in.cue:6:10
-      //     ./in.cue:11:5
+      // [eval]
       open: (int){ int }
-      extra: (int){ int }
+      extra: (_|_){
+        // [eval] V.b: field `extra` not allowed:
+        //     ./in.cue:3:2
+        //     ./in.cue:6:10
+        //     ./in.cue:9:4
+        //     ./in.cue:11:5
+      }
     }
   }
 }
diff --git a/cue/types.go b/cue/types.go
index 0848bbb..c871167 100644
--- a/cue/types.go
+++ b/cue/types.go
@@ -647,9 +647,10 @@
 	if v, ok := v.(*adt.Vertex); ok && v.Status() >= adt.Partial {
 		return Value{base.idx, v}
 	}
-	n := &adt.Vertex{Parent: base.v.Parent, Label: base.v.Label}
+	n := &adt.Vertex{Label: base.v.Label}
 	n.AddConjunct(adt.MakeRootConjunct(env, v))
 	n = base.ctx().manifest(n)
+	n.Parent = base.v.Parent
 	return makeValue(base.idx, n)
 }
 
@@ -1555,12 +1556,13 @@
 	var value = convert.GoValueToValue(ctx.opCtx, x, true)
 	n, _ := value.(*adt.Vertex)
 	if n == nil {
-		n = &adt.Vertex{Label: v.v.Label, Parent: v.v.Parent}
+		n = &adt.Vertex{Label: v.v.Label}
 		n.AddConjunct(adt.MakeRootConjunct(nil, value))
 	} else {
 		n.Label = v.v.Label
 	}
 	n.Finalize(ctx.opCtx)
+	n.Parent = v.v.Parent
 	w := makeValue(v.idx, n)
 	return v.Unify(w)
 }
@@ -1652,12 +1654,16 @@
 	if w.v == nil {
 		return v
 	}
-	n := &adt.Vertex{Parent: v.v.Parent, Label: v.v.Label}
+	n := &adt.Vertex{Label: v.v.Label}
 	n.AddConjunct(adt.MakeRootConjunct(nil, v.v))
 	n.AddConjunct(adt.MakeRootConjunct(nil, w.v))
 
 	ctx := v.idx.newContext()
 	n.Finalize(ctx.opCtx)
+
+	// We do not set the parent until here as we don't want to "inherit" the
+	// closedness setting from v.
+	n.Parent = v.v.Parent
 	return makeValue(v.idx, n)
 }
 
@@ -1680,7 +1686,7 @@
 
 	e := eval.New(v.idx.Runtime)
 	ctx := e.NewContext(n)
-	e.UnifyAccept(ctx, n, adt.Finalized, accept.v.Closed)
+	e.UnifyAccept(ctx, n, adt.Finalized, accept.v.Closed) // TODO: check accept.v.Closed for nil.
 
 	// ctx := v.idx.newContext()
 	n.Closed = accept.v.Closed
@@ -2100,6 +2106,8 @@
 			a := []Value{}
 			ctx := v.ctx().opCtx
 			for _, c := range v.v.Conjuncts {
+				// Keep parent here. TODO: do we need to remove the requirement
+				// from other conjuncts?
 				n := &adt.Vertex{
 					Parent: v.v.Parent,
 					Label:  v.v.Label,
@@ -2161,7 +2169,6 @@
 			if disjunct.Default {
 				for _, n := range x.Values {
 					a := adt.Vertex{
-						Parent: v.v.Parent,
 						Label:  v.v.Label,
 						Closed: v.v.Closed,
 					}
@@ -2173,6 +2180,7 @@
 					ctx := e.NewContext(nil)
 					e.UnifyAccept(ctx, &a, adt.Finalized, v.v.Closed)
 					e.UnifyAccept(ctx, &b, adt.Finalized, v.v.Closed)
+					a.Parent = v.v.Parent
 					if !n.Default && subsume.Value(ctx, &a, &b) == nil {
 						continue outerExpr
 					}
@@ -2243,12 +2251,11 @@
 			switch x := d.(type) {
 			case adt.Expr:
 				// embedding
-				n := &adt.Vertex{
-					Parent: v.v.Parent,
-					Label:  v.v.Label}
+				n := &adt.Vertex{Label: v.v.Label}
 				c := adt.MakeRootConjunct(envEmbed, x)
 				n.AddConjunct(c)
 				n.Finalize(ctx)
+				n.Parent = v.v.Parent
 				a = append(a, makeValue(v.idx, n))
 
 			default:
@@ -2262,14 +2269,14 @@
 
 		if len(fields) > 0 {
 			n := &adt.Vertex{
-				Parent: v.v.Parent,
-				Label:  v.v.Label,
+				Label: v.v.Label,
 			}
 			c := adt.MakeRootConjunct(env, &adt.StructLit{
 				Decls: fields,
 			})
 			n.AddConjunct(c)
 			n.Finalize(ctx)
+			n.Parent = v.v.Parent
 			a = append(a, makeValue(v.idx, n))
 		}
 
diff --git a/cue/types_test.go b/cue/types_test.go
index 7c7adb4..97bd718 100644
--- a/cue/types_test.go
+++ b/cue/types_test.go
@@ -1075,7 +1075,7 @@
 		a: foo: b: [Bar=string]: { d: Bar }
 		`,
 		path: []string{"a", "foo", "b", ""},
-		want: `{"d":"label","c":"foolabel"}`,
+		want: `{"c":"foolabel","d":"label"}`,
 	}}
 	for _, tc := range testCases {
 		t.Run("", func(t *testing.T) {
@@ -1133,7 +1133,7 @@
 		a: foo: b: [Bar=string]: { d: Bar }
 		`,
 		path: []string{"a", "foo", "b", ""},
-		want: "{\n\td: string\n\tc: string + string\n}",
+		want: "{\n\tc: string + string\n\td: string\n}",
 	}}
 	for _, tc := range testCases {
 		t.Run("", func(t *testing.T) {
diff --git a/doc/tutorial/kubernetes/testdata/manual.out b/doc/tutorial/kubernetes/testdata/manual.out
index be6bf4b..8c276b0 100644
--- a/doc/tutorial/kubernetes/testdata/manual.out
+++ b/doc/tutorial/kubernetes/testdata/manual.out
@@ -33,11 +33,6 @@
         arg: {}
         args: []
         env: {}
-        label: {
-            app:       "bartender"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -50,6 +45,11 @@
                 }
             }
         }
+        label: {
+            app:       "bartender"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -161,11 +161,6 @@
         }
         args: ["-etcd=etcd:2379", "-event-server=events:7788"] | []
         env: {}
-        label: {
-            app:       "breaddispatcher"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -178,6 +173,11 @@
                 }
             }
         }
+        label: {
+            app:       "breaddispatcher"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -286,11 +286,6 @@
         arg: {}
         args: []
         env: {}
-        label: {
-            app:       "host"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -303,6 +298,11 @@
                 }
             }
         }
+        label: {
+            app:       "host"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -411,11 +411,6 @@
         arg: {}
         args: []
         env: {}
-        label: {
-            app:       "maitred"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -428,6 +423,11 @@
                 }
             }
         }
+        label: {
+            app:       "maitred"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -539,11 +539,6 @@
         port: {}
         args: ["-http=:8080", "-etcd=etcd:2379"] | []
         env: {}
-        label: {
-            app:       "valeter"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -556,6 +551,11 @@
                 }
             }
         }
+        label: {
+            app:       "valeter"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -664,11 +664,6 @@
         arg: {}
         args: []
         env: {}
-        label: {
-            app:       "waiter"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -681,6 +676,11 @@
                 }
             }
         }
+        label: {
+            app:       "waiter"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -792,11 +792,6 @@
         }
         args: ["-http=:8080", "-etcd=etcd:2379"] | []
         env: {}
-        label: {
-            app:       "waterdispatcher"
-            domain:    "prod"
-            component: "frontend"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -809,6 +804,11 @@
                 }
             }
         }
+        label: {
+            app:       "waterdispatcher"
+            domain:    "prod"
+            component: "frontend"
+        }
         envSpec: {}
         volume: {}
     }
@@ -2027,11 +2027,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "caller"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -2055,6 +2050,11 @@
                 }
             }
         }
+        label: {
+            app:       "caller"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -2248,11 +2248,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "dishwasher"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -2276,6 +2271,11 @@
                 }
             }
         }
+        label: {
+            app:       "dishwasher"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -2457,11 +2457,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "expiditer"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -2485,6 +2480,11 @@
                 }
             }
         }
+        label: {
+            app:       "expiditer"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -2654,11 +2654,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "headchef"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -2682,6 +2677,11 @@
                 }
             }
         }
+        label: {
+            app:       "headchef"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -2855,11 +2855,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "linecook"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -2883,6 +2878,11 @@
                 }
             }
         }
+        label: {
+            app:       "linecook"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -3056,11 +3056,6 @@
                 kubernetes: {}
             }
         }
-        label: {
-            app:       "pastrychef"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -3084,6 +3079,11 @@
                 }
             }
         }
+        label: {
+            app:       "pastrychef"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
     }
 }
@@ -3222,11 +3222,6 @@
         arg: {}
         args: []
         env: {}
-        label: {
-            app:       "souschef"
-            domain:    "prod"
-            component: "kitchen"
-        }
         kubernetes: {
             spec: {
                 template: {
@@ -3250,6 +3245,11 @@
                 }
             }
         }
+        label: {
+            app:       "souschef"
+            domain:    "prod"
+            component: "kitchen"
+        }
         envSpec: {}
         volume: {}
     }
diff --git a/doc/tutorial/kubernetes/testdata/quick.out b/doc/tutorial/kubernetes/testdata/quick.out
index da4fd2e..41df1aa 100644
--- a/doc/tutorial/kubernetes/testdata/quick.out
+++ b/doc/tutorial/kubernetes/testdata/quick.out
@@ -16,15 +16,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "bartender"
-                domain:    "prod"
                 component: "frontend"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "bartender"
             labels: {
@@ -33,8 +35,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -46,8 +46,8 @@
                 metadata: {
                     labels: {
                         app:       "bartender"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -72,8 +72,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -86,15 +86,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "breaddispatcher"
-                domain:    "prod"
                 component: "frontend"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "breaddispatcher"
             labels: {
@@ -103,8 +105,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -116,8 +116,8 @@
                 metadata: {
                     labels: {
                         app:       "breaddispatcher"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -142,8 +142,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -156,15 +156,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "host"
-                domain:    "prod"
                 component: "frontend"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "host"
             labels: {
@@ -173,8 +175,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -186,8 +186,8 @@
                 metadata: {
                     labels: {
                         app:       "host"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -212,8 +212,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -226,15 +226,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "maitred"
-                domain:    "prod"
                 component: "frontend"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "maitred"
             labels: {
@@ -243,8 +245,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -256,8 +256,8 @@
                 metadata: {
                     labels: {
                         app:       "maitred"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -282,8 +282,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -305,6 +305,8 @@
                 component: "frontend"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "valeter"
             labels: {
@@ -313,8 +315,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -326,8 +326,8 @@
                 metadata: {
                     labels: {
                         app:       "valeter"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -352,8 +352,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -366,15 +366,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "waiter"
-                domain:    "prod"
                 component: "frontend"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "waiter"
             labels: {
@@ -383,8 +385,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -396,8 +396,8 @@
                 metadata: {
                     labels: {
                         app:       "waiter"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -421,8 +421,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -444,6 +444,8 @@
                 component: "frontend"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "waterdispatcher"
             labels: {
@@ -452,8 +454,6 @@
                 component: "frontend"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -465,8 +465,8 @@
                 metadata: {
                     labels: {
                         app:       "waterdispatcher"
-                        domain:    "prod"
                         component: "frontend"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -491,8 +491,8 @@
                 component: "frontend"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "frontend"
@@ -511,15 +511,17 @@
             ports: [{
                 port:       7080
                 targetPort: 7080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "download"
-                domain:    "prod"
                 component: "infra"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "download"
             labels: {
@@ -528,8 +530,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -541,8 +541,8 @@
                 metadata: {
                     labels: {
                         app:       "download"
-                        domain:    "prod"
                         component: "infra"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -562,8 +562,8 @@
                 component: "infra"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "infra"
@@ -577,8 +577,8 @@
             ports: [{
                 port:       2379
                 targetPort: 2379
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }, {
                 name:       "peer"
                 port:       2380
@@ -591,6 +591,8 @@
                 component: "infra"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "etcd"
             labels: {
@@ -599,8 +601,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {}
@@ -710,8 +710,8 @@
                 component: "infra"
             }
         }
-        kind:       "StatefulSet"
         apiVersion: "apps/v1"
+        kind:       "StatefulSet"
     }
 }
 configMap: {}
@@ -730,6 +730,8 @@
                 component: "infra"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "events"
             labels: {
@@ -738,8 +740,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -751,8 +751,8 @@
                 metadata: {
                     labels: {
                         app:       "events"
-                        domain:    "prod"
                         component: "infra"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -803,8 +803,8 @@
                 component: "infra"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "infra"
@@ -828,6 +828,8 @@
                 component: "infra"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "tasks"
             labels: {
@@ -836,8 +838,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -849,8 +849,8 @@
                 metadata: {
                     labels: {
                         app:       "tasks"
-                        domain:    "prod"
                         component: "infra"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -886,8 +886,8 @@
                 component: "infra"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "infra"
@@ -900,15 +900,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "updater"
-                domain:    "prod"
                 component: "infra"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "updater"
             labels: {
@@ -917,8 +919,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -930,8 +930,8 @@
                 metadata: {
                     labels: {
                         app:       "updater"
-                        domain:    "prod"
                         component: "infra"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -962,8 +962,8 @@
                 component: "infra"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "infra"
@@ -987,6 +987,8 @@
                 component: "infra"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "watcher"
             labels: {
@@ -995,8 +997,6 @@
                 component: "infra"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1008,8 +1008,8 @@
                 metadata: {
                     labels: {
                         app:       "watcher"
-                        domain:    "prod"
                         component: "infra"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -1041,8 +1041,8 @@
                 component: "infra"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "infra"
@@ -1061,15 +1061,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "caller"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "caller"
             labels: {
@@ -1078,8 +1080,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1091,8 +1091,8 @@
                 metadata: {
                     labels: {
                         app:       "caller"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1153,8 +1153,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1167,15 +1167,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "dishwasher"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "dishwasher"
             labels: {
@@ -1184,8 +1186,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1197,8 +1197,8 @@
                 metadata: {
                     labels: {
                         app:       "dishwasher"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1259,8 +1259,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1273,15 +1273,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "expiditer"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "expiditer"
             labels: {
@@ -1290,8 +1292,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1303,8 +1303,8 @@
                 metadata: {
                     labels: {
                         app:       "expiditer"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1356,8 +1356,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1370,15 +1370,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "headchef"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "headchef"
             labels: {
@@ -1387,8 +1389,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1400,8 +1400,8 @@
                 metadata: {
                     labels: {
                         app:       "headchef"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1453,8 +1453,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1467,15 +1467,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "linecook"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "linecook"
             labels: {
@@ -1484,8 +1486,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1497,8 +1497,8 @@
                 metadata: {
                     labels: {
                         app:       "linecook"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1550,8 +1550,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1564,15 +1564,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "pastrychef"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "pastrychef"
             labels: {
@@ -1581,8 +1583,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1594,8 +1594,8 @@
                 metadata: {
                     labels: {
                         app:       "pastrychef"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1647,8 +1647,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1661,15 +1661,17 @@
             ports: [{
                 port:       8080
                 targetPort: 8080
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "souschef"
-                domain:    "prod"
                 component: "kitchen"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "souschef"
             labels: {
@@ -1678,8 +1680,6 @@
                 component: "kitchen"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1691,8 +1691,8 @@
                 metadata: {
                     labels: {
                         app:       "souschef"
-                        domain:    "prod"
                         component: "kitchen"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -1723,8 +1723,8 @@
                 component: "kitchen"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "kitchen"
@@ -1766,8 +1766,8 @@
                 component: "mon"
             }
         }
-        kind:       "Service"
         apiVersion: "v1"
+        kind:       "Service"
     }
 }
 deployment: {
@@ -1784,8 +1784,8 @@
                     name: "alertmanager"
                     labels: {
                         app:       "alertmanager"
-                        domain:    "prod"
                         component: "mon"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -1823,8 +1823,8 @@
                 component: "mon"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "mon"
@@ -1875,6 +1875,8 @@
                 component: "mon"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "grafana"
             labels: {
@@ -1883,8 +1885,6 @@
                 component: "mon"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -1903,8 +1903,8 @@
                 metadata: {
                     labels: {
                         app:       "grafana"
-                        domain:    "prod"
                         component: "mon"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -1949,8 +1949,8 @@
                 }
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "mon"
@@ -1985,8 +1985,8 @@
                 component: "mon"
             }
         }
-        kind:       "Service"
         apiVersion: "v1"
+        kind:       "Service"
     }
 }
 deployment: {}
@@ -2056,8 +2056,8 @@
                 component: "mon"
             }
         }
-        kind:       "DaemonSet"
         apiVersion: "apps/v1"
+        kind:       "DaemonSet"
     }
 }
 statefulSet: {}
@@ -2092,8 +2092,8 @@
                 component: "mon"
             }
         }
-        kind:       "Service"
         apiVersion: "v1"
+        kind:       "Service"
     }
 }
 deployment: {
@@ -2117,8 +2117,8 @@
                     name: "prometheus"
                     labels: {
                         app:       "prometheus"
-                        domain:    "prod"
                         component: "mon"
+                        domain:    "prod"
                     }
                     annotations: {
                         "prometheus.io.scrape": "true"
@@ -2153,8 +2153,8 @@
                 component: "mon"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "mon"
@@ -2413,15 +2413,17 @@
             ports: [{
                 port:       4180
                 targetPort: 4180
-                name:       "client"
                 protocol:   "TCP"
+                name:       "client"
             }]
             selector: {
                 app:       "authproxy"
-                domain:    "prod"
                 component: "proxy"
+                domain:    "prod"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "authproxy"
             labels: {
@@ -2430,8 +2432,6 @@
                 component: "proxy"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -2443,8 +2443,8 @@
                 metadata: {
                     labels: {
                         app:       "authproxy"
-                        domain:    "prod"
                         component: "proxy"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -2475,8 +2475,8 @@
                 component: "proxy"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "proxy"
@@ -2566,6 +2566,8 @@
                 component: "proxy"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "goget"
             labels: {
@@ -2574,8 +2576,6 @@
                 component: "proxy"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -2587,8 +2587,8 @@
                 metadata: {
                     labels: {
                         app:       "goget"
-                        domain:    "prod"
                         component: "proxy"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -2618,8 +2618,8 @@
                 component: "proxy"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "proxy"
@@ -2648,6 +2648,8 @@
                 component: "proxy"
             }
         }
+        apiVersion: "v1"
+        kind:       "Service"
         metadata: {
             name: "nginx"
             labels: {
@@ -2656,8 +2658,6 @@
                 component: "proxy"
             }
         }
-        kind:       "Service"
-        apiVersion: "v1"
     }
 }
 deployment: {
@@ -2669,8 +2669,8 @@
                 metadata: {
                     labels: {
                         app:       "nginx"
-                        domain:    "prod"
                         component: "proxy"
+                        domain:    "prod"
                     }
                 }
                 spec: {
@@ -2711,8 +2711,8 @@
                 component: "proxy"
             }
         }
-        kind:       "Deployment"
         apiVersion: "apps/v1"
+        kind:       "Deployment"
     }
 }
 #Component: "proxy"
diff --git a/internal/core/adt/composite.go b/internal/core/adt/composite.go
index 3cbbbd1..bb63010 100644
--- a/internal/core/adt/composite.go
+++ b/internal/core/adt/composite.go
@@ -281,6 +281,12 @@
 	w := *v
 	w.Arcs = arcs
 	w.isData = true
+	w.Conjuncts = make([]Conjunct, len(v.Conjuncts))
+	copy(w.Conjuncts, v.Conjuncts)
+	for i := range w.Conjuncts {
+		w.Conjuncts[i].CloseID = 0
+	}
+	w.Closed = nil
 	return &w
 }
 
@@ -590,6 +596,10 @@
 	return c.x.Source()
 }
 
+func (c *Conjunct) Field() Node {
+	return c.x
+}
+
 func (c *Conjunct) Expr() Expr {
 	switch x := c.x.(type) {
 	case Expr:
diff --git a/internal/core/eval/closed.go b/internal/core/eval/closed.go
index 175b08d..a33c324 100644
--- a/internal/core/eval/closed.go
+++ b/internal/core/eval/closed.go
@@ -14,34 +14,35 @@
 
 package eval
 
-// The file implements the majority of the closed struct semantics.
-// The data is recorded in the Closed field of a Vertex.
+// The file implements the majority of the closed struct semantics. The data is
+// recorded in the Closed field of a Vertex.
 //
 // Each vertex has a set of conjuncts that make up the values of the vertex.
 // Each Conjunct may originate from various sources, like an embedding, field
 // definition or regular value. For the purpose of computing the value, the
-// source of the conjunct is irrelevant. The origin does matter, however, if
-// for determining whether a field is allowed in a closed struct. The Closed
-// field keeps track of the kind of origin for this purpose.
+// source of the conjunct is irrelevant. The origin does matter, however, for
+// determining whether a field is allowed in a closed struct. The Closed field
+// keeps track of the kind of origin for this purpose.
 //
-// More precisely, the CloseDef struct explains how the conjuncts of an arc
-// were combined and define a logical expression on the field sets
-// computed for each conjunct.
+// More precisely, the CloseDef struct explains how the conjuncts of an arc were
+// combined, for instance due to a conjunction with a closed struct or through an
+// embedding. Each Vertex may be associated with a slice of CloseDefs. The
+// position of a CloseDef in a file corresponds to an adt.ID.
 //
-// While evaluating each conjunct, nodeContext keeps track what changes need to
-// be made to ClosedDef based on the evaluation of the current conjuncts.
-// For instance, if a field references a definition, all other previous
-// checks are useless, as the newly referred to definitions define an upper
-// bound and will contain all the information that is necessary to determine
-// whether a field may be included.
+// While evaluating each conjunct, new CloseDefs are added to indicate how a
+// conjunct relates to its parent as needed. For instance, if a field references
+// a definition, all other previous checks are useless, as the newly referred to
+// definitions define an upper bound and will contain all the information that
+// is necessary to determine whether a field may be included.
 //
 // Most of the logic in this file concerns itself with the combination of
 // multiple CloseDef values as well as traversing the structure to validate
-// whether an arc is allowed. The actual fieldSet logic is in optional.go
-// The overal control and use of the functionality in this file is used
-// in eval.go.
+// whether an arc is allowed. The actual fieldSet logic is in optional.go. The
+// overall control and use of the functionality in this file is in eval.go.
 
 import (
+	"fmt"
+
 	"cuelang.org/go/internal/core/adt"
 )
 
@@ -66,17 +67,34 @@
 // `b` originated from an embedding, as otherwise `e` may not be allowed.
 //
 type acceptor struct {
-	tree     *CloseDef
-	fields   []fieldSet
+	Canopy []CloseDef
+	Fields []*fieldSet
+
+	// TODO: remove (unused as not fine-grained enough)
+	// isClosed is now used as an approximate filter.
 	isClosed bool
 	isList   bool
 	openList bool
 }
 
+func (a *acceptor) clone() *acceptor {
+	canopy := make([]CloseDef, len(a.Canopy))
+	copy(canopy, a.Canopy)
+	for i := range canopy {
+		canopy[i].IsClosed = false
+	}
+	return &acceptor{
+		Canopy:   canopy,
+		isClosed: a.isClosed,
+	}
+}
+
 func (a *acceptor) Accept(c *adt.OpContext, f adt.Feature) bool {
 	if a.isList {
 		return a.openList
 	}
+
+	// TODO: remove these two checks and always pass InvalidLabel.
 	if !a.isClosed {
 		return true
 	}
@@ -86,19 +104,19 @@
 	if f.IsInt() {
 		return a.openList
 	}
-	return a.verifyArcAllowed(c, f) == nil
+	return a.verifyArcAllowed(c, f, nil)
 }
 
 func (a *acceptor) MatchAndInsert(c *adt.OpContext, v *adt.Vertex) {
-	for _, fs := range a.fields {
+	a.visitAllFieldSets(func(fs *fieldSet) {
 		fs.MatchAndInsert(c, v)
-	}
+	})
 }
 
 func (a *acceptor) OptionalTypes() (mask adt.OptionalType) {
-	for _, f := range a.fields {
+	a.visitAllFieldSets(func(f *fieldSet) {
 		mask |= f.OptionalTypes()
-	}
+	})
 	return mask
 }
 
@@ -114,302 +132,474 @@
 // Acceptor. This could be implemented as an allocation-free wrapper type around
 // a Disjunction. This will require a bit more API cleaning, though.
 func newDisjunctionAcceptor(x *adt.Disjunction) adt.Acceptor {
-	tree := &CloseDef{Src: x}
-	sets := []fieldSet{}
+	n := &acceptor{}
+
 	for _, d := range x.Values {
 		if a, _ := d.Closed.(*acceptor); a != nil {
-			sets = append(sets, a.fields...)
-			tree.List = append(tree.List, a.tree)
+			offset := n.InsertSubtree(0, nil, d, false)
+			a.visitAllFieldSets(func(f *fieldSet) {
+				g := *f
+				g.id += offset
+				n.insertFieldSet(g.id, &g)
+			})
 		}
 	}
-	if len(tree.List) == 0 && len(sets) == 0 {
-		return nil
-	}
-	return &acceptor{tree: tree, fields: sets}
+
+	return n
 }
 
-// CloseDef defines how individual FieldSets (corresponding to conjuncts)
+// CloseDef defines how individual fieldSets (corresponding to conjuncts)
 // combine to determine whether a field is contained in a closed set.
 //
-// Nodes with a non-empty List and IsAnd is false represent embeddings.
-// The ID is the node that contained the embedding in that case.
-//
-// Nodes with a non-empty List and IsAnd is true represent conjunctions of
-// definitions. In this case, a field must be contained in each definition.
-//
-// If a node has both conjunctions of definitions and embeddings, only the
-// former are maintained. Conjunctions of definitions define an upper bound
-// of the set of allowed fields in that case and the embeddings will not add
-// any value.
+// A CloseDef combines multiple conjuncts and embeddings. All CloseDefs are
+// stored in a slice. References to other CloseDefs are indices within this slice.
+// Together they define the top of the expression tree of how conjuncts
+// combine together (a canopy).
 type CloseDef struct {
-	Src   adt.Node // for error reporting
-	ID    adt.ID
-	IsAnd bool
-	List  []*CloseDef
+	Src adt.Node
+
+	// And is used to track the IDs of a set of conjuncts. If IsDef or IsClosed
+	// is true, a field is only allowed if at least one of the corresponding
+	// fieldsets associated with this node or its embeddings allows it.
+	//
+	// And nodes are linked in a ring, meaning that the last node points back
+	// to the first node. This allows a traversal of all and nodes to commence
+	// at any point in the ring.
+	And adt.ID
+
+	// NextEmbed indicates the first ID for a linked list of embedded
+	// expressions. The node corresponding to the actual embedding is at
+	// position NextEmbed+1. The linked-list nodes all have a value of -1 for
+	// And. NextEmbed is 0 for the last element in the list.
+	NextEmbed adt.ID
+
+	// IsDef indicates this node is associated with a definition and that all
+	// expressions are recursively closed. This value is "sticky" when a child
+	// node copies the closedness data from a parent node.
+	IsDef bool
+
+	// IsClosed indicates this node is associated with the result of close().
+	// A child vertex should not "inherit" this value.
+	IsClosed bool
 }
 
-// isOr reports whether this is a node representing embeddings.
-func isOr(c *CloseDef) bool {
-	return len(c.List) > 0 && !c.IsAnd
+func (n *CloseDef) isRequired() bool {
+	return n.IsDef || n.IsClosed
 }
 
-// updateClosed transforms c into a new node with all non-AND nodes with an
-// ID matching one in replace substituted with the replace value.
-//
-// Vertex only keeps track of a flat list of conjuncts and does not keep track
-// of the hierarchy of how these were derived. This function allows rewriting
-// a CloseDef tree based on replacement information gathered during evaluation
-// of this flat list.
-//
-func updateClosed(c *CloseDef, replace map[adt.ID]*CloseDef) *CloseDef { // used in eval.go
-	// Insert an entry for CloseID 0 if we are about to replace it. By default
-	// 0, which is the majority case, is omitted.
-	if c != nil && replace[0] != nil && !containsClosed(c, 0) {
-		c = &CloseDef{IsAnd: true, List: []*CloseDef{c, {}}}
-	}
+const embedRoot adt.ID = -1
 
-	switch {
-	case c == nil:
-		and := []*CloseDef{}
-		for _, c := range replace {
-			if c != nil {
-				and = append(and, c)
-			}
+type Entry = fieldSet
+
+func (c *acceptor) visitAllFieldSets(f func(f *fieldSet)) {
+	for _, set := range c.Fields {
+		for ; set != nil; set = set.next {
+			f(set)
 		}
-		switch len(and) {
-		case 0:
-		case 1:
-			c = and[0]
-		default:
-			c = &CloseDef{IsAnd: true, List: and}
-		}
-		// needClose
-	case len(replace) > 0:
-		c = updateClosedRec(c, replace)
 	}
-	return c
 }
 
-func updateClosedRec(c *CloseDef, replace map[adt.ID]*CloseDef) *CloseDef {
-	if c == nil {
+func (c *acceptor) visitAnd(id adt.ID, f func(id adt.ID, n CloseDef) bool) bool {
+	for i := id; ; {
+		x := c.Canopy[i]
+
+		if !f(i, x) {
+			return false
+		}
+
+		if i = x.And; i == id {
+			break
+		}
+	}
+	return true
+}
+
+func (c *acceptor) visitOr(id adt.ID, f func(id adt.ID, n CloseDef) bool) bool {
+	if !f(id, c.Canopy[id]) {
+		return false
+	}
+	return c.visitEmbed(id, f)
+}
+
+func (c *acceptor) visitEmbed(id adt.ID, f func(id adt.ID, n CloseDef) bool) bool {
+	for i := c.Canopy[id].NextEmbed; i != 0; i = c.Canopy[i].NextEmbed {
+		if id := i + 1; !f(id, c.Canopy[id]) {
+			return false
+		}
+	}
+	return true
+}
+
+func (c *acceptor) node(id adt.ID) *CloseDef {
+	if len(c.Canopy) == 0 {
+		c.Canopy = append(c.Canopy, CloseDef{})
+	}
+	return &c.Canopy[id]
+}
+
+func (c *acceptor) fieldSet(at adt.ID) *fieldSet {
+	if int(at) >= len(c.Fields) {
 		return nil
 	}
-
-	// If c is a leaf or AND node, replace it outright. If both are an OR node,
-	// merge the lists.
-	if len(c.List) == 0 || !c.IsAnd {
-		switch sub, ok := replace[c.ID]; {
-		case sub != nil:
-			if isOr(sub) && isOr(c) {
-				sub.List = append(sub.List, c.List...)
-			}
-			return sub
-
-		case !ok:
-			if len(c.List) == 0 {
-				return nil // drop from list
-			}
-		}
-	}
-
-	changed := false
-	buf := make([]*CloseDef, len(c.List))
-	k := 0
-	for _, c := range c.List {
-		n := updateClosedRec(c, replace)
-		changed = changed || n != c
-		if n != nil {
-			buf[k] = n
-			k++
-		}
-	}
-	if !changed {
-		return c
-	}
-
-	switch k {
-	case 0:
-		return nil
-	case 1:
-		return buf[0]
-	default:
-		return &CloseDef{Src: c.Src, ID: c.ID, IsAnd: c.IsAnd, List: buf[:k]}
-	}
+	return c.Fields[at]
 }
 
-// UpdateReplace is called after evaluating a conjunct at the top of the arc
-// to update the replacement information with the gathered CloseDef info.
-func (n *nodeContext) updateReplace(id adt.ID) { // used in eval.go
-	if n.newClose == nil {
-		return
+func (c *acceptor) insertFieldSet(at adt.ID, e *fieldSet) {
+	c.node(0) // Ensure the canopy is at least length 1.
+	if len(c.Fields) < len(c.Canopy) {
+		a := make([]*fieldSet, len(c.Canopy))
+		copy(a, c.Fields)
+		c.Fields = a
 	}
-
-	if n.replace == nil {
-		n.replace = make(map[adt.ID]*CloseDef)
-	}
-
-	n.replace[id] = updateClose(n.replace[id], n.newClose)
-	n.newClose = nil
+	e.next = c.Fields[at]
+	c.Fields[at] = e
 }
 
-// appendList creates a new CloseDef with the elements of the list of orig
-// and updated appended. It will take the ID of orig. It does not alter
-// either orig or update.
-func appendLists(orig, update *CloseDef) *CloseDef {
-	list := make([]*CloseDef, len(orig.List)+len(update.List))
-	copy(list[copy(list, orig.List):], update.List)
-	c := *orig
-	c.List = list
-	return &c
+// InsertDefinition appends a new CloseDef to Canopy representing a reference to
+// a definition at the given position. It returns the position of the new
+// CloseDef.
+func (c *acceptor) InsertDefinition(at adt.ID, src adt.Node) (id adt.ID) {
+	if len(c.Canopy) == 0 {
+		c.Canopy = append(c.Canopy, CloseDef{})
+	}
+	if int(at) >= len(c.Canopy) {
+		panic(fmt.Sprintf("at >= len(canopy) (%d >= %d)", at, len(c.Canopy)))
+	}
+	// Now that there is a new definition, the parent location (invariant) is
+	// no longer a required entry and could be dropped if there were no more
+	// fields.
+	//    #orig: #d     // only fields in #d are sufficient to check.
+	//    #orig: {a: b}
+	c.Canopy[at].IsDef = false
+
+	id = adt.ID(len(c.Canopy))
+	y := CloseDef{
+		Src:       src,
+		And:       c.Canopy[at].And,
+		NextEmbed: 0,
+		IsDef:     true,
+	}
+	c.Canopy[at].And = id
+	c.Canopy = append(c.Canopy, y)
+
+	return id
 }
 
-// updateClose merges update into orig without altering either.
-//
-// The merge takes into account whether it is an embedding node or not.
-// Most notably, if an "And" node is combined with an embedding, the
-// embedding information may be discarded.
-func updateClose(orig, update *CloseDef) *CloseDef {
-	switch {
-	case orig == nil:
-		return update
-	case isOr(orig):
-		if !isOr(update) {
-			return update
-		}
-		return appendLists(orig, update)
-	case isOr(update):
-		return orig
-	case len(orig.List) == 0 && len(update.List) == 0:
-		return &CloseDef{IsAnd: true, List: []*CloseDef{orig, update}}
-	case len(orig.List) == 0:
-		update.List = append(update.List, orig)
-		return update
-	default: // isAnd(orig)
-		return appendLists(orig, update)
+// InsertEmbed appends a new CloseDef to Canopy representing the use of an
+// embedding at the given position. It returns the position of the new CloseDef.
+func (c *acceptor) InsertEmbed(at adt.ID, src adt.Node) (id adt.ID) {
+	if len(c.Canopy) == 0 {
+		c.Canopy = append(c.Canopy, CloseDef{})
 	}
+	if int(at) >= len(c.Canopy) {
+		panic(fmt.Sprintf("at >= len(canopy) (%d >= %d)", at, len(c.Canopy)))
+	}
+
+	id = adt.ID(len(c.Canopy))
+	y := CloseDef{
+		And:       -1,
+		NextEmbed: c.Canopy[at].NextEmbed,
+	}
+	z := CloseDef{Src: src, And: id + 1}
+	c.Canopy[at].NextEmbed = id
+	c.Canopy = append(c.Canopy, y, z)
+
+	return id + 1
 }
 
-func (n *nodeContext) addAnd(c *CloseDef) { // used in eval.go
-	switch {
-	case n.newClose == nil:
-		n.newClose = c
-	case isOr(n.newClose):
-		n.newClose = c
-	case len(n.newClose.List) == 0:
-		n.newClose = &CloseDef{
-			IsAnd: true,
-			List:  []*CloseDef{n.newClose, c},
-		}
-	default:
-		n.newClose.List = append(n.newClose.List, c)
-	}
-}
-
-func (n *nodeContext) addOr(parentID adt.ID, c *CloseDef) { // used in eval.go
-	switch {
-	case n.newClose == nil:
-		d := &CloseDef{ID: parentID, List: []*CloseDef{{ID: parentID}}}
-		if c != nil {
-			d.List = append(d.List, c)
-		}
-		n.newClose = d
-	case isOr(n.newClose):
-		d := n.newClose
-		if c != nil {
-			d.List = append(d.List, c)
-		}
-	}
-}
-
-// verifyArcAllowed checks whether f is an allowed label within the current
-// node. It traverses c considering the "or" semantics of embeddings and the
-// "and" semantics of conjunctions. It generates an error if a field is not
-// allowed.
-func (n *acceptor) verifyArcAllowed(ctx *adt.OpContext, f adt.Feature) *adt.Bottom {
-	defer ctx.ReleasePositions(ctx.MarkPositions())
-
-	// TODO(perf): this will also generate a message for f == 0. Do something
-	// more clever and only generate this when it is a user error.
-	filter := f.IsString() || f == adt.InvalidLabel
-	if filter && !n.verifyArcRecursive(ctx, n.tree, f) {
-		collectPositions(ctx, n.tree)
-		label := f.SelectorString(ctx)
-		return ctx.NewErrf("field `%s` not allowed", label)
-	}
-	return nil
-}
-
-func (n *acceptor) verifyArcRecursive(ctx *adt.OpContext, c *CloseDef, f adt.Feature) bool {
-	if len(c.List) == 0 {
-		return n.verifyDefinition(ctx, c.ID, f)
-	}
-	if c.IsAnd {
-		for _, c := range c.List {
-			if !n.verifyArcRecursive(ctx, c, f) {
-				return false
-			}
-		}
+// isComplexStruct reports whether the Closed information should be copied as a
+// subtree into the parent node using InsertSubtree. If not, the conjuncts can
+// just be inserted at the current ID.
+func isComplexStruct(v *adt.Vertex) bool {
+	m, _ := v.Value.(*adt.StructMarker)
+	if m == nil {
 		return true
 	}
-	for _, c := range c.List {
-		if n.verifyArcRecursive(ctx, c, f) {
-			return true
-		}
+	a, _ := v.Closed.(*acceptor)
+	if a == nil {
+		return false
 	}
-	return false
+	if a.isClosed {
+		return true
+	}
+	switch len(a.Canopy) {
+	case 0:
+		return false
+	case 1:
+		// TODO: should we check for closedness?
+		x := a.Canopy[0]
+		return x.isRequired()
+	}
+	return true
 }
 
-// verifyDefinition reports whether f is a valid member for any of the fieldSets
-// with the same closeID.
-func (n *acceptor) verifyDefinition(ctx *adt.OpContext, closeID adt.ID, f adt.Feature) (ok bool) {
-	for _, o := range n.fields {
-		if o.id != closeID {
-			continue
+// InsertSubtree inserts the closedness information of v into c as an embedding
+// at the current position and inserts conjuncts of v into n (if not nil).
+// It inserts it as an embedding rather than an "and" to cover either case. The
+// idea is that if one of the values were supposed to be closed, a separate
+// node entry would already have been created.
+func (c *acceptor) InsertSubtree(at adt.ID, n *nodeContext, v *adt.Vertex, cyclic bool) adt.ID {
+	if len(c.Canopy) == 0 {
+		c.Canopy = append(c.Canopy, CloseDef{})
+	}
+	if int(at) >= len(c.Canopy) {
+		panic(fmt.Sprintf("at >= len(canopy) (%d >= %d)", at, len(c.Canopy)))
+	}
+
+	a := closedInfo(v)
+	a.node(0)
+
+	id := adt.ID(len(c.Canopy))
+	y := CloseDef{
+		And:       embedRoot,
+		NextEmbed: c.Canopy[at].NextEmbed,
+	}
+	c.Canopy[at].NextEmbed = id
+
+	c.Canopy = append(c.Canopy, y)
+	id = adt.ID(len(c.Canopy))
+
+	// First entry is at the embedded node location.
+	c.Canopy = append(c.Canopy, a.Canopy...)
+
+	// Shift all IDs for the new offset.
+	for i, x := range c.Canopy[id:] {
+		if x.And != -1 {
+			c.Canopy[int(id)+i].And += id
+		}
+		if x.NextEmbed != 0 {
+			c.Canopy[int(id)+i].NextEmbed += id
+		}
+	}
+
+	if n != nil {
+		for _, c := range v.Conjuncts {
+			c = updateCyclic(c, cyclic, nil)
+			c.CloseID += id
+			n.addExprConjunct(c)
+		}
+	}
+
+	return id
+}
+
+func (c *acceptor) verifyArc(ctx *adt.OpContext, f adt.Feature, v *adt.Vertex) (found bool, err *adt.Bottom) {
+
+	defer ctx.ReleasePositions(ctx.MarkPositions())
+
+	c.node(0) // ensure at least a size of 1.
+	if c.verify(ctx, f) {
+		return true, nil
+	}
+
+	// TODO: also disallow non-hidden definitions.
+	if !f.IsString() && f != adt.InvalidLabel {
+		return false, nil
+	}
+
+	if v != nil {
+		for _, c := range v.Conjuncts {
+			if pos := c.Field(); pos != nil {
+				ctx.AddPosition(pos)
+			}
+		}
+	}
+
+	// collect positions from tree.
+	for _, c := range c.Canopy {
+		if c.Src != nil {
+			ctx.AddPosition(c.Src)
+		}
+	}
+
+	label := f.SelectorString(ctx)
+	return false, ctx.NewErrf("field `%s` not allowed", label)
+}
+
+func (c *acceptor) verifyArcAllowed(ctx *adt.OpContext, f adt.Feature, v *adt.Vertex) bool {
+
+	// TODO: also disallow non-hidden definitions.
+	if !f.IsString() && f != adt.InvalidLabel {
+		return true
+	}
+
+	defer ctx.ReleasePositions(ctx.MarkPositions())
+
+	c.node(0) // ensure at least a size of 1.
+	return c.verify(ctx, f)
+}
+
+func (c *acceptor) verify(ctx *adt.OpContext, f adt.Feature) bool {
+	ok, required := c.verifyAnd(ctx, 0, f)
+	return ok || (!required && !c.isClosed)
+}
+
+// verifyAnd reports whether f is contained in all closed conjuncts at id and,
+// if not, whether the presence of at least one entry is required.
+func (c *acceptor) verifyAnd(ctx *adt.OpContext, id adt.ID, f adt.Feature) (found, required bool) {
+	for i := id; ; {
+		x := c.Canopy[i]
+
+		if ok, req := c.verifySets(ctx, i, f); ok {
+			found = true
+		} else if ok, isClosed := c.verifyEmbed(ctx, i, f); ok {
+			found = true
+		} else if req || x.isRequired() {
+			// Not found for a closed entry so this indicates a failure.
+			return false, true
+		} else if isClosed {
+			// The node itself isn't closed, but an embedding indicates it
+			// should. See cue/testdata/definitions/embed.txtar.
+			required = true
 		}
 
+		if i = x.And; i == id {
+			break
+		}
+	}
+
+	return found, required
+}
+
+// verifyEmbed reports whether any of the embeddings for the node at id allows f
+// and, if not, whether the embeddings imply that the enclosing node should be
+// closed. The latter is the case when the embedded struct itself is closed.
+func (c *acceptor) verifyEmbed(ctx *adt.OpContext, id adt.ID, f adt.Feature) (found, isClosed bool) {
+
+	for i := c.Canopy[id].NextEmbed; i != 0; i = c.Canopy[i].NextEmbed {
+		ok, req := c.verifyAnd(ctx, i+1, f)
+		if ok {
+			return true, false
+		}
+		if req {
+			isClosed = true
+		}
+	}
+	return false, isClosed
+}
+
+func (c *acceptor) verifySets(ctx *adt.OpContext, id adt.ID, f adt.Feature) (found, required bool) {
+	o := c.fieldSet(id)
+	if o == nil {
+		return false, false
+	}
+	for ; o != nil; o = o.next {
 		if len(o.additional) > 0 || o.isOpen {
-			return true
+			return true, false
 		}
 
 		for _, g := range o.fields {
 			if f == g.label {
-				return true
+				return true, false
 			}
 		}
 
 		for _, b := range o.bulk {
 			if b.check.Match(ctx, f) {
-				return true
+				return true, false
 			}
 		}
 	}
-	for _, o := range n.fields {
+
+	// TODO: this is the same location where code is registered as the old code,
+	// but
+	for o := c.Fields[id]; o != nil; o = o.next {
 		if o.pos != nil {
 			ctx.AddPosition(o.pos)
 		}
 	}
-	return false
+	return false, false
 }
 
-func collectPositions(ctx *adt.OpContext, c *CloseDef) {
-	if c.Src != nil {
-		ctx.AddPosition(c.Src)
-	}
-	for _, x := range c.List {
-		collectPositions(ctx, x)
-	}
+type info struct {
+	referred bool
+	up       adt.ID
+	replace  adt.ID
+	reverse  adt.ID
 }
 
-// containsClosed reports whether c contains any CloseDef with ID x,
-// recursively.
-func containsClosed(c *CloseDef, x adt.ID) bool {
-	if c.ID == x && !c.IsAnd {
-		return true
+func (c *acceptor) Compact(all []adt.Conjunct) (compacted []CloseDef) {
+	a := c.Canopy
+	if len(a) == 0 {
+		return nil
 	}
-	for _, e := range c.List {
-		if containsClosed(e, x) {
-			return true
+
+	marked := make([]info, len(a))
+
+	c.markParents(0, marked)
+
+	// Mark all entries that cannot be dropped.
+	for _, x := range all {
+		c.markUsed(x.CloseID, marked)
+	}
+
+	// Compute compact numbers and reverse.
+	k := adt.ID(0)
+	for i, x := range marked {
+		if x.referred {
+			marked[i].replace = k
+			marked[k].reverse = adt.ID(i)
+			k++
 		}
 	}
-	return false
+
+	compacted = make([]CloseDef, k)
+
+	for i := range compacted {
+		orig := c.Canopy[marked[i].reverse]
+
+		and := orig.And
+		if and != embedRoot {
+			and = marked[orig.And].replace
+		}
+		compacted[i] = CloseDef{
+			Src:   orig.Src,
+			And:   and,
+			IsDef: orig.IsDef,
+		}
+
+		last := adt.ID(i)
+		for or := orig.NextEmbed; or != 0; or = c.Canopy[or].NextEmbed {
+			if marked[or].referred {
+				compacted[last].NextEmbed = marked[or].replace
+				last = marked[or].replace
+			}
+		}
+	}
+
+	// Update conjuncts
+	for i, x := range all {
+		all[i].CloseID = marked[x.ID()].replace
+	}
+
+	return compacted
+}
+
+func (c *acceptor) markParents(parent adt.ID, info []info) {
+	// Ands are arranged in a ring, so check for parent, not 0.
+	c.visitAnd(parent, func(i adt.ID, x CloseDef) bool {
+		c.visitEmbed(i, func(j adt.ID, x CloseDef) bool {
+			info[j-1].up = i
+			info[j].up = i
+			c.markParents(j, info)
+			return true
+		})
+		return true
+	})
+}
+
+func (c *acceptor) markUsed(id adt.ID, marked []info) {
+	if marked[id].referred {
+		return
+	}
+
+	if id > 0 && c.Canopy[id-1].And == embedRoot {
+		marked[id-1].referred = true
+	}
+
+	for i := id; !marked[i].referred; i = c.Canopy[i].And {
+		marked[i].referred = true
+	}
+
+	c.markUsed(marked[id].up, marked)
 }
diff --git a/internal/core/eval/closed_test.go b/internal/core/eval/closed_test.go
index 45511f6..9c1d3d7 100644
--- a/internal/core/eval/closed_test.go
+++ b/internal/core/eval/closed_test.go
@@ -15,164 +15,448 @@
 package eval
 
 import (
+	"fmt"
+	"strings"
 	"testing"
 
 	"cuelang.org/go/internal/core/adt"
-	"github.com/google/go-cmp/cmp"
 )
 
-func TestRewriteClosed(t *testing.T) {
+func TestInsert(t *testing.T) {
 	testCases := []struct {
-		desc    string
-		close   *CloseDef
-		replace map[adt.ID]*CloseDef
-		want    *CloseDef
+		tree   []CloseDef
+		typ    func(c *acceptor, at adt.ID, p adt.Node) adt.ID
+		at     adt.ID
+		wantID adt.ID
+		out    string
 	}{{
-		desc:  "introduce new",
-		close: nil,
-		replace: map[adt.ID]*CloseDef{
-			0: {ID: 2, IsAnd: false, List: nil},
-		},
-		want: &CloseDef{
-			ID: 0x02,
-		},
+		tree:   nil,
+		at:     0,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 1,
+		out:    "&( 0[] *1[])",
 	}, {
-		desc: "auto insert missing 0",
-		close: &CloseDef{
-			ID: 1,
-		},
-		replace: map[adt.ID]*CloseDef{
-			0: {ID: 2, IsAnd: false, List: nil},
-			1: nil, // keep 1
-		},
-		want: &CloseDef{
-			IsAnd: true,
-			List:  []*CloseDef{{ID: 1}, {ID: 2}},
-		},
+		tree:   []CloseDef{{}},
+		at:     0,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 1,
+		out:    "&( 0[] *1[])",
 	}, {
-		desc: "a: #A & #B",
-		close: &CloseDef{
-			ID: 1,
-		},
-		replace: map[adt.ID]*CloseDef{
-			1: {ID: 1, IsAnd: true, List: []*CloseDef{{ID: 2}, {ID: 3}}},
-		},
-		want: &CloseDef{
-			ID:    0x01,
-			IsAnd: true,
-			List:  []*CloseDef{{ID: 2}, {ID: 3}},
-		},
+		tree:   []CloseDef{0: {And: 1}, {And: 0, IsDef: true}},
+		at:     0,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 2,
+		out:    "&( 0[] *2[] *1[])",
 	}, {
-		desc: "eliminateUnusedToEmpty",
-		close: &CloseDef{
-			ID: 1,
-		},
-		replace: map[adt.ID]*CloseDef{
-			0: nil,
-		},
-		want: nil,
+		tree:   []CloseDef{0: {And: 1}, 1: {And: 0, IsDef: true}},
+		at:     1,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 2,
+		out:    "&( 0[] 1[] *2[])",
 	}, {
-		// Eliminate an embedding for which there are no more entries.
-		desc: "eliminateOneEmbedding",
-		close: &CloseDef{
-			ID: 0,
-			List: []*CloseDef{
-				{ID: 2},
-				{ID: 3},
-			},
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 2, IsDef: true},
+			2: {And: 0, IsDef: true},
 		},
-		replace: map[adt.ID]*CloseDef{2: nil},
-		want:    &CloseDef{ID: 2},
+		at:     1,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 3,
+		out:    "&( 0[] 1[] *3[] *2[])",
 	}, {
-		desc: "eliminateAllEmbeddings",
-		close: &CloseDef{
-			ID: 2,
-			List: []*CloseDef{
-				{ID: 2},
-				{ID: 3},
-			},
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 0, NextEmbed: 2, IsDef: true},
+			2: {And: embedRoot},
+			3: {And: 3},
 		},
-		replace: map[adt.ID]*CloseDef{0: {ID: 4}, 4: nil},
-		want:    &CloseDef{ID: 4},
+		at:     1,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 4,
+		out:    "&( 0[] 1[&( 3[])] *4[])",
 	}, {
-		// Do not eliminate an embedding that has a replacement.
-		desc: "eliminateOneEmbeddingByMultiple",
-		close: &CloseDef{
-			ID: 0,
-			List: []*CloseDef{
-				{ID: 2},
-				{ID: 3},
-			},
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 0, NextEmbed: 2, IsDef: true},
+			2: {And: embedRoot},
+			3: {And: 3},
 		},
-		replace: map[adt.ID]*CloseDef{
-			2: nil,
-			3: {ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
-		},
-		want: &CloseDef{
-			ID: 0x00,
-			List: []*CloseDef{
-				{ID: 2},
-				{ID: 3, IsAnd: true, List: []*CloseDef{{ID: 4}, {ID: 5}}},
-			},
-		},
+		at:     3,
+		typ:    (*acceptor).InsertDefinition,
+		wantID: 4,
+		out:    "&( 0[] *1[&( 3[] *4[])])",
 	}, {
-		// Select b within a
-		// a: {      // ID: 0
-		//     #A    // ID: 1
-		//     #B    // ID: 2
-		//     b: #C // ID: 0
-		// }
-		// #C: {
-		//     b: #D // ID: 3
-		// }
-		//
-		desc: "embeddingOverruledByField",
-		close: &CloseDef{
-			ID: 0,
-			List: []*CloseDef{
-				{ID: 1},
-				{ID: 2},
-				{ID: 0},
-			},
-		},
-		replace: map[adt.ID]*CloseDef{0: {ID: 3}},
-		want:    &CloseDef{ID: 3},
+		tree:   nil,
+		at:     0,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 2,
+		out:    "&( 0[&( 2[])])",
 	}, {
-		// Select b within a
-		// a: {      // ID: 0
-		//     #A    // ID: 1
-		//     #B    // ID: 2
-		//     b: #C // ID: 0
-		// }
-		// #C: {
-		//     b: #D & #E // ID: 3 & 4
-		// }
-		//
-		desc: "embeddingOverruledByMultiple",
-		close: &CloseDef{
-			ID: 0,
-			List: []*CloseDef{
-				{ID: 1},
-				{ID: 2},
-				{ID: 0},
-			},
+		tree:   []CloseDef{{}},
+		at:     0,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 2,
+		out:    "&( 0[&( 2[])])",
+	}, {
+		tree:   []CloseDef{0: {And: 1}, 1: {And: 0, IsDef: true}},
+		at:     1,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 3,
+		out:    "&( 0[] *1[&( 3[])])",
+	}, {
+		tree:   []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		at:     0,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 4,
+		out:    "&( 0[&( 4[])&( 2[])])",
+	}, {
+		tree:   []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		at:     1,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 4,
+		out:    "&( 0[&( 2[])&( 4[])])",
+	}, {
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 2},
+			2: {And: 0},
 		},
-		replace: map[adt.ID]*CloseDef{
-			0: {IsAnd: true, List: []*CloseDef{{ID: 3}, {ID: 4}}},
-		},
-		want: &CloseDef{
-			ID:    0,
-			IsAnd: true,
-			List:  []*CloseDef{{ID: 3}, {ID: 4}},
-		},
+		at:     2,
+		typ:    (*acceptor).InsertEmbed,
+		wantID: 4,
+		out:    "&( 0[] 1[] 2[&( 4[])])",
 	}}
+	for i, tc := range testCases {
+		t.Run(fmt.Sprint(i), func(t *testing.T) {
+			c := &acceptor{Canopy: tc.tree}
+			got := tc.typ(c, tc.at, nil)
 
-	for _, tc := range testCases {
-		t.Run(tc.desc, func(t *testing.T) {
-			got := updateClosed(tc.close, tc.replace)
-			if !cmp.Equal(got, tc.want) {
-				t.Error(cmp.Diff(got, tc.want))
+			if got != tc.wantID {
+				t.Errorf("id: got %d; want %d", got, tc.wantID)
+			}
+
+			w := &strings.Builder{}
+			// fmt.Fprintf(w, "%#v", c.Canopy)
+			writeConjuncts(w, c)
+			if got := w.String(); got != tc.out {
+				t.Errorf("id: got %s; want %s", got, tc.out)
 			}
 		})
 	}
 }
+
+func TestInsertSubtree(t *testing.T) {
+	testCases := []struct {
+		tree []CloseDef
+		at   adt.ID
+		sub  []CloseDef
+		out  string
+	}{{
+		tree: nil,
+		at:   0,
+		sub:  nil,
+		out:  "&( 0[&( 2[])])",
+	}, {
+		tree: []CloseDef{{}},
+		at:   0,
+		sub:  nil,
+		out:  "&( 0[&( 2[])])",
+	}, {
+		tree: []CloseDef{0: {And: 1}, {And: 0, IsDef: true}},
+		at:   0,
+		sub:  []CloseDef{{}},
+		out:  "&( 0[&( 3[])] *1[])",
+	}, {
+		tree: []CloseDef{0: {And: 1}, {And: 0, IsDef: true}},
+		at:   0,
+		sub:  []CloseDef{{And: 1}, {And: 0, IsDef: true}},
+		out:  "&( 0[&( 3[] *4[])] *1[])",
+	}, {
+		tree: []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		at:   0,
+		sub:  []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		out:  "&( 0[&( 4[&( 6[])])&( 2[])])",
+	}, {
+		tree: []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		at:   2,
+		sub:  []CloseDef{0: {NextEmbed: 1}, 1: {And: embedRoot}, 2: {And: 2}},
+		out:  "&( 0[&( 2[&( 4[&( 6[])])])])",
+	}}
+	for i, tc := range testCases {
+		t.Run(fmt.Sprint(i), func(t *testing.T) {
+			c := &acceptor{Canopy: tc.tree}
+			d := &acceptor{Canopy: tc.sub}
+			n := &nodeContext{nodeShared: &nodeShared{node: &adt.Vertex{}}}
+			c.InsertSubtree(tc.at, n, &adt.Vertex{Closed: d}, false)
+
+			w := &strings.Builder{}
+			// fmt.Fprintf(w, "%#v", c.Canopy)
+			writeConjuncts(w, c)
+			if got := w.String(); got != tc.out {
+				t.Errorf("id: got %s; want %s", got, tc.out)
+			}
+		})
+	}
+}
+func TestVerifyArcAllowed(t *testing.T) {
+	fields := func(a ...adt.Feature) []adt.Feature { return a }
+	results := func(a ...bool) []bool { return a }
+	fieldSets := func(a ...[]adt.Feature) [][]adt.Feature { return a }
+
+	testCases := []struct {
+		desc     string
+		isClosed bool
+		tree     []CloseDef
+		fields   [][]adt.Feature
+		check    []adt.Feature
+		want     []bool
+	}{{
+		desc:     "required and remains closed with embedding",
+		isClosed: true,
+		tree: []CloseDef{
+			{And: 0},
+		},
+		fields: fieldSets(
+			fields(1),
+		),
+		check: fields(2),
+		want:  results(false),
+	}, {
+
+		// 	desc:  "empty tree accepts everything",
+		// 	tree:  nil,
+		// 	check: feats(1),
+		// 	want:  results(true),
+		// }, {
+		desc: "feature required in one",
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 0, IsDef: true},
+		},
+		fields: fieldSets(
+			fields(1),
+			fields(2),
+		),
+		check: fields(1, 2, 3, 4),
+		want:  results(false, true, false, false),
+	}, {
+		desc: "feature required in both",
+		tree: []CloseDef{
+			0: {And: 1, IsDef: true},
+			1: {And: 0, IsDef: true},
+		},
+		fields: fieldSets(
+			fields(1, 3),
+			fields(2, 3),
+		),
+		check: fields(1, 2, 3, 4),
+		want:  results(false, false, true, false),
+	}, {
+		desc: "feature required in neither",
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 0},
+		},
+		fields: fieldSets(
+			fields(1, 3),
+			fields(2, 3),
+		),
+		check: fields(1, 2, 3, 4),
+		want:  results(true, true, true, true),
+	}, {
+		desc: "closedness of embed",
+		tree: []CloseDef{
+			0: {And: 1},
+			1: {And: 0, IsDef: true, NextEmbed: 2},
+			2: {And: -1},
+			3: {And: 4},
+			4: {And: 3, IsDef: true},
+		},
+		fields: fieldSets(
+			fields(3, 4),
+			fields(),
+			fields(),
+			fields(),
+			fields(3),
+		),
+		check: fields(1, 3, 4),
+		want:  results(false, true, false),
+	}, {
+		desc: "implied required through embedding",
+		tree: []CloseDef{
+			0: {And: 0, NextEmbed: 1},
+			1: {And: -1},
+			2: {And: 3},
+			3: {And: 2, IsDef: true},
+		},
+		fields: fieldSets(
+			fields(3, 4),
+			fields(),
+			fields(),
+			fields(3, 2),
+		),
+		check: fields(1, 2, 3, 4),
+		want:  results(false, true, true, true),
+	}, {
+		desc: "implied required through recursive embedding",
+		tree: []CloseDef{
+			0: {And: 0, NextEmbed: 1},
+			1: {And: -1},
+			2: {And: 2, NextEmbed: 3},
+			3: {And: -1},
+			4: {And: 5},
+			5: {And: 4, IsDef: true},
+		},
+		fields: fieldSets(
+			fields(3, 4),
+			fields(),
+			fields(),
+			fields(),
+			fields(),
+			fields(3, 2),
+		),
+		check: fields(1, 2, 3, 4),
+		want:  results(false, true, true, true),
+	}, {
+		desc: "nil fieldSets",
+		tree: []CloseDef{
+			0: {And: 0, NextEmbed: 1},
+			1: {And: -1},
+			2: {And: 3},
+			3: {And: 2, IsDef: true},
+		},
+		fields: fieldSets(
+			nil,
+			nil,
+			fields(1),
+			fields(2),
+		),
+		check: fields(1, 2, 3),
+		want:  results(false, true, false),
+	}, {
+		desc: "required and remains closed with embedding",
+		tree: []CloseDef{
+			{And: 1},
+			{And: 0, NextEmbed: 2, IsDef: true},
+			{And: -1},
+			{And: 3},
+		},
+		fields: fieldSets(
+			fields(1),
+			fields(),
+			nil,
+			fields(2),
+		),
+		check: fields(0, 1, 2, 3),
+		want:  results(false, false, true, false),
+	}}
+	for i, tc := range testCases {
+		t.Run(fmt.Sprint(i, "/", tc.desc), func(t *testing.T) {
+			c := &acceptor{Canopy: tc.tree, isClosed: tc.isClosed}
+			for _, f := range tc.fields {
+				if f == nil {
+					c.Fields = append(c.Fields, nil)
+					continue
+				}
+				fs := &fieldSet{}
+				c.Fields = append(c.Fields, fs)
+				for _, id := range f {
+					fs.fields = append(fs.fields, field{label: id})
+				}
+			}
+
+			ctx := &adt.OpContext{}
+
+			for i, f := range tc.check {
+				got := c.verify(ctx, f)
+				if got != tc.want[i] {
+					t.Errorf("%d/%d: got %v; want %v", i, f, got, tc.want[i])
+				}
+			}
+		})
+	}
+}
+
+func TestCompact(t *testing.T) {
+	testCases := []struct {
+		desc      string
+		tree      []CloseDef
+		conjuncts []adt.ID
+		out       string
+	}{{
+		desc:      "leave nil as is",
+		tree:      nil,
+		conjuncts: []adt.ID{0, 0},
+		out:       "nil",
+	}, {
+		desc:      "simplest case",
+		tree:      []CloseDef{{}},
+		conjuncts: []adt.ID{0, 0},
+		out:       "&( 0[])",
+	}, {
+		desc:      "required ands should not be removed",
+		tree:      []CloseDef{{And: 1}, {And: 0, IsDef: true}},
+		conjuncts: []adt.ID{0, 0},
+		out:       "&( 0[] *1[])",
+		// }, {
+		// 	// TODO:
+		// 	desc:      "required ands should not be removed",
+		// 	tree:      []Node{{And: 1}, {And: 0, Required: false}},
+		// 	conjuncts: []adt.ID{1},
+		// 	out:       "&( 0[] 1[])",
+	}, {
+		desc:      "remove embedding",
+		tree:      []CloseDef{{NextEmbed: 1}, {And: embedRoot}, {And: 2}},
+		conjuncts: []adt.ID{0},
+		out:       "&( 0[])",
+	}, {
+		desc:      "keep embedding if used",
+		tree:      []CloseDef{{NextEmbed: 1}, {And: embedRoot}, {And: 2}},
+		conjuncts: []adt.ID{0, 2},
+		out:       "&( 0[&( 2[])])",
+	}}
+	for _, tc := range testCases {
+		t.Run(tc.desc, func(t *testing.T) {
+			c := &acceptor{Canopy: tc.tree}
+			all := []adt.Conjunct{}
+			for _, id := range tc.conjuncts {
+				all = append(all, adt.Conjunct{CloseID: id})
+			}
+
+			c.Canopy = c.Compact(all)
+
+			w := &strings.Builder{}
+			// fmt.Fprintf(w, "%#v", c.Canopy)
+			writeConjuncts(w, c)
+			if got := w.String(); got != tc.out {
+				t.Errorf("id: got %s; want %s", got, tc.out)
+			}
+		})
+	}
+}
+
+func writeConjuncts(w *strings.Builder, c *acceptor) {
+	if len(c.Canopy) == 0 {
+		w.WriteString("nil")
+		return
+	}
+	writeAnd(w, c, 0)
+}
+
+func writeAnd(w *strings.Builder, c *acceptor, id adt.ID) {
+	w.WriteString("&(")
+	c.visitAnd(id, func(id adt.ID, n CloseDef) bool {
+		w.WriteString(" ")
+		if n.IsDef || n.IsClosed {
+			w.WriteString("*")
+		}
+		fmt.Fprintf(w, "%d[", id)
+		c.visitEmbed(id, func(id adt.ID, n CloseDef) bool {
+			writeAnd(w, c, id)
+			return true
+		})
+		w.WriteString("]")
+		return true
+	})
+	w.WriteString(")")
+}
diff --git a/internal/core/eval/disjunct.go b/internal/core/eval/disjunct.go
index 6028093..9d06174 100644
--- a/internal/core/eval/disjunct.go
+++ b/internal/core/eval/disjunct.go
@@ -88,7 +88,6 @@
 	values      []disjunct
 	numDefaults int
 	cloneID     adt.ID
-	isEmbed     bool
 }
 
 type disjunct struct {
@@ -96,7 +95,7 @@
 	isDefault bool
 }
 
-func (n *nodeContext) addDisjunction(env *adt.Environment, x *adt.DisjunctionExpr, cloneID adt.ID, isEmbed bool) {
+func (n *nodeContext) addDisjunction(env *adt.Environment, x *adt.DisjunctionExpr, cloneID adt.ID) {
 	a := []disjunct{}
 
 	numDefaults := 0
@@ -113,10 +112,10 @@
 	})
 
 	n.disjunctions = append(n.disjunctions,
-		envDisjunct{env, a, numDefaults, cloneID, isEmbed})
+		envDisjunct{env, a, numDefaults, cloneID})
 }
 
-func (n *nodeContext) addDisjunctionValue(env *adt.Environment, x *adt.Disjunction, cloneID adt.ID, isEmbed bool) {
+func (n *nodeContext) addDisjunctionValue(env *adt.Environment, x *adt.Disjunction, cloneID adt.ID) {
 	a := []disjunct{}
 
 	for i, v := range x.Values {
@@ -124,7 +123,7 @@
 	}
 
 	n.disjunctions = append(n.disjunctions,
-		envDisjunct{env, a, x.NumDefaults, cloneID, isEmbed})
+		envDisjunct{env, a, x.NumDefaults, cloneID})
 }
 
 func (n *nodeContext) updateResult() (isFinal bool) {
@@ -298,7 +297,7 @@
 	v := d.values[k]
 	n.isFinal = n.isFinal && k == len(d.values)-1
 	c := adt.MakeConjunct(d.env, v.expr, d.cloneID)
-	n.addExprConjunct(c, d.isEmbed)
+	n.addExprConjunct(c)
 
 	for n.expandOne() {
 	}
diff --git a/internal/core/eval/eval.go b/internal/core/eval/eval.go
index bc5a477..4b81ce0 100644
--- a/internal/core/eval/eval.go
+++ b/internal/core/eval/eval.go
@@ -36,6 +36,8 @@
 	"cuelang.org/go/internal/core/debug"
 )
 
+var Debug = false
+
 // TODO TODO TODO TODO TODO TODO  TODO TODO TODO  TODO TODO TODO  TODO TODO TODO
 //
 // - Reuse work from previous cycles. For instance, if we can guarantee that a
@@ -110,18 +112,12 @@
 }
 
 type Evaluator struct {
-	r       adt.Runtime
-	index   adt.StringIndexer
-	closeID adt.ID
+	r     adt.Runtime
+	index adt.StringIndexer
 
 	stats Stats
 }
 
-func (e *Evaluator) nextID() adt.ID {
-	e.closeID++
-	return e.closeID
-}
-
 func (e *Evaluator) Eval(v *adt.Vertex) errors.Error {
 	if v.Value == nil {
 		ctx := adt.NewContext(e.r, e, v)
@@ -326,6 +322,36 @@
 	}
 	saved := *v
 
+	var closedInfo *acceptor
+	if v.Parent != nil {
+		closedInfo, _ = v.Parent.Closed.(*acceptor)
+	}
+
+	if !v.Label.IsInt() && closedInfo != nil {
+		ci := closedInfo
+		// Visit arcs recursively to validate and compute error.
+		switch ok, err := ci.verifyArc(c, v.Label, v); {
+		case err != nil:
+			// Record error in child node to allow recording multiple
+			// conflicts at the appropriate place, to allow valid fields to
+			// be represented normally and, most importantly, to avoid
+			// recursive processing of a disallowed field.
+			v.Value = err
+			v.SetValue(c, adt.Finalized, err)
+			return shared
+
+		case !ok: // hidden field
+			// A hidden field is exempt from checking. Ensure that the
+			// closedness doesn't carry over into children.
+			// TODO: make this more fine-grained per package, allowing
+			// checked restrictions to be defined within the package.
+			closedInfo = closedInfo.clone()
+			for i := range closedInfo.Canopy {
+				closedInfo.Canopy[i].IsDef = false
+			}
+		}
+	}
+
 	defer c.PopArc(c.PushArc(v))
 
 	e.stats.UnifyCount++
@@ -344,6 +370,11 @@
 		// validation has taken place.
 		*v = saved
 		v.Value = cycle
+
+		if closedInfo != nil {
+			v.Closed = closedInfo.clone()
+		}
+
 		v.UpdateStatus(adt.Evaluating)
 
 		// If the result is a struct, it needs to be closed if:
@@ -352,6 +383,9 @@
 		//      recursively.
 		//   3) this node embeds a closed struct.
 		needClose := v.Label.IsDef()
+		// if needClose {
+		// 	closedInfo.isClosed = true
+		// }
 
 		n := &nodeContext{
 			kind:       adt.TopKind,
@@ -366,7 +400,7 @@
 		for _, x := range v.Conjuncts {
 			// TODO: needed for reentrancy. Investigate usefulness for cycle
 			// detection.
-			n.addExprConjunct(x, true)
+			n.addExprConjunct(x)
 		}
 
 		if i == 0 {
@@ -561,42 +595,6 @@
 }
 
 func (n *nodeContext) updateClosedInfo() {
-	ctx := n.ctx
-
-	var c *CloseDef
-	if a, _ := n.node.Closed.(*acceptor); a != nil {
-		c = a.tree
-		n.needClose = n.needClose || a.isClosed
-	}
-
-	replace := n.replace
-	if replace == nil {
-		replace = map[adt.ID]*CloseDef{}
-	}
-
-	// Mark any used CloseID to keep, if not already replaced.
-	for _, x := range n.optionals {
-		if _, ok := replace[x.id]; !ok {
-			replace[x.id] = nil
-		}
-	}
-	for _, a := range n.node.Arcs {
-		for _, c := range a.Conjuncts {
-			if c.Env != nil {
-				if _, ok := replace[c.ID()]; !ok {
-					replace[c.ID()] = nil
-				}
-			}
-		}
-	}
-
-	updated := updateClosed(c, replace)
-	if updated == nil && n.needClose {
-		updated = &CloseDef{Src: n.node}
-	}
-
-	// TODO retrieve from env.
-
 	if err := n.getErr(); err != nil {
 		if b, _ := n.node.Value.(*adt.Bottom); b != nil {
 			err = adt.CombineErrors(nil, b, err)
@@ -606,35 +604,34 @@
 		// later. Logically we're done.
 	}
 
-	m := &acceptor{
-		tree:     updated,
-		fields:   n.optionals,
-		isClosed: n.needClose,
-		openList: n.openList,
-		isList:   n.node.IsList(),
-	}
-	if updated != nil || len(n.optionals) > 0 {
-		n.node.Closed = m
+	if n.node.IsList() {
+		return
 	}
 
-	// Visit arcs recursively to validate and compute error.
-	for _, a := range n.node.Arcs {
-		if updated != nil {
-			a.Closed = m
+	a, _ := n.node.Closed.(*acceptor)
+	if a == nil {
+		if !n.node.IsList() && !n.needClose {
+			return
 		}
-		if updated != nil && m.isClosed {
-			if accept := n.nodeShared.accept; accept != nil {
-				if !accept.Accept(n.ctx, a.Label) {
-					label := a.Label.SelectorString(ctx)
-					n.node.Value = ctx.NewErrf(
-						"field `%s` not allowed by Acceptor", label)
-				}
-			} else if err := m.verifyArcAllowed(n.ctx, a.Label); err != nil {
-				n.node.Value = err
+		a = closedInfo(n.node)
+	}
+
+	// TODO: set this earlier.
+	n.needClose = n.needClose || a.isClosed
+	a.isClosed = n.needClose
+
+	// TODO: consider removing this. Now checking is done at the top of
+	// evalVertex, skipping one level of checks happens naturally again
+	// for Value.Unify (API).
+	ctx := n.ctx
+
+	if accept := n.nodeShared.accept; accept != nil {
+		for _, a := range n.node.Arcs {
+			if !accept.Accept(n.ctx, a.Label) {
+				label := a.Label.SelectorString(ctx)
+				n.node.Value = ctx.NewErrf(
+					"field `%s` not allowed by Acceptor", label)
 			}
-			// TODO: use continue to not process already failed fields,
-			// or at least don't record recursive error.
-			// continue
 		}
 	}
 }
@@ -737,23 +734,15 @@
 	dynamicFields []envDynamic
 	ifClauses     []envYield
 	forClauses    []envYield
-	optionals     []fieldSet // env + field
-	// NeedClose:
-	// - node starts definition
-	// - embeds a definition
-	// - parent node is closing
+	// TODO: remove this and annotate directly in acceptor.
 	needClose bool
-	openList  bool
 	aStruct   adt.Expr
 	hasTop    bool
-	newClose  *CloseDef
-	// closeID   adt.ID // from parent, or if not exist, new if introducing a def.
-	replace map[adt.ID]*CloseDef
 
 	// Expression conjuncts
 	lists  []envList
 	vLists []*adt.Vertex
-	exprs  []conjunct
+	exprs  []adt.Conjunct
 
 	hasCycle    bool // has conjunct with structural cycle
 	hasNonCycle bool // has conjunct without structural cycle
@@ -764,6 +753,15 @@
 	isFinal      bool
 }
 
+func closedInfo(n *adt.Vertex) *acceptor {
+	a, _ := n.Closed.(*acceptor)
+	if a == nil {
+		a = &acceptor{}
+		n.Closed = a
+	}
+	return a
+}
+
 func (n *nodeContext) updateNodeType(k adt.Kind, v adt.Expr) bool {
 	ctx := n.ctx
 	kind := n.kind & k
@@ -883,11 +881,6 @@
 	}
 }
 
-type conjunct struct {
-	adt.Conjunct
-	top bool
-}
-
 type envDynamic struct {
 	env   *adt.Environment
 	field *adt.DynamicField
@@ -923,7 +916,7 @@
 // addExprConjuncts will attempt to evaluate an adt.Expr and insert the value
 // into the nodeContext if successful or queue it for later evaluation if it is
 // incomplete or is not value.
-func (n *nodeContext) addExprConjunct(v adt.Conjunct, top bool) {
+func (n *nodeContext) addExprConjunct(v adt.Conjunct) {
 	env := v.Env
 	id := v.CloseID
 
@@ -933,14 +926,14 @@
 
 	case *adt.BinaryExpr:
 		if x.Op == adt.AndOp {
-			n.addExprConjunct(adt.MakeConjunct(env, x.X, id), false)
-			n.addExprConjunct(adt.MakeConjunct(env, x.Y, id), false)
+			n.addExprConjunct(adt.MakeConjunct(env, x.X, id))
+			n.addExprConjunct(adt.MakeConjunct(env, x.Y, id))
 		} else {
-			n.evalExpr(v, top)
+			n.evalExpr(v)
 		}
 
 	case *adt.StructLit:
-		n.addStruct(env, x, id, top)
+		n.addStruct(env, x, id)
 
 	case *adt.ListLit:
 		n.lists = append(n.lists, envList{env: env, list: x, id: id})
@@ -949,20 +942,16 @@
 		if n.disjunctions != nil {
 			_ = n.disjunctions
 		}
-		n.addDisjunction(env, x, id, top)
+		n.addDisjunction(env, x, id)
 
 	default:
 		// Must be Resolver or Evaluator.
-		n.evalExpr(v, top)
-	}
-
-	if top {
-		n.updateReplace(v.CloseID)
+		n.evalExpr(v)
 	}
 }
 
 // evalExpr is only called by addExprConjunct.
-func (n *nodeContext) evalExpr(v adt.Conjunct, top bool) {
+func (n *nodeContext) evalExpr(v adt.Conjunct) {
 	// Require an Environment.
 	ctx := n.ctx
 
@@ -997,7 +986,7 @@
 			}
 		}
 		if arc == nil {
-			n.exprs = append(n.exprs, conjunct{v, top})
+			n.exprs = append(n.exprs, v)
 			break
 		}
 
@@ -1059,21 +1048,22 @@
 			defer func() { arc.SelfCount-- }()
 		}
 
+		if arc.Label.IsDef() { // should test whether closed, not isDef?
+			c := closedInfo(n.node)
+			closeID = c.InsertDefinition(v.CloseID, x)
+			n.needClose = true // TODO: is this still necessary?
+		}
+
 		// TODO: uncommenting the following almost works, but causes some
 		// faulty results in complex cycle handling between disjunctions.
 		// The reason is that disjunctions must be eliminated if checks in
 		// values on which they depend fail.
 		ctx.Unify(ctx, arc, adt.Finalized)
 
-		if arc.Label.IsDef() { // should test whether closed, not isDef?
-			id := n.eval.nextID()
-			n.insertClosed(arc, id, cyclic, arc)
-		} else {
-			for _, c := range arc.Conjuncts {
-				c = updateCyclic(c, cyclic, arc)
-				c.CloseID = closeID
-				n.addExprConjunct(c, top)
-			}
+		for _, c := range arc.Conjuncts {
+			c = updateCyclic(c, cyclic, arc)
+			c.CloseID = closeID
+			n.addExprConjunct(c)
 		}
 
 	case adt.Evaluator:
@@ -1081,7 +1071,7 @@
 		// Could be unify?
 		val, complete := ctx.Evaluate(v.Env, v.Expr())
 		if !complete {
-			n.exprs = append(n.exprs, conjunct{v, top})
+			n.exprs = append(n.exprs, v)
 			break
 		}
 
@@ -1092,7 +1082,7 @@
 			if ok && b.IsIncomplete() && len(v.Conjuncts) > 0 {
 				for _, c := range v.Conjuncts {
 					c.CloseID = closeID
-					n.addExprConjunct(c, top)
+					n.addExprConjunct(c)
 				}
 				break
 			}
@@ -1139,68 +1129,41 @@
 	return adt.MakeConjunct(env, c.Expr(), c.CloseID)
 }
 
-func (n *nodeContext) insertClosed(arc *adt.Vertex, id adt.ID, cyclic bool, deref *adt.Vertex) {
-	n.needClose = true
-
-	current := n.newClose
-	n.newClose = nil
-
-	for _, c := range arc.Conjuncts {
-		c = updateCyclic(c, cyclic, deref)
-		c.CloseID = id
-		n.addExprConjunct(c, false)
-	}
-
-	current, n.newClose = n.newClose, current
-
-	if current == nil {
-		current = &CloseDef{ID: id, Src: arc}
-	}
-	n.addAnd(current)
-}
-
 func (n *nodeContext) addValueConjunct(env *adt.Environment, v adt.Value, id adt.ID) {
 	n.updateCyclicStatus(env)
 
+	ctx := n.ctx
+
 	if x, ok := v.(*adt.Vertex); ok {
-		needClose := false
-		if isStruct(x) {
-			n.aStruct = x
-			// TODO: find better way to mark as struct.
-			// For instance, we may want to add a faux
-			// Structlit for topological sort.
-			// return
-
-			if x.IsClosed(n.ctx) {
-				needClose = true
-			}
-
-			n.node.AddStructs(x.Structs...)
+		if m, ok := x.Value.(*adt.StructMarker); ok && m.NeedClose {
+			ci := closedInfo(n.node)
+			ci.isClosed = true // TODO: remove
+			id = ci.InsertDefinition(id, x)
+			ci.Canopy[id].IsClosed = true
+			ci.Canopy[id].IsDef = false
 		}
 
 		if !x.IsData() && len(x.Conjuncts) > 0 {
 			cyclic := env != nil && env.Cyclic
-			if needClose {
-				n.insertClosed(x, id, cyclic, nil)
+
+			if isComplexStruct(x) {
+				closedInfo(n.node).InsertSubtree(id, n, x, cyclic)
 				return
 			}
+
 			for _, c := range x.Conjuncts {
 				c = updateCyclic(c, cyclic, nil)
 				c.CloseID = id
-				n.addExprConjunct(c, false) // TODO: Pass from eval
+				n.addExprConjunct(c) // TODO: Pass from eval
 			}
 			return
 		}
 
-		if x.IsList() {
-			n.vLists = append(n.vLists, x)
-			return
-		}
-
 		// TODO: evaluate value?
 		switch v := x.Value.(type) {
 		case *adt.ListMarker:
-			panic("unreachable")
+			n.vLists = append(n.vLists, x)
+			return
 
 		case *adt.StructMarker:
 			for _, a := range x.Arcs {
@@ -1237,11 +1200,9 @@
 		return
 	}
 
-	ctx := n.ctx
-
 	switch x := v.(type) {
 	case *adt.Disjunction:
-		n.addDisjunctionValue(env, x, id, true)
+		n.addDisjunctionValue(env, x, id)
 
 	case *adt.Conjunction:
 		for _, x := range x.Values {
@@ -1251,7 +1212,13 @@
 	case *adt.Top:
 		n.hasTop = true
 		// TODO: Is this correct. Needed for elipsis, but not sure for others.
-		n.optionals = append(n.optionals, fieldSet{env: env, id: id, isOpen: true})
+		// n.optionals = append(n.optionals, fieldSet{env: env, id: id, isOpen: true})
+		if a, _ := n.node.Closed.(*acceptor); a != nil {
+			// Embedding top indicates that now all possible values are allowed
+			// and that we should no longer enforce closedness within this
+			// conjunct.
+			a.node(id).IsDef = false
+		}
 
 	case *adt.BasicType:
 		// handled above
@@ -1320,8 +1287,7 @@
 func (n *nodeContext) addStruct(
 	env *adt.Environment,
 	s *adt.StructLit,
-	closeID adt.ID,
-	top bool) {
+	closeID adt.ID) {
 
 	n.updateCyclicStatus(env) // to handle empty structs.
 
@@ -1382,24 +1348,16 @@
 
 		case adt.Expr:
 			hasEmbed = true
+			hasOther = x
+
+			// add embedding to optional
+
+			// TODO(perf): only do this if addExprConjunct below will result in
+			// a fieldSet. Otherwise the entry will just be removed next.
+			id := closedInfo(n.node).InsertEmbed(closeID, x)
 
 			// push and opo embedding type.
-			id := n.eval.nextID()
-
-			current := n.newClose
-			n.newClose = nil
-
-			hasOther = x
-			n.addExprConjunct(adt.MakeConjunct(childEnv, x, id), false)
-
-			current, n.newClose = n.newClose, current
-
-			if current == nil {
-				current = &CloseDef{Src: s, ID: id} // TODO: isClosed?
-			} else {
-				// n.needClose = true
-			}
-			n.addOr(closeID, current)
+			n.addExprConjunct(adt.MakeConjunct(childEnv, x, id))
 
 		case *adt.BulkOptionalField:
 			n.aStruct = s
@@ -1430,7 +1388,7 @@
 		opt.MatchAndInsert(ctx, arc)
 	}
 
-	n.optionals = append(n.optionals, opt)
+	closedInfo(n.node).insertFieldSet(closeID, &opt)
 
 	for _, d := range s.Decls {
 		switch x := d.(type) {
@@ -1451,9 +1409,9 @@
 	arc.AddConjunct(x)
 
 	if isNew {
-		for _, o := range n.optionals {
+		closedInfo(n.node).visitAllFieldSets(func(o *fieldSet) {
 			o.MatchAndInsert(ctx, arc)
-		}
+		})
 	}
 	return arc
 }
@@ -1497,7 +1455,7 @@
 	exprs := n.exprs
 	n.exprs = n.exprs[:0]
 	for _, x := range exprs {
-		n.addExprConjunct(x.Conjunct, x.top)
+		n.addExprConjunct(x)
 
 		// collect and and or
 	}
@@ -1570,7 +1528,7 @@
 		}
 
 		for _, st := range sa {
-			n.addStruct(st.env, st.s, d.id, true)
+			n.addStruct(st.env, st.s, d.id)
 		}
 	}
 
@@ -1717,7 +1675,8 @@
 			continue // error generated earlier, if applicable.
 		}
 
-		n.optionals = append(n.optionals, a.fields...)
+		// TODO: why not necessary?
+		// n.optionals = append(n.optionals, a.fields...)
 
 		for _, arc := range elems[len(newElems):] {
 			l.MatchAndInsert(c, arc)
@@ -1729,10 +1688,10 @@
 			continue
 		}
 
-		f := fieldSet{pos: l.list, env: l.env, id: l.id}
+		f := &fieldSet{pos: l.list, env: l.env, id: l.id}
 		f.AddEllipsis(c, l.elipsis)
 
-		n.optionals = append(n.optionals, f)
+		closedInfo(n.node).insertFieldSet(l.id, f)
 
 		for _, arc := range elems[l.n:] {
 			f.MatchAndInsert(c, arc)
@@ -1750,7 +1709,9 @@
 		}
 	}
 
-	n.openList = isOpen
+	ci := closedInfo(n.node)
+	ci.isList = true
+	ci.openList = isOpen
 
 	if m, ok := n.node.Value.(*adt.ListMarker); !ok {
 		n.node.SetValue(c, adt.Partial, &adt.ListMarker{
diff --git a/internal/core/eval/eval_test.go b/internal/core/eval/eval_test.go
index 1344a76..bf61212 100644
--- a/internal/core/eval/eval_test.go
+++ b/internal/core/eval/eval_test.go
@@ -115,7 +115,9 @@
 	if err != nil {
 		t.Fatal(err)
 	}
-	t.Error(debug.NodeString(r, v, nil))
+
+	// t.Error(debug.NodeString(r, v, nil))
+	// eval.Debug = true
 
 	e := eval.New(r)
 	ctx := e.NewContext(v)
diff --git a/tools/trim/trim_test.go b/tools/trim/trim_test.go
index 28010bc..f016e51 100644
--- a/tools/trim/trim_test.go
+++ b/tools/trim/trim_test.go
@@ -135,7 +135,7 @@
 		x: >=5 & <=8 & int
 	}
 
-	t: u: { x: 5 }
+	t: u: { x: 5 } // TODO: should be t: u: {}
 }
 
 group: {
@@ -157,7 +157,7 @@
 		x: >=5 & <=8 & int
 	}
 
-	t: u: {}
+	t: u: {x: 5}
 }
 
 group: {