cue: fix semantics of field comprehensions

Field comprehensions were previously implemented as struct
comprehensions, which have subtly different semantics: the
comprehension evaluated to a separate struct that was then unified
with the enclosing value, so its clauses could not refer to fields
defined in the struct in which the comprehension appears. Field
comprehensions now yield their fields directly into the enclosing
struct.

As a result, trim’s behavior changes as well: with comprehensions
expanded into the enclosing struct, it is harder to detect in
evaluated code whether the source of a field is a comprehension, so
trim now tracks comprehension-generated nodes explicitly.
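
For illustration, a sketch of the new semantics (adapted from the
field comprehension case added to resolve_test.go below); the
comprehension may now refer to the sibling field b of the struct in
which it appears:

    a: { b c: 4 }
    a: {
        b d: 5
        "\(k)": v for k, v in b  // yields c: 4 and d: 5 into a
    }
    // a evaluates to { b: { c: 4, d: 5 }, c: 4, d: 5 }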

Change-Id: I59ec737bc8cc22cc4bc5909fbc9dc7e7d7c7aa5c
diff --git a/cmd/cue/cmd/testdata/loaderr/loaderr.out b/cmd/cue/cmd/testdata/loaderr/loaderr.out
index 68b2374..2921045 100644
--- a/cmd/cue/cmd/testdata/loaderr/loaderr.out
+++ b/cmd/cue/cmd/testdata/loaderr/loaderr.out
@@ -1,2 +1,3 @@
-cannot find package "non-existing"
+cannot find package "non-existing":
+    
 terminating because of errors
diff --git a/cmd/cue/cmd/trim.go b/cmd/cue/cmd/trim.go
index cac1c8d..8265ede 100644
--- a/cmd/cue/cmd/trim.go
+++ b/cmd/cue/cmd/trim.go
@@ -139,7 +139,7 @@
 		for _, f := range inst.Files {
 			filename := f.Filename
 
-			f.Decls = gen.trimDecls(f.Decls, rm, root, false)
+			f.Decls = gen.trimDecls(f.Decls, rm, root, true)
 
 			opts := []format.Option{}
 			if *fSimplify {
@@ -176,6 +176,7 @@
 	stack     []string
 	exclude   map[ast.Node]bool // don't remove fields marked here
 	alwaysGen map[ast.Node]bool // node is always from a generated source
+	fromComp  map[ast.Node]bool // node originated from a comprehension
 }
 
 func newTrimSet(fset *token.FileSet) *trimSet {
@@ -183,6 +184,7 @@
 		fset:      fset,
 		exclude:   map[ast.Node]bool{},
 		alwaysGen: map[ast.Node]bool{},
+		fromComp:  map[ast.Node]bool{},
 	}
 }
 
@@ -212,23 +214,37 @@
 
 		case *ast.ListLit:
 			if x.Type != nil {
-				t.markAlwaysGen(x.Type)
+				t.markAlwaysGen(x.Type, false)
 			}
 
 		case *ast.Field:
 			if _, ok := x.Label.(*ast.TemplateLabel); ok {
-				t.markAlwaysGen(x.Value)
+				t.markAlwaysGen(x.Value, false)
 			}
 
 		case *ast.ListComprehension, *ast.ComprehensionDecl:
-			t.markAlwaysGen(x)
+			t.markAlwaysGen(x, true)
 		}
 	})
 }
 
-func (t *trimSet) markAlwaysGen(n ast.Node) {
-	ast.Walk(n, func(n ast.Node) bool {
+func (t *trimSet) markAlwaysGen(first ast.Node, isComp bool) {
+	ast.Walk(first, func(n ast.Node) bool {
+		if t.alwaysGen[n] {
+			return false
+		}
 		t.alwaysGen[n] = true
+		if isComp {
+			t.fromComp[n] = true
+		}
+		if x, ok := n.(*ast.Ident); ok && n != first {
+			// Also mark any value used within a template.
+			if x.Node != nil {
+				// fmt.Println("MARKED", internal.DebugStr(x.Node),
+				// "by", internal.DebugStr(first))
+				// t.markAlwaysGen(x.Node)
+			}
+		}
 		return true
 	}, nil)
 }
@@ -279,6 +295,10 @@
 //   based on another value without doing a full unification. So for now we
 //   skip any disjunction containing structs.
 //
+//		v      the current value
+//		m      the "mixed-in" values
+//		scope  in which to evaluate expressions
+//		rmSet  nodes in v that may be removed by the caller
 func (t *trimSet) trim(label string, v, m, scope cue.Value) (rmSet []ast.Node) {
 	saved := t.stack
 	t.stack = append(t.stack, label)
@@ -296,32 +316,36 @@
 		}
 	}
 
+	// Collect generated nodes.
+	// Only keep the good parts of the template.
+	// Incoming structs may be incomplete resulting in errors. It is safe
+	// to ignore these. If there is an actual error, it will manifest in
+	// the evaluation of v.
+	in := cue.Value{}
+	gen := []ast.Node{}
+	for _, v := range mSplit {
+		// TODO: consider resolving incomplete values within the current
+		// scope, as we do for fields.
+		if v.IsValid() {
+			in = in.Unify(v)
+		}
+		gen = append(gen, v.Source())
+	}
+
 	switch v.Kind() {
 	case cue.StructKind:
 		// TODO: merge template preprocessing with that of fields.
 
-		// Only keep the good parts of the template.
-		// Incoming structs may be incomplete resulting in errors. It is safe
-		// to ignore these. If there is an actual error, it will manifest in
-		// the evaluation of v.
-		in := cue.Value{}
-		gen := []ast.Node{}
-		for _, v := range mSplit {
-			// TODO: consider resolving incomplete values within the current
-			// scope, as we do for fields.
-			if v.IsValid() {
-				in = in.Unify(v)
-			}
-			// Collect generated nodes.
-			gen = append(gen, v.Source())
-		}
-
 		// Identify generated components and unify them with the mixin value.
+		exists := false
 		for _, v := range v.Split() {
 			if src := v.Source(); t.alwaysGen[src] {
 				if w := in.Unify(v); w.Err() == nil {
 					in = w
 				}
+				// One of the sources of this struct is generated. That means
+				// we can safely delete a non-generated version.
+				exists = true
 				gen = append(gen, src)
 			}
 		}
@@ -346,6 +370,38 @@
 			rm = append(rm, removed...)
 		}
 
+		canRemove := fn == nil
+		for _, v := range vSplit {
+			src := v.Source()
+			if t.fromComp[src] {
+				canRemove = true
+			}
+		}
+
+		// Remove fields from source.
+		for _, v := range vSplit {
+			if src := v.Source(); !t.alwaysGen[src] {
+				switch x := src.(type) {
+				case *ast.File:
+					// TODO: use in instead?
+					x.Decls = t.trimDecls(x.Decls, rm, m, canRemove)
+
+				case *ast.StructLit:
+					x.Elts = t.trimDecls(x.Elts, rm, m, canRemove)
+					exists = exists || m.Exists()
+					if len(x.Elts) == 0 && exists && t.canRemove(src) && !inNodes(gen, src) {
+						rmSet = append(rmSet, src)
+					}
+
+				default:
+					if len(t.stack) == 1 {
+						// TODO: fix this hack to pass down the fields to remove
+						return rm
+					}
+				}
+			}
+		}
+
 		if *fTrace {
 			w := &bytes.Buffer{}
 			fmt.Fprintln(w)
@@ -367,29 +423,6 @@
 			t.traceMsg(w.String())
 		}
 
-		// Remove fields from source.
-		for _, v := range vSplit {
-			if src := v.Source(); !t.alwaysGen[src] {
-				switch x := src.(type) {
-				case *ast.File:
-					// TODO: use in instead?
-					x.Decls = t.trimDecls(x.Decls, rm, m, fn != nil)
-
-				case *ast.StructLit:
-					x.Elts = t.trimDecls(x.Elts, rm, m, fn != nil)
-					if len(x.Elts) == 0 && m.Exists() && t.canRemove(src) && !inNodes(gen, src) {
-						rmSet = append(rmSet, src)
-					}
-
-				default:
-					if len(t.stack) == 1 {
-						// TODO: fix this hack to pass down the fields to remove
-						return rm
-					}
-				}
-			}
-		}
-
 	case cue.ListKind:
 		mIter, _ := m.List()
 		i := 0
@@ -416,7 +449,7 @@
 						break
 					}
 				}
-				if rmList && m.Exists() && t.canRemove(src) {
+				if rmList && m.Exists() && t.canRemove(src) && !inNodes(gen, src) {
 					rmSet = append(rmSet, src)
 				}
 			}
@@ -424,17 +457,6 @@
 		fallthrough
 
 	default:
-		// Collect generated nodes.
-		in := cue.Value{}
-		gen := []ast.Node{}
-		for _, v := range mSplit {
-			if v.IsValid() {
-				in = in.Unify(v)
-			}
-			// Collect generated nodes.
-			gen = append(gen, v.Source())
-		}
-
 		for _, v := range vSplit {
 			src := v.Source()
 			if t.alwaysGen[src] || inNodes(gen, src) {
@@ -492,14 +514,14 @@
 	return rmSet
 }
 
-func (t *trimSet) trimDecls(decls []ast.Decl, rm []ast.Node, m cue.Value, fromTemplate bool) []ast.Decl {
+func (t *trimSet) trimDecls(decls []ast.Decl, rm []ast.Node, m cue.Value, allow bool) []ast.Decl {
 	a := make([]ast.Decl, 0, len(decls))
 
 	for _, d := range decls {
 		if f, ok := d.(*ast.Field); ok {
 			label, _ := ast.LabelName(f.Label)
 			v := m.Lookup(label)
-			if inNodes(rm, f.Value) && (!fromTemplate || v.Exists()) {
+			if inNodes(rm, f.Value) && (allow || v.Exists()) {
 				continue
 			}
 		}
diff --git a/cue/ast.go b/cue/ast.go
index 2591bbd..37b92dc 100644
--- a/cue/ast.go
+++ b/cue/ast.go
@@ -42,17 +42,12 @@
 		return result.(*bottom)
 	}
 
-	for _, c := range v.comprehensions {
-		inst.rootValue = mkBin(v.ctx(), token.NoPos, opUnify, inst.rootValue, c)
-	}
-
 	return nil
 }
 
 type astVisitor struct {
 	*astState
-	object         *structLit
-	comprehensions []*structComprehension
+	object *structLit
 
 	inSelector int
 }
@@ -164,9 +159,6 @@
 				v1.walk(e)
 			}
 		}
-		for _, c := range v1.comprehensions {
-			v.comprehensions = append(v.comprehensions, c)
-		}
 		value = obj
 
 	case *ast.ImportDecl:
@@ -198,17 +190,10 @@
 			}
 		}
 		value = obj
-		for i, c := range v1.comprehensions {
-			if i == 0 && obj.template == nil && len(obj.arcs) == 0 {
-				value = c
-				continue
-			}
-			value = mkBin(v.ctx(), token.NoPos, opUnify, value, c)
-		}
 
 	case *ast.ComprehensionDecl:
 		yielder := &yield{baseValue: newExpr(n.Field.Value)}
-		sc := &structComprehension{
+		fc := &fieldComprehension{
 			baseValue: newDecl(n),
 			clauses:   wrapClauses(v, yielder, n.Clauses),
 		}
@@ -228,7 +213,7 @@
 			v.setScope(field, template)
 			template.value = v.walk(field.Value)
 			yielder.value = template
-			sc.isTemplate = true
+			fc.isTemplate = true
 
 		case *ast.BasicLit, *ast.Ident:
 			name, ok := ast.LabelName(x)
@@ -245,19 +230,19 @@
 		}
 		// yielder.key = v.walk(n.Field.Label)
 		// yielder.value = v.walk(n.Field.Value)
-		v.comprehensions = append(v.comprehensions, sc)
+		v.object.comprehensions = append(v.object.comprehensions, fc)
 
 	case *ast.Field:
 		switch x := n.Label.(type) {
 		case *ast.Interpolation:
 			yielder := &yield{baseValue: newNode(x)}
-			sc := &structComprehension{
+			fc := &fieldComprehension{
 				baseValue: newDecl(n),
 				clauses:   yielder,
 			}
 			yielder.key = v.walk(x)
 			yielder.value = v.walk(n.Value)
-			v.comprehensions = append(v.comprehensions, sc)
+			v.object.comprehensions = append(v.object.comprehensions, fc)
 
 		case *ast.TemplateLabel:
 			f := v.label(x.Ident.Name, true)
diff --git a/cue/ast_test.go b/cue/ast_test.go
index bbee836..f9bdd8e 100644
--- a/cue/ast_test.go
+++ b/cue/ast_test.go
@@ -211,13 +211,13 @@
 			c: 3
 		}
 		`,
-		out: `<0>{a: { <1>for k, v in <0>.b if (<0>.b.a < <1>.k) yield (""+<1>.k+""): <1>.v }, b: <2>{a: 1, b: 2, c: 3}}`,
+		out: `<0>{a: <1>{ <2>for k, v in <0>.b if (<0>.b.a < <2>.k) yield (""+<2>.k+""): <2>.v}, b: <3>{a: 1, b: 2, c: 3}}`,
 	}, {
 		in: `
 			a: { "\(v)": v for k, v in b }
 			b: { a: "aa", b: "bb", c: "cc" }
 			`,
-		out: `<0>{a: { <1>for k, v in <0>.b yield (""+<1>.v+""): <1>.v }, b: <2>{a: "aa", b: "bb", c: "cc"}}`,
+		out: `<0>{a: <1>{ <2>for k, v in <0>.b yield (""+<2>.v+""): <2>.v}, b: <3>{a: "aa", b: "bb", c: "cc"}}`,
 	}, {
 		in: `
 			a: [ v for _, v in b ]
diff --git a/cue/binop.go b/cue/binop.go
index 016ecc1..30c99bb 100644
--- a/cue/binop.go
+++ b/cue/binop.go
@@ -461,7 +461,7 @@
 		return x
 	}
 	arcs := make(arcs, 0, len(x.arcs)+len(y.arcs))
-	obj := &structLit{binSrc(src.Pos(), op, x, other), x.emit, nil, arcs}
+	obj := &structLit{binSrc(src.Pos(), op, x, other), x.emit, nil, nil, arcs}
 	defer ctx.pushForwards(x, obj, y, obj).popForwards()
 
 	tx, ex := evalLambda(ctx, x.template)
@@ -486,7 +486,16 @@
 		t = v.(*lambdaExpr)
 	}
 	if t != nil {
-		obj.template = ctx.copy(t).(*lambdaExpr)
+		obj.template = ctx.copy(t)
+	}
+
+	sz := len(x.comprehensions) + len(y.comprehensions)
+	obj.comprehensions = make([]*fieldComprehension, sz)
+	for i, c := range x.comprehensions {
+		obj.comprehensions[i] = ctx.copy(c).(*fieldComprehension)
+	}
+	for i, c := range y.comprehensions {
+		obj.comprehensions[i+len(x.comprehensions)] = ctx.copy(c).(*fieldComprehension)
 	}
 
 	for _, a := range x.arcs {
@@ -497,7 +506,7 @@
 	for _, a := range y.arcs {
 		v := ctx.copy(a.v)
 		for i, b := range obj.arcs {
-			if a.feature == b.feature && a.v != b.v {
+			if a.feature == b.feature {
 				v = mkBin(ctx, src.Pos(), opUnify, b.v, v)
 				obj.arcs[i].v = v
 				continue outer
@@ -973,3 +982,7 @@
 func (x *yield) binOp(ctx *context, src source, op op, other evaluated) evaluated {
 	return ctx.mkIncompatible(src, op, x, other)
 }
+
+func (x *fieldComprehension) binOp(ctx *context, src source, op op, other evaluated) evaluated {
+	return ctx.mkIncompatible(src, op, x, other)
+}
diff --git a/cue/copy.go b/cue/copy.go
index 3833925..39ef674 100644
--- a/cue/copy.go
+++ b/cue/copy.go
@@ -30,25 +30,37 @@
 
 	case *structLit:
 		arcs := make(arcs, len(x.arcs))
+
+		obj := &structLit{x.baseValue, nil, nil, nil, arcs}
+
+		defer ctx.pushForwards(x, obj).popForwards()
+
 		emit := x.emit
 		if emit != nil {
 			emit = ctx.copy(x.emit)
 		}
+		obj.emit = x.emit
+
 		t := x.template
 		if t != nil {
 			v := ctx.copy(t)
 			if isBottom(v) {
 				return t, false
 			}
-			t = v.(*lambdaExpr)
+			t = v
 		}
-		obj := &structLit{x.baseValue, emit, t, arcs}
+		obj.template = t
 
-		defer ctx.pushForwards(x, obj).popForwards()
 		for i, a := range x.arcs {
 			v := ctx.copy(a.v)
 			arcs[i] = arc{a.feature, v, nil}
 		}
+
+		comp := make([]*fieldComprehension, len(x.comprehensions))
+		for i, c := range x.comprehensions {
+			comp[i] = ctx.copy(c).(*fieldComprehension)
+		}
+		obj.comprehensions = comp
 		return obj, false
 
 	case *lambdaExpr:
diff --git a/cue/debug.go b/cue/debug.go
index 8d79a18..91156eb 100644
--- a/cue/debug.go
+++ b/cue/debug.go
@@ -235,6 +235,12 @@
 			write(", ")
 		}
 		p.debugStr(x.arcs)
+		for i, c := range x.comprehensions {
+			p.debugStr(c)
+			if i < len(x.comprehensions)-1 {
+				p.write(", ")
+			}
+		}
 		write("}")
 
 	case []arc:
@@ -257,10 +263,8 @@
 		p.write(": ")
 		p.debugStr(n)
 
-	case *structComprehension:
-		writef("{")
+	case *fieldComprehension:
 		p.debugStr(x.clauses)
-		write(" }")
 
 	case *listComprehension:
 		writef("[")
diff --git a/cue/eval.go b/cue/eval.go
index 23c7b4e..02c1117 100644
--- a/cue/eval.go
+++ b/cue/eval.go
@@ -16,7 +16,6 @@
 
 import (
 	"bytes"
-	"sort"
 )
 
 func eval(idx *index, v value) evaluated {
@@ -265,42 +264,6 @@
 	return &list{x.baseValue, a, t, n}
 }
 
-func (x *structComprehension) evalPartial(ctx *context) evaluated {
-	obj := &structLit{baseValue: x.baseValue}
-	result := x.clauses.yield(ctx, func(k, v evaluated) *bottom {
-		if !k.kind().isAnyOf(stringKind) {
-			return ctx.mkErr(k, "key must be of type string")
-		}
-		if x.isTemplate {
-			if obj.template == nil {
-				obj.template = v
-			} else {
-				obj.template = mkBin(ctx, x.Pos(), opUnify, obj.template, v)
-			}
-			return nil
-		}
-		// TODO: improve big O
-		f := ctx.label(k.strValue(), true)
-		for i, a := range obj.arcs {
-			if a.feature == f {
-				obj.arcs[i].v = mkBin(ctx, x.Pos(), opUnify, a.v, v)
-				return nil
-			}
-		}
-		obj.arcs = append(obj.arcs, arc{feature: f, v: v})
-		return nil
-	})
-	switch {
-	case result == nil:
-	case isBottom(result):
-		return result
-	default:
-		panic("should not happen")
-	}
-	sort.Stable(obj)
-	return obj
-}
-
 func (x *listComprehension) evalPartial(ctx *context) evaluated {
 	list := &list{baseValue: x.baseValue}
 	result := x.clauses.yield(ctx, func(k, v evaluated) *bottom {
@@ -325,6 +288,8 @@
 func (x *guard) evalPartial(ctx *context) evaluated { return x }
 func (x *yield) evalPartial(ctx *context) evaluated { return x }
 
+func (x *fieldComprehension) evalPartial(ctx *context) evaluated { return x }
+
 func (x *structLit) evalPartial(ctx *context) (result evaluated) {
 	if ctx.trace {
 		defer uni(indent(ctx, "struct eval", x))
@@ -333,6 +298,7 @@
 	x = ctx.deref(x).(*structLit)
 
 	// TODO: Handle cycle?
+
 	return x
 }
 
diff --git a/cue/export.go b/cue/export.go
index d2f5937..6774be7 100644
--- a/cue/export.go
+++ b/cue/export.go
@@ -129,24 +129,32 @@
 				Value: p.expr(a.v),
 			})
 		}
-		return obj
+		for _, c := range x.comprehensions {
+			var clauses []ast.Clause
+			next := c.clauses
+			for {
+				if yield, ok := next.(*yield); ok {
+					f := &ast.Field{
+						Label: p.expr(yield.key).(ast.Label),
+						Value: p.expr(yield.value),
+					}
+					var decl ast.Decl = f
+					if len(clauses) > 0 {
+						decl = &ast.ComprehensionDecl{Field: f, Clauses: clauses}
+					}
+					obj.Elts = append(obj.Elts, decl)
+					break
+				}
 
-	case *structComprehension:
-		var clauses []ast.Clause
-		for y, next := p.clause(x.clauses); ; y, next = p.clause(next) {
-			clauses = append(clauses, y)
-			if yield, ok := next.(*yield); ok {
-				return &ast.StructLit{Elts: []ast.Decl{
-					&ast.ComprehensionDecl{
-						Field: &ast.Field{
-							Label: p.expr(yield.key).(ast.Label),
-							Value: p.expr(yield.value),
-						},
-						Clauses: clauses,
-					},
-				}}
+				var y ast.Clause
+				y, next = p.clause(next)
+				clauses = append(clauses, y)
 			}
 		}
+		return obj
+
+	case *fieldComprehension:
+		panic("should be handled in structLit")
 
 	case *listComprehension:
 		var clauses []ast.Clause
diff --git a/cue/instance.go b/cue/instance.go
index 66c9c4b..43db201 100644
--- a/cue/instance.go
+++ b/cue/instance.go
@@ -19,6 +19,7 @@
 	"cuelang.org/go/cue/build"
 	"cuelang.org/go/cue/token"
 	"cuelang.org/go/internal"
+	"golang.org/x/exp/errors/fmt"
 )
 
 // An Instance defines a single configuration based on a collection of
@@ -233,7 +234,10 @@
 	value := convert(ctx, root, x)
 	eval := binOp(ctx, baseValue{}, opUnify, root, value)
 	// TODO: validate recursively?
-	st := eval.(*structLit)
+	st, ok := eval.(*structLit)
+	if !ok {
+		fmt.Errorf("structure at path did not resolve in struct")
+	}
 	inst = &Instance{
 		rootStruct: st,
 		rootValue:  st,
diff --git a/cue/resolve_test.go b/cue/resolve_test.go
index 4888b8f..b8108fe 100644
--- a/cue/resolve_test.go
+++ b/cue/resolve_test.go
@@ -430,14 +430,14 @@
 				e2: int & 4.0/2.0
 				`,
 		out: `<0>{v1: 5e+11, v2: true, i1: 1, v5: 2, e1: _|_((2.0 % 3):unsupported op %(float, number)), e2: _|_((int & 2):unsupported op &((int)*, float))}`,
-		// }, {
-		// 	desc: "null coalescing",
-		// 	in: `
-		// 		a: null
-		// 		b: a.x
-		// 		c: a["x"]
-		// 	`,
-		// 	out: ``,
+	}, {
+		desc: "null coalescing",
+		in: `
+				a: null
+				b: a.x | "b"
+				c: a["x"] | "c"
+			`,
+		out: `<1>{a: null, b: "b", c: "c"}`,
 	}, {
 		desc: "reference across tuples and back",
 		// Tests that it is okay to partially evaluate structs.
@@ -769,6 +769,17 @@
 		`,
 		out: `<0>{k1: 44, k2: -8000000000, ` +
 			`e1: _|_(((-32768..32767) & 100000):value 100000 not in range (-32768..32767))}`,
+	}, {
+		desc: "field comprehensions",
+		in: `
+			obj foo a: "bar"
+			obj <Name>: {
+				a: "dummy" | string
+				sub as: a if true
+			}
+		`,
+		out: `<0>{obj: <1>{<>: <2>(Name: string)-><3>{a: ("dummy" | string) if true yield ("sub"): <4>{as: <3>.a}}, ` +
+			`foo: <5>{a: "bar", sub: <6>{as: "bar"}}}}`,
 		// TODO(P3): if two fields are evaluating to the same field, their
 		// values could be bound.
 		// nodes:   use a shared node
@@ -853,6 +864,27 @@
 			`,
 		out: `<0>{a: <1>{b: 2, c: 3}, b: <2>{a: 1, b: 2, c: 3, d: 4}, c: <3>{b: 2, c: 3}}`,
 	}, {
+		desc: "conditional field",
+		in: `
+			a: "foo" if b
+			b: true
+			c: {
+				a: 3
+				a: 3 if a > 1
+			}
+		`,
+		out: `<0>{b: true, c: <1>{a: _|_(3:field a both generated by and referred to by field comprehension in same struct)}, a: "foo"}`,
+	}, {
+		desc: "referencing field in field comprehension",
+		in: `
+		a: { b c: 4 }
+		a: {
+			b d: 5
+			"\(k)": v for k, v in b
+		}
+		`,
+		out: `<0>{a: <1>{b: <2>{c: 4, d: 5}, c: 4, d: 5}}`,
+	}, {
 		desc: "nested templates in one field",
 		in: `
 			a <A> b <B>: {
diff --git a/cue/rewrite.go b/cue/rewrite.go
index aa0ef55..b3b328f 100644
--- a/cue/rewrite.go
+++ b/cue/rewrite.go
@@ -199,12 +199,12 @@
 	return &listComprehension{x.baseValue, clauses}
 }
 
-func (x *structComprehension) rewrite(ctx *context, fn rewriteFunc) value {
+func (x *fieldComprehension) rewrite(ctx *context, fn rewriteFunc) value {
 	clauses := rewrite(ctx, x.clauses, fn).(yielder)
 	if clauses == x.clauses {
 		return x
 	}
-	return &structComprehension{x.baseValue, clauses, x.isTemplate}
+	return &fieldComprehension{x.baseValue, clauses, x.isTemplate}
 }
 
 func (x *yield) rewrite(ctx *context, fn rewriteFunc) value {
diff --git a/cue/rewrite_test.go b/cue/rewrite_test.go
index 5da3f96..a6db968 100644
--- a/cue/rewrite_test.go
+++ b/cue/rewrite_test.go
@@ -74,6 +74,7 @@
 			}
 			x = e.(*structLit)
 		}
+		x.expand(ctx)
 		arcs := make(arcs, len(x.arcs))
 		for i, a := range x.arcs {
 			v := x.at(ctx, i)
@@ -85,10 +86,10 @@
 			if isBottom(v) {
 				return v
 			}
-			t = v.(*lambdaExpr)
+			t = v
 		}
 		emit := testResolve(ctx, x.emit, m)
-		obj := &structLit{x.baseValue, emit, t, arcs}
+		obj := &structLit{x.baseValue, emit, t, nil, arcs}
 		return obj
 	case *list:
 		a := make([]value, len(x.a))
diff --git a/cue/strip.go b/cue/strip.go
index 26e1f43..3822ab2 100644
--- a/cue/strip.go
+++ b/cue/strip.go
@@ -30,13 +30,15 @@
 	eval := ctx.manifest(v)
 	switch x := eval.(type) {
 	case *structLit:
+		x.expand(ctx)
 		if x.template != nil {
 			arcs := make(arcs, len(x.arcs))
 			for i, a := range x.arcs {
 				v := rewrite(ctx, x.at(ctx, i), stripRewriter)
 				arcs[i] = arc{a.feature, v, nil}
 			}
-			return &structLit{x.baseValue, x.emit, nil, arcs}, false
+			// TODO: verify that len(x.comprehensions) == 0
+			return &structLit{x.baseValue, x.emit, nil, nil, arcs}, false
 		}
 	}
 	return eval, true
diff --git a/cue/subsume.go b/cue/subsume.go
index 9722fb1..008e5ae 100644
--- a/cue/subsume.go
+++ b/cue/subsume.go
@@ -67,6 +67,12 @@
 
 func (x *structLit) subsumesImpl(ctx *context, v value, mode subsumeMode) bool {
 	if o, ok := v.(*structLit); ok {
+		// TODO: consider what to do with templates. Perhaps we should always
+		// do subsumption on fully evaluated structs.
+		if len(x.comprehensions) > 0 { //|| x.template != nil {
+			return false
+		}
+
 		// all arcs in n must exist in v and its values must subsume.
 		for _, a := range x.arcs {
 			b, _ := o.lookup(ctx, a.feature)
@@ -335,8 +341,8 @@
 }
 
 // structural equivalence
-func (x *structComprehension) subsumesImpl(ctx *context, v value, mode subsumeMode) bool {
-	if b, ok := v.(*structComprehension); ok {
+func (x *fieldComprehension) subsumesImpl(ctx *context, v value, mode subsumeMode) bool {
+	if b, ok := v.(*fieldComprehension); ok {
 		return subsumes(ctx, x.clauses, b.clauses, 0)
 	}
 	return isBottom(v)
diff --git a/cue/types.go b/cue/types.go
index a145249..0da5f1b 100644
--- a/cue/types.go
+++ b/cue/types.go
@@ -794,6 +794,8 @@
 	}
 	obj := v.eval(ctx).(*structLit)
 
+	obj.expand(ctx) // expand comprehensions
+
 	// check if any labels are hidden
 	f := label(0)
 	for _, a := range obj.arcs {
@@ -814,6 +816,7 @@
 			obj.baseValue,
 			obj.emit,
 			obj.template,
+			nil,
 			arcs,
 		}
 
@@ -826,6 +829,7 @@
 		return structValue{}, err
 	}
 	obj := v.eval(ctx).(*structLit)
+	obj.expand(ctx)
 
 	return structValue{ctx, v.path, obj}, nil
 }
diff --git a/cue/validate.go b/cue/validate.go
index a1181a5..336f411 100644
--- a/cue/validate.go
+++ b/cue/validate.go
@@ -22,6 +22,7 @@
 	}
 	switch x := eval.(type) {
 	case *structLit:
+		x.expand(ctx)
 		for i := range x.arcs {
 			if err := validate(ctx, x.at(ctx, i)); err != nil {
 				return err
diff --git a/cue/value.go b/cue/value.go
index 1887681..95abb6c 100644
--- a/cue/value.go
+++ b/cue/value.go
@@ -556,9 +556,12 @@
 // insertRaw to insert arcs to ensure this invariant holds.
 type structLit struct {
 	baseValue
-	emit value // currently only supported at top level.
 
-	template value
+	// TODO(perf): separate out these infrequent values to save space.
+	emit           value // currently only supported at top level.
+	template       value
+	comprehensions []*fieldComprehension
+
 	// TODO: consider hoisting the template arc to its own value.
 	arcs []arc
 }
@@ -577,6 +580,7 @@
 
 // lookup returns the node for the given label f, if present, or nil otherwise.
 func (x *structLit) lookup(ctx *context, f label) (v evaluated, raw value) {
+	x.expand(ctx)
 	// Lookup is done by selector or index references. Either this is done on
 	// literal nodes or nodes obtained from references. In the later case,
 	// noderef will have ensured that the ancestors were evaluated.
@@ -589,6 +593,7 @@
 }
 
 func (x *structLit) iterAt(ctx *context, i int) (evaluated, value, label) {
+	x.expand(ctx)
 	if i >= len(x.arcs) {
 		return nil, nil, 0
 	}
@@ -677,6 +682,10 @@
 // is referred to in a new substructure.
 
 func (x *structLit) at(ctx *context, i int) evaluated {
+	x.expand(ctx)
+	// if x.emit != nil && isBottom(x.emit) {
+	// 	return x.emit.(evaluated)
+	// }
 	// Lookup is done by selector or index references. Either this is done on
 	// literal nodes or nodes obtained from references. In the later case,
 	// noderef will have ensured that the ancestors were evaluated.
@@ -687,7 +696,6 @@
 		v := x.arcs[i].v.evalPartial(ctx)
 
 		if err := cycleError(v); err != nil {
-
 			x.arcs[i].cache = nil
 			return err
 		}
@@ -698,18 +706,77 @@
 	return x.arcs[i].cache
 }
 
+func (x *structLit) expand(ctx *context) {
+	if x.comprehensions == nil {
+		return
+	}
+
+	comprehensions := x.comprehensions
+	x.comprehensions = nil
+
+	newArcs := []arc{}
+	for _, c := range comprehensions {
+		result := c.clauses.yield(ctx, func(k, v evaluated) *bottom {
+			if !k.kind().isAnyOf(stringKind) {
+				return ctx.mkErr(k, "key must be of type string")
+			}
+			if c.isTemplate {
+				// TODO: disallow altogether or only when it refers to fields.
+				if x.template == nil {
+					x.template = v
+				} else {
+					x.template = mkBin(ctx, c.Pos(), opUnify, x.template, v)
+				}
+				return nil
+			}
+			// TODO(perf): improve big O
+			f := ctx.label(k.strValue(), true)
+			for i, a := range newArcs {
+				if a.feature == f {
+					newArcs[i].v = mkBin(ctx, x.Pos(), opUnify, a.v, v)
+					return nil
+				}
+			}
+			newArcs = append(newArcs, arc{feature: f, v: v})
+			return nil
+		})
+		switch {
+		case result == nil:
+		case isBottom(result):
+			x.emit = result
+		default:
+			panic("should not happen")
+		}
+	}
+
+	// new arcs may be merged with old ones, but only if the old ones were not
+	// referred to in the evaluation of any of the arcs.
+	// TODO(perf): improve big O
+outer:
+	for _, na := range newArcs {
+		f := na.feature
+		for i, a := range x.arcs {
+			if a.feature == f {
+				if x.arcs[i].cache != nil {
+					x.arcs[i].cache = ctx.mkErr(na.v, "field %s both generated by and referred to by field comprehension in same struct",
+						ctx.labelStr(f))
+				} else {
+					x.arcs[i].v = mkBin(ctx, x.Pos(), opUnify, a.v, na.v)
+				}
+				continue outer
+			}
+		}
+		x.arcs = append(x.arcs, arc{feature: f, v: na.v})
+	}
+	sort.Stable(x)
+}
+
 func (x *structLit) applyTemplate(ctx *context, i int, v evaluated) evaluated {
 	if x.template != nil {
-		e := ctx.manifest(x.template)
-		if isBottom(e) {
-			return e
+		fn, err := evalLambda(ctx, x.template)
+		if err != nil {
+			return err
 		}
-
-		fn, ok := e.(*lambdaExpr)
-		if !ok {
-			return ctx.mkErr(e, "expected lambda expression")
-		}
-
 		name := ctx.labelStr(x.arcs[i].feature)
 		arg := &stringLit{x.baseValue, name}
 		w := fn.call(ctx, x, arg).evalPartial(ctx)
@@ -887,7 +954,8 @@
 	}
 	lambda := &lambdaExpr{x.baseValue, &params{arcs}, nil}
 	defer ctx.pushForwards(x, lambda).popForwards()
-	return ctx.copy(x.value)
+	obj := ctx.copy(x.value)
+	return obj
 }
 
 // Operations
@@ -1012,19 +1080,14 @@
 	return listKind | nonGround | referenceKind
 }
 
-// TODO: structComprehensions are a left-over from the past and are used to
-// interpret field comprehensions. The resulting construction prohibits
-// references in comprehension clauses to be able to reference values in
-// the struct in which the field is defined. See the updater config in the
-// kubernetes demo.
-type structComprehension struct {
+type fieldComprehension struct {
 	baseValue
 	clauses    yielder
 	isTemplate bool
 }
 
-func (x *structComprehension) kind() kind {
-	return structKind | nonGround | referenceKind
+func (x *fieldComprehension) kind() kind {
+	return topKind | nonGround
 }
 
 type yieldFunc func(k, v evaluated) *bottom
@@ -1103,6 +1166,7 @@
 
 	switch src := source.(type) {
 	case *structLit:
+		src.expand(ctx)
 		for i, a := range src.arcs {
 			key := &stringLit{
 				x.baseValue,
diff --git a/doc/ref/spec.md b/doc/ref/spec.md
index e1d4255..a61736a 100644
--- a/doc/ref/spec.md
+++ b/doc/ref/spec.md
@@ -1847,7 +1847,7 @@
 
 ### Comprehensions
 
-Lists and structs can be constructed using comprehensions.
+Lists and fields can be constructed using comprehensions.
 
 Each define a clause sequence that consists of a sequence of `for`, `if`, and
 `let` clauses, nesting from left to right.
@@ -1874,9 +1874,11 @@
 List comprehensions specify a single expression that is evaluated and included
 in the list for each completed iteration.
 
-Struct comprehensions specify two expressions, one for the label and one for
-the value, that each get evaluated and included as a field in the struct
-for each completed iteration.
+Field comprehensions specify a field that is included in the struct for each
+completed iteration.
+If the same field is generated more than once, its values are unified.
+The clauses of a field comprehension may not refer to fields generated by
+field comprehensions defined in the same struct.
 
 ```
 ComprehensionDecl   = Field [ "<-" ] Clauses .
@@ -1893,31 +1895,10 @@
 a: [1, 2, 3, 4]
 b: [ x+1 for x in a if x > 1]  // [3, 4, 5]
 
-c: { ("\(x)"): x + y for x in a if x < 4 let y = 1 }
+c: { "\(x)": x + y for x in a if x < 4 let y = 1 }
 d: { "1": 2, "2": 3, "3": 4 }
 ```
 
-<!---
-The above examples could be expressed as struct unification as follows:
-
-```
-b: [ ]
-bc: {
-    x1:    _
-    next: {
-        x2:  true & x1 > 1
-        next: {
-            x3: x1+1
-        }
-    }
-    result: next.next.x3 | []
-}
-```
-
-Struct comprehensions cannot be expressed as such as there is not equivalent
-in the language to have computed field labels.
---->
-
 
 ### String interpolation