cue/token: prepare to remove FileSet

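Make token.Pos self-describing so positions can be resolved without a
FileSet: a Pos now carries a pointer to its token.File and gains File,
Line, Position, and String methods. Add token.NewFile as a standalone
constructor, give each File its own mutex for its line table, and drop
FileSet.Base, AddFile, Iterate, File, PositionFor, and Position along
with their tests. Context.Pos is removed, build.Instance.ImportPos now
holds []token.Pos instead of []token.Position, ast.File.Pos falls back
to a position derived from the file name, and call sites in build,
errors, format, load, parser, scanner, types, and the vendored yaml
decoder are updated. The FileSet type itself, Context.FileSet, and the
parser's fset parameter stay for now.

Call sites migrate roughly as follows (an illustrative sketch; the
names are placeholders rather than lines taken from the diff):

    // before: a *token.FileSet owns files and resolves positions
    f := fset.AddFile(filename, -1, len(src))
    line := fset.Position(p).Line

    // after: files stand alone and a Pos resolves itself
    f := token.NewFile(filename, -1, len(src))
    line := p.Line()
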
Change-Id: I9f4602710ae245bf09fc02b84850dfe387cea690
Reviewed-on: https://cue-review.googlesource.com/c/cue/+/2122
Reviewed-by: Marcel van Lohuizen <mpvl@google.com>
diff --git a/cue/ast/ast.go b/cue/ast/ast.go
index 43bbde1..4fe5c7b 100644
--- a/cue/ast/ast.go
+++ b/cue/ast/ast.go
@@ -666,6 +666,10 @@
 	if len(f.Decls) > 0 {
 		return f.Decls[0].Pos()
 	}
+	if f.Filename != "" {
+		// TODO. Do something more principled and efficient.
+		return token.NewFile(f.Filename, -1, 1).Pos(0, 0)
+	}
 	return token.NoPos
 }
 
diff --git a/cue/build.go b/cue/build.go
index d0ee27a..24a5f07 100644
--- a/cue/build.go
+++ b/cue/build.go
@@ -86,7 +86,7 @@
 	// FileSet idea from the API. Just take the hit of the extra pointers for
 	// positions in the ast, and then optimize the storage in an abstract
 	// machine implementation for storing graphs.
-	f.AddFile("dummy", sharedOffset, 0)
+	token.NewFile("dummy", sharedOffset, 0)
 	i := &index{
 		fset:     f,
 		labelMap: map[string]label{"": 0},
@@ -182,7 +182,7 @@
 }
 
 func lineStr(idx *index, n ast.Node) string {
-	return idx.fset.Position(n.Pos()).String()
+	return n.Pos().String()
 }
 
 func resolveFiles(idx *index, p *build.Instance) error {
diff --git a/cue/build/context.go b/cue/build/context.go
index b918833..c1b3649 100644
--- a/cue/build/context.go
+++ b/cue/build/context.go
@@ -101,14 +101,6 @@
 	return c
 }
 
-// Pos returns position information for a token.Pos.
-func (c *Context) Pos(pos token.Pos) token.Position {
-	if c.fset == nil {
-		return token.Position{}
-	}
-	return c.fset.Position(pos)
-}
-
 // FileSet reports the file set used for parsing files.
 func (c *Context) FileSet() *token.FileSet {
 	c.init()
diff --git a/cue/build/import.go b/cue/build/import.go
index bb29061..40c2711 100644
--- a/cue/build/import.go
+++ b/cue/build/import.go
@@ -40,8 +40,7 @@
 
 	var (
 		c        = inst.ctxt
-		fset     = c.FileSet()
-		imported = map[string][]token.Position{}
+		imported = map[string][]token.Pos{}
 	)
 
 	for _, f := range inst.Files {
@@ -57,7 +56,7 @@
 					// TODO: remove panic
 					log.Panicf("%s: parser returned invalid quoted string: <%s>", f.Filename, quoted)
 				}
-				imported[path] = append(imported[path], fset.Position(spec.Pos()))
+				imported[path] = append(imported[path], spec.Pos())
 			}
 		}
 	}
diff --git a/cue/build/instance.go b/cue/build/instance.go
index 966d9ea..ec6eaf7 100644
--- a/cue/build/instance.go
+++ b/cue/build/instance.go
@@ -100,7 +100,7 @@
 
 	// Dependencies
 	ImportPaths []string
-	ImportPos   map[string][]token.Position // line information for Imports
+	ImportPos   map[string][]token.Pos // line information for Imports
 
 	Deps       []string
 	DepsErrors []error
@@ -140,7 +140,7 @@
 }
 
 func (inst *Instance) errorf(pos token.Pos, format string, args ...interface{}) error {
-	return inst.chkErr(errors.E(inst.ctxt.Pos(pos), fmt.Sprintf(format, args...)))
+	return inst.chkErr(errors.E(pos.Position(), fmt.Sprintf(format, args...)))
 }
 
 // Context defines the build context for this instance. All files defined
diff --git a/cue/errors.go b/cue/errors.go
index 018e267..4247fb5 100644
--- a/cue/errors.go
+++ b/cue/errors.go
@@ -81,20 +81,20 @@
 
 func (x *bottom) kind() kind { return bottomKind }
 
-func (x *bottom) Position() []token.Position {
+func (x *bottom) Position() []token.Pos {
 	if x.index != nil && x.index.fset != nil {
 		return appendPositions(nil, x.index.fset, x.pos)
 	}
 	return nil
 }
 
-func appendPositions(pos []token.Position, fset *token.FileSet, src source) []token.Position {
+func appendPositions(pos []token.Pos, fset *token.FileSet, src source) []token.Pos {
 	if src != nil {
 		if p := src.Pos(); p != token.NoPos {
 			if p.Offset() >= sharedOffset {
 				fset = sharedIndex.fset
 			}
-			return append(pos, fset.Position(src.Pos()))
+			return append(pos, src.Pos())
 		}
 		if c := src.computed(); c != nil {
 			pos = appendPositions(pos, fset, c.x)
@@ -127,7 +127,7 @@
 			if p.Offset() >= sharedOffset {
 				fset = sharedIndex.fset
 			}
-			return append(locs, fset.Position(src.Pos()).String())
+			return append(locs, src.Pos().String())
 		}
 		if c := src.computed(); c != nil {
 			locs = appendLocations(locs, fset, c.x)
diff --git a/cue/format/format_test.go b/cue/format/format_test.go
index ea4010f..8510209 100644
--- a/cue/format/format_test.go
+++ b/cue/format/format_test.go
@@ -358,8 +358,8 @@
 			t.Errorf("got ident %s; want %s", i2.Name, i1.Name)
 		}
 
-		l1 := fset.Position(i1.Pos()).Line
-		l2 := fset.Position(i2.Pos()).Line
+		l1 := i1.Pos().Line()
+		l2 := i2.Pos().Line()
 		if l2 != l1 {
 			t.Errorf("got line %d; want %d for %s", l2, l1, i1.Name)
 		}
diff --git a/cue/format/printer.go b/cue/format/printer.go
index 148cdfd..96518a0 100644
--- a/cue/format/printer.go
+++ b/cue/format/printer.go
@@ -65,7 +65,7 @@
 	if p.fset == nil {
 		return 0
 	}
-	return p.fset.Position(pos).Line
+	return pos.Line()
 }
 
 func (p *printer) Print(v interface{}) {
@@ -156,7 +156,7 @@
 	case token.Pos:
 		// TODO: should we use a known file position to synchronize? Go does,
 		// but we don't really have to.
-		// pos := p.fset.Position(x)
+		// pos := x
 		if x.HasRelPos() {
 			if p.allowed&nooverride == 0 {
 				requested := p.allowed
diff --git a/cue/load/import.go b/cue/load/import.go
index fc8f6fd..e066951 100644
--- a/cue/load/import.go
+++ b/cue/load/import.go
@@ -269,7 +269,7 @@
 type fileProcessor struct {
 	firstFile        string
 	firstCommentFile string
-	imported         map[string][]token.Position
+	imported         map[string][]token.Pos
 	allTags          map[string]bool
 	allFiles         bool
 	ignoreOther      bool // ignore files from other packages
@@ -282,7 +282,7 @@
 
 func newFileProcessor(c *Config, p *build.Instance) *fileProcessor {
 	return &fileProcessor{
-		imported: make(map[string][]token.Position),
+		imported: make(map[string][]token.Pos),
 		allTags:  make(map[string]bool),
 		c:        c,
 		pkg:      p,
@@ -409,7 +409,7 @@
 				log.Panicf("%s: parser returned invalid quoted string: <%s>", filename, quoted)
 			}
 			if !isTest || fp.c.Tests {
-				fp.imported[path] = append(fp.imported[path], fset.Position(spec.Pos()))
+				fp.imported[path] = append(fp.imported[path], spec.Pos())
 			}
 		}
 	}
@@ -557,7 +557,7 @@
 	return word, rest
 }
 
-func cleanImports(m map[string][]token.Position) ([]string, map[string][]token.Position) {
+func cleanImports(m map[string][]token.Pos) ([]string, map[string][]token.Pos) {
 	all := make([]string, 0, len(m))
 	for path := range m {
 		all = append(all, path)
diff --git a/cue/parser/error_test.go b/cue/parser/error_test.go
index c6afecf..e20df24 100644
--- a/cue/parser/error_test.go
+++ b/cue/parser/error_test.go
@@ -46,22 +46,8 @@
 
 const testdata = "testdata"
 
-// getFile assumes that each filename occurs at most once
-func getFile(fset *token.FileSet, filename string) (info *token.File) {
-	fset.Iterate(func(f *token.File) bool {
-		if f.Name() == filename {
-			if info != nil {
-				panic(filename + " used multiple times")
-			}
-			info = f
-		}
-		return true
-	})
-	return info
-}
-
-func getPos(fset *token.FileSet, filename string, offset int) token.Pos {
-	if f := getFile(fset, filename); f != nil {
+func getPos(f *token.File, offset int) token.Pos {
+	if f != nil {
 		return f.Pos(offset, 0)
 	}
 	return token.NoPos
@@ -78,14 +64,15 @@
 // expectedErrors collects the regular expressions of ERROR comments found
 // in files and returns them as a map of error positions to error messages.
 //
-func expectedErrors(t *testing.T, fset *token.FileSet, filename string, src []byte) map[token.Pos]string {
+func expectedErrors(t *testing.T, file *token.File, src []byte) map[token.Pos]string {
 	errors := make(map[token.Pos]string)
 
 	var s scanner.Scanner
 	// file was parsed already - do not add it again to the file
 	// set otherwise the position information returned here will
 	// not match the position information collected by the parser
-	s.Init(getFile(fset, filename), src, nil, scanner.ScanComments)
+	// file := token.NewFile(filename, -1, len(src))
+	s.Init(file, src, nil, scanner.ScanComments)
 	var prev token.Pos // position of last non-comment, non-semicolon token
 	var here token.Pos // position immediately after the token at position prev
 
@@ -120,14 +107,14 @@
 // compareErrors compares the map of expected error messages with the list
 // of found errors and reports discrepancies.
 //
-func compareErrors(t *testing.T, fset *token.FileSet, expected map[token.Pos]string, found errors.List) {
+func compareErrors(t *testing.T, file *token.File, expected map[token.Pos]string, found errors.List) {
 	t.Helper()
 	for _, error := range found {
 		// error.Pos is a Position, but we want
 		// a Pos so we can do a map lookup
 		ePos := error.Position()
 		eMsg := error.Error()
-		pos := getPos(fset, ePos.Filename, ePos.Offset).WithRel(0)
+		pos := getPos(file, ePos.Offset).WithRel(0)
 		if msg, found := expected[pos]; found {
 			// we expect a message at pos; check if it matches
 			rx, err := regexp.Compile(msg)
@@ -154,7 +141,7 @@
 	if len(expected) > 0 {
 		t.Errorf("%d errors not reported:", len(expected))
 		for pos, msg := range expected {
-			t.Errorf("%s: -%q-\n", fset.Position(pos), msg)
+			t.Errorf("%s: -%q-\n", pos, msg)
 		}
 	}
 }
@@ -167,8 +154,8 @@
 		return
 	}
 
-	fset := token.NewFileSet()
-	_, err = ParseFile(fset, filename, src, DeclarationErrors, AllErrors)
+	f, err := ParseFile(token.NewFileSet(), filename, src, DeclarationErrors, AllErrors)
+	file := f.Pos().File()
 	found, ok := err.(errors.List)
 	if err != nil && !ok {
 		t.Error(err)
@@ -178,10 +165,13 @@
 
 	// we are expecting the following errors
 	// (collect these after parsing a file so that it is found in the file set)
-	expected := expectedErrors(t, fset, filename, src)
+	if file == nil {
+		t.Fatal("no token.File associated with parsed file")
+	}
+	expected := expectedErrors(t, file, src)
 
 	// verify errors returned by the parser
-	compareErrors(t, fset, expected, found)
+	compareErrors(t, file, expected, found)
 }
 
 func TestErrors(t *testing.T) {
diff --git a/cue/parser/import.go b/cue/parser/import.go
index d5885ab..62f2520 100644
--- a/cue/parser/import.go
+++ b/cue/parser/import.go
@@ -24,7 +24,7 @@
 
 // sortImports sorts runs of consecutive import lines in import blocks in f.
 // It also removes duplicate imports when it is possible to do so without data loss.
-func sortImports(fset *token.FileSet, f *ast.File) {
+func sortImports(f *ast.File) {
 	for _, d := range f.Decls {
 		d, ok := d.(*ast.ImportDecl)
 		if !ok {
@@ -42,23 +42,23 @@
 		i := 0
 		specs := d.Specs[:0]
 		for j, s := range d.Specs {
-			if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+			if j > i && s.Pos().Line() > 1+d.Specs[j-1].End().Line() {
 				// j begins a new run. End this one.
-				specs = append(specs, sortSpecs(fset, f, d.Specs[i:j])...)
+				specs = append(specs, sortSpecs(f, d.Specs[i:j])...)
 				i = j
 			}
 		}
-		specs = append(specs, sortSpecs(fset, f, d.Specs[i:])...)
+		specs = append(specs, sortSpecs(f, d.Specs[i:])...)
 		d.Specs = specs
 
 		// Deduping can leave a blank line before the rparen; clean that up.
 		if len(d.Specs) > 0 {
 			lastSpec := d.Specs[len(d.Specs)-1]
-			lastLine := fset.Position(lastSpec.Pos()).Line
-			rParenLine := fset.Position(d.Rparen).Line
+			lastLine := lastSpec.Pos().Line()
+			rParenLine := d.Rparen.Line()
 			for rParenLine > lastLine+1 {
 				rParenLine--
-				fset.File(d.Rparen).MergeLine(rParenLine)
+				d.Rparen.File().MergeLine(rParenLine)
 			}
 		}
 	}
@@ -107,7 +107,7 @@
 	End   token.Pos
 }
 
-func sortSpecs(fset *token.FileSet, f *ast.File, specs []*ast.ImportSpec) []*ast.ImportSpec {
+func sortSpecs(f *ast.File, specs []*ast.ImportSpec) []*ast.ImportSpec {
 	// Can't short-circuit here even if specs are already sorted,
 	// since they might yet need deduplication.
 	// A lone import, however, may be safely ignored.
@@ -136,7 +136,7 @@
 			deduped = append(deduped, s)
 		} else {
 			p := s.Pos()
-			fset.File(p).MergeLine(fset.Position(p).Line)
+			p.File().MergeLine(p.Line())
 		}
 	}
 	specs = deduped
diff --git a/cue/parser/parser.go b/cue/parser/parser.go
index 1baf33e..a81a57b 100644
--- a/cue/parser/parser.go
+++ b/cue/parser/parser.go
@@ -62,7 +62,7 @@
 }
 
 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode []Option) {
-	p.file = fset.AddFile(filename, -1, len(src))
+	p.file = token.NewFile(filename, -1, len(src))
 	for _, f := range mode {
 		f(p)
 	}
diff --git a/cue/scanner/scanner_test.go b/cue/scanner/scanner_test.go
index 18bb460..bf1521b 100644
--- a/cue/scanner/scanner_test.go
+++ b/cue/scanner/scanner_test.go
@@ -190,7 +190,7 @@
 }
 
 func checkPosScan(t *testing.T, lit string, p token.Pos, expected token.Position) {
-	pos := fset.Position(p)
+	pos := p.Position()
 	if pos.Filename != expected.Filename {
 		t.Errorf("bad filename for %q: got %s, expected %s", lit, pos.Filename, expected.Filename)
 	}
@@ -216,7 +216,7 @@
 
 	// verify scan
 	var s Scanner
-	s.Init(fset.AddFile("", fset.Base(), len(source)), source, eh, ScanComments|dontInsertCommas)
+	s.Init(token.NewFile("", 1, len(source)), source, eh, ScanComments|dontInsertCommas)
 
 	// set up expected position
 	epos := token.Position{
@@ -301,7 +301,7 @@
 
 func checkComma(t *testing.T, line string, mode Mode) {
 	var S Scanner
-	file := fset.AddFile("TestCommas", fset.Base(), len(line))
+	file := token.NewFile("TestCommas", 1, len(line))
 	S.Init(file, []byte(line), nil, mode)
 	pos, tok, lit := S.Scan()
 	for tok != token.EOF {
@@ -476,7 +476,7 @@
 		"elided  ,        \n",
 	}
 	var S Scanner
-	f := fset.AddFile("TestCommas", fset.Base(), len(test))
+	f := token.NewFile("TestCommas", 1, len(test))
 	S.Init(f, []byte(test), nil, ScanComments)
 	pos, tok, lit := S.Scan()
 	got := []string{}
@@ -538,7 +538,7 @@
 
 	// verify scan
 	var S Scanner
-	f := fset.AddFile(filepath.Join("dir", "TestLineComments"), fset.Base(), len(src))
+	f := token.NewFile(filepath.Join("dir", "TestLineComments"), 1, len(src))
 	S.Init(f, []byte(src), nil, dontInsertCommas)
 	for _, s := range segs {
 		p, _, lit := S.Scan()
@@ -562,7 +562,7 @@
 
 	// 1st init
 	src1 := "false true { }"
-	f1 := fset.AddFile("src1", fset.Base(), len(src1))
+	f1 := token.NewFile("src1", 1, len(src1))
 	s.Init(f1, []byte(src1), nil, dontInsertCommas)
 	if f1.Size() != len(src1) {
 		t.Errorf("bad file size: got %d, expected %d", f1.Size(), len(src1))
@@ -576,7 +576,7 @@
 
 	// 2nd init
 	src2 := "null true { ]"
-	f2 := fset.AddFile("src2", fset.Base(), len(src2))
+	f2 := token.NewFile("src2", 1, len(src2))
 	s.Init(f2, []byte(src2), nil, dontInsertCommas)
 	if f2.Size() != len(src2) {
 		t.Errorf("bad file size: got %d, expected %d", f2.Size(), len(src2))
@@ -608,7 +608,7 @@
 	for i, src := range sources {
 		name := fmt.Sprintf("tsrc%d", i)
 		t.Run(name, func(t *testing.T) {
-			f := fset.AddFile(name, fset.Base(), len(src))
+			f := token.NewFile(name, 1, len(src))
 
 			// verify scan
 			var s Scanner
@@ -651,7 +651,7 @@
 	eh := func(pos token.Position, msg string) { list.AddNew(pos, msg) }
 
 	var s Scanner
-	s.Init(fset.AddFile("File1", fset.Base(), len(src)), []byte(src), eh, dontInsertCommas)
+	s.Init(token.NewFile("File1", 1, len(src)), []byte(src), eh, dontInsertCommas)
 	for {
 		if _, tok, _ := s.Scan(); tok == token.EOF {
 			break
@@ -695,7 +695,7 @@
 		h.msg = msg
 		h.pos = pos
 	}
-	s.Init(fset.AddFile("", fset.Base(), len(src)), []byte(src), eh, ScanComments|dontInsertCommas)
+	s.Init(token.NewFile("", 1, len(src)), []byte(src), eh, ScanComments|dontInsertCommas)
 	_, tok0, lit0 := s.Scan()
 	if tok0 != tok {
 		t.Errorf("%q: got %s, expected %s", src, tok0, tok)
@@ -840,12 +840,12 @@
 		}
 	`
 	var s Scanner
-	s.Init(fset.AddFile("", fset.Base(), len(src)), []byte(src), nil, 0)
+	s.Init(token.NewFile("", 1, len(src)), []byte(src), nil, 0)
 	for {
 		pos, tok, lit := s.Scan()
 		class := tokenclass(tok)
 		if lit != "" && class != keyword && class != literal && tok != token.COMMA {
-			t.Errorf("%s: tok = %s, lit = %q", fset.Position(pos), tok, lit)
+			t.Errorf("%s: tok = %s, lit = %q", pos, tok, lit)
 		}
 		if tok <= token.EOF {
 			break
@@ -855,8 +855,7 @@
 
 func BenchmarkScan(b *testing.B) {
 	b.StopTimer()
-	fset := token.NewFileSet()
-	file := fset.AddFile("", fset.Base(), len(source))
+	file := token.NewFile("", 1, len(source))
 	var s Scanner
 	b.StartTimer()
 	for i := 0; i < b.N; i++ {
@@ -877,8 +876,7 @@
 	if err != nil {
 		panic(err)
 	}
-	fset := token.NewFileSet()
-	file := fset.AddFile(filename, fset.Base(), len(src))
+	file := token.NewFile(filename, 1, len(src))
 	b.SetBytes(int64(len(src)))
 	var s Scanner
 	b.StartTimer()
diff --git a/cue/token/position.go b/cue/token/position.go
index 7f58a12..ad51afd 100644
--- a/cue/token/position.go
+++ b/cue/token/position.go
@@ -58,30 +58,43 @@
 	return s
 }
 
-// Pos is a compact encoding of a source position within a file set, as well as
+// Pos is a compact encoding of a source position within a file, as well as
 // relative positioning information. It can be converted into a Position for a
 // more convenient, but much larger, representation.
 //
-// The Pos value for a given file is a number in the range [base, base+size],
-// where base and size are specified when adding the file to the file set via
-// AddFile.
-//
-// To create the Pos value for a specific source offset (measured in bytes),
-// first add the respective file to the current file set using FileSet.AddFile
-// and then call File.Pos(offset) for that file. Given a Pos value p for a
-// specific file set fset, the corresponding Position value is obtained by
-// calling fset.Position(p).
-//
-// Pos values can be compared directly with the usual comparison operators: If
-// two Pos values p and q are in the same file, comparing p and q is equivalent
-// to comparing the respective source file offsets. If p and q are in different
-// files, p < q is true if the file implied by p was added to the respective
-// file set before the file implied by cue.
 type Pos struct {
 	file   *File
 	offset int
 }
 
+// File returns the file that contains the position p or nil if there is no
+// such file (for instance for p == NoPos).
+//
+func (p Pos) File() *File {
+	if p.index() == 0 {
+		return nil
+	}
+	return p.file
+}
+
+func (p Pos) Line() int {
+	if p.file == nil {
+		return 0
+	}
+	return p.Position().Line
+}
+
+func (p Pos) Position() Position {
+	if p.file == nil {
+		return Position{}
+	}
+	return p.file.Position(p)
+}
+
+func (p Pos) String() string {
+	return p.Position().String()
+}
+
 // NoPos is the zero value for Pos; there is no file and line information
-// associated with it, and NoPos().IsValid() is false. NoPos is always
+// associated with it, and NoPos.IsValid() is false. NoPos is always
 // smaller than any other Pos value. The corresponding Position value
@@ -180,16 +193,24 @@
 // A File is a handle for a file belonging to a FileSet.
 // A File has a name, size, and line offset table.
 type File struct {
-	set  *FileSet
-	name string // file name as provided to AddFile
-	base index  // Pos index range for this file is [base...base+size]
-	size index  // file size as provided to AddFile
+	mutex sync.RWMutex
+	name  string // file name as provided to NewFile
+	base  index  // Pos index range for this file is [base...base+size]
+	size  index  // file size as provided to NewFile
 
-	// lines and infos are protected by set.mutex
+	// lines and infos are protected by mutex
 	lines []index // lines contains the offset of the first character for each line (the first entry is always 0)
 	infos []lineInfo
 }
 
+// NewFile returns a new file.
+func NewFile(filename string, base, size int) *File {
+	if base < 0 {
+		base = 1
+	}
+	return &File{sync.RWMutex{}, filename, index(base), index(size), []index{0}, nil}
+}
+
 // Name returns the file name of file f as registered with AddFile.
 func (f *File) Name() string {
 	return f.name
@@ -207,9 +228,9 @@
 
 // LineCount returns the number of lines in file f.
 func (f *File) LineCount() int {
-	f.set.mutex.RLock()
+	f.mutex.RLock()
 	n := len(f.lines)
-	f.set.mutex.RUnlock()
+	f.mutex.RUnlock()
 	return n
 }
 
@@ -219,11 +240,11 @@
 //
 func (f *File) AddLine(offset int) {
 	x := index(offset)
-	f.set.mutex.Lock()
+	f.mutex.Lock()
 	if i := len(f.lines); (i == 0 || f.lines[i-1] < x) && x < f.size {
 		f.lines = append(f.lines, x)
 	}
-	f.set.mutex.Unlock()
+	f.mutex.Unlock()
 }
 
 // MergeLine merges a line with the following line. It is akin to replacing
@@ -235,8 +256,8 @@
 	if line <= 0 {
 		panic("illegal line number (line numbering starts at 1)")
 	}
-	f.set.mutex.Lock()
-	defer f.set.mutex.Unlock()
+	f.mutex.Lock()
+	defer f.mutex.Unlock()
 	if line >= len(f.lines) {
 		panic("illegal line number")
 	}
@@ -268,12 +289,12 @@
 	}
 
 	// set lines table
-	f.set.mutex.Lock()
+	f.mutex.Lock()
 	f.lines = f.lines[:0]
 	for _, l := range lines {
 		f.lines = append(f.lines, index(l))
 	}
-	f.set.mutex.Unlock()
+	f.mutex.Unlock()
 	return true
 }
 
@@ -293,9 +314,9 @@
 	}
 
 	// set lines table
-	f.set.mutex.Lock()
+	f.mutex.Lock()
 	f.lines = lines
-	f.set.mutex.Unlock()
+	f.mutex.Unlock()
 }
 
 // A lineInfo object describes alternative file and line number
@@ -318,11 +339,11 @@
 //
 func (f *File) AddLineInfo(offset int, filename string, line int) {
 	x := index(offset)
-	f.set.mutex.Lock()
+	f.mutex.Lock()
 	if i := len(f.infos); i == 0 || index(f.infos[i-1].Offset) < x && x < f.size {
 		f.infos = append(f.infos, lineInfo{offset, filename, line})
 	}
-	f.set.mutex.Unlock()
+	f.mutex.Unlock()
 }
 
 // Pos returns the Pos value for the given file offset;
@@ -428,132 +449,6 @@
 	}
 }
 
-// Base returns the minimum base offset that must be provided to
-// AddFile when adding the next file.
-func (s *FileSet) Base() int {
-	s.mutex.RLock()
-	b := s.base
-	s.mutex.RUnlock()
-	return b
-
-}
-
-// AddFile adds a new file with a given filename, base offset, and file size
-// to the file set s and returns the file. Multiple files may have the same
-// name. The base offset must not be smaller than the FileSet's Base(), and
-// size must not be negative. As a special case, if a negative base is provided,
-// the current value of the FileSet's Base() is used instead.
-//
-// Adding the file will set the file set's Base() value to base + size + 1
-// as the minimum base value for the next file. The following relationship
-// exists between a Pos value p for a given file offset offs:
-//
-//	int(p) = base + offs
-//
-// with offs in the range [0, size] and thus p in the range [base, base+size].
-// For convenience, File.Pos may be used to create file-specific position
-// values from a file offset.
-func (s *FileSet) AddFile(filename string, base, size int) *File {
-	s.mutex.Lock()
-	defer s.mutex.Unlock()
-	if base < 0 {
-		base = s.base
-	}
-	if base < s.base || size < 0 {
-		panic("illegal base or size")
-	}
-	// base >= s.base && size >= 0
-	f := &File{s, filename, index(base), index(size), []index{0}, nil}
-	base += size + 1 // +1 because EOF also has a position
-	if base < 0 {
-		panic("token.Pos offset overflow (> 2G of source code in file set)")
-	}
-	// add the file to the file set
-	s.base = base
-	s.files = append(s.files, f)
-	s.last = f
-	return f
-}
-
-// Iterate calls f for the files in the file set in the order they were added
-// until f returns false.
-//
-func (s *FileSet) Iterate(f func(*File) bool) {
-	for i := 0; ; i++ {
-		var file *File
-		s.mutex.RLock()
-		if i < len(s.files) {
-			file = s.files[i]
-		}
-		s.mutex.RUnlock()
-		if file == nil || !f(file) {
-			break
-		}
-	}
-}
-
-func searchFiles(a []*File, x index) int {
-	return sort.Search(len(a), func(i int) bool { return a[i].base > x }) - 1
-}
-
-func (s *FileSet) file(p Pos) *File {
-	x := p.index()
-	s.mutex.RLock()
-	// common case: p is in last file
-	if f := s.last; f != nil && f.base <= x && x <= f.base+f.size {
-		s.mutex.RUnlock()
-		return f
-	}
-	// p is not in last file - search all files
-	if i := searchFiles(s.files, x); i >= 0 {
-		f := s.files[i]
-		// f.base <= int(p) by definition of searchFiles
-		if x <= f.base+f.size {
-			s.mutex.RUnlock()
-			s.mutex.Lock()
-			s.last = f // race is ok - s.last is only a cache
-			s.mutex.Unlock()
-			return f
-		}
-	}
-	s.mutex.RUnlock()
-	return nil
-}
-
-// File returns the file that contains the position p.
-// If no such file is found (for instance for p == NoPos),
-// the result is nil.
-//
-func (s *FileSet) File(p Pos) (f *File) {
-	if p.index() != 0 {
-		f = s.file(p)
-	}
-	return
-}
-
-// PositionFor converts a Pos p in the fileset into a Position value.
-// If adjusted is set, the position may be adjusted by position-altering
-// //line comments; otherwise those comments are ignored.
-// p must be a Pos value in s or NoPos.
-//
-func (s *FileSet) PositionFor(p Pos, adjusted bool) (pos Position) {
-	if p.index() != 0 {
-		if f := s.file(p); f != nil {
-			s.mutex.RLock()
-			pos = f.position(p, adjusted)
-			s.mutex.RUnlock()
-		}
-	}
-	return
-}
-
-// Position converts a Pos p in the fileset into a Position value.
-// Calling s.Position(p) is equivalent to calling s.PositionFor(p, true).
-//
-func (s *FileSet) Position(p Pos) (pos Position) {
-	return s.PositionFor(p, true)
-}
-
 // -----------------------------------------------------------------------------
 // Helper functions
 
diff --git a/cue/token/position_test.go b/cue/token/position_test.go
index 39ca970..a855ebd 100644
--- a/cue/token/position_test.go
+++ b/cue/token/position_test.go
@@ -16,8 +16,6 @@
 
 import (
 	"fmt"
-	"math/rand"
-	"sync"
 	"testing"
 )
 
@@ -40,10 +38,7 @@
 	if NoPos.IsValid() {
 		t.Errorf("NoPos should not be valid")
 	}
-	var fset *FileSet
-	checkPos(t, "nil NoPos", fset.Position(NoPos), Position{})
-	fset = NewFileSet()
-	checkPos(t, "fset NoPos", fset.Position(NoPos), Position{})
+	checkPos(t, "nil NoPos", NoPos.Position(), Position{})
 }
 
 var tests = []struct {
@@ -82,8 +77,8 @@
 		}
 		line, col := linecol(lines, offs)
 		msg := fmt.Sprintf("%s (offs = %d, p = %d)", f.Name(), offs, p.offset)
-		checkPos(t, msg, f.Position(f.Pos(offs, 0)), Position{f.Name(), offs, line, col})
-		checkPos(t, msg, fset.Position(p), Position{f.Name(), offs, line, col})
+		checkPos(t, msg, f.Pos(offs, 0).Position(), Position{f.Name(), offs, line, col})
+		checkPos(t, msg, p.Position(), Position{f.Name(), offs, line, col})
 	}
 }
 
@@ -107,14 +102,14 @@
 		}
 
 		// add file and verify name and size
-		f := fset.AddFile(test.filename, fset.Base()+delta, test.size)
+		f := NewFile(test.filename, 1+delta, test.size)
 		if f.Name() != test.filename {
 			t.Errorf("got filename %q; want %q", f.Name(), test.filename)
 		}
 		if f.Size() != test.size {
 			t.Errorf("%s: got file size %d; want %d", f.Name(), f.Size(), test.size)
 		}
-		if fset.File(f.Pos(0, 0)) != f {
+		if f.Pos(0, 0).file != f {
 			t.Errorf("%s: f.Pos(0, 0) was not found in f", f.Name())
 		}
 
@@ -156,8 +151,7 @@
 }
 
 func TestLineInfo(t *testing.T) {
-	fset := NewFileSet()
-	f := fset.AddFile("foo", fset.Base(), 500)
+	f := NewFile("foo", 1, 500)
 	lines := []int{0, 42, 77, 100, 210, 220, 277, 300, 333, 401}
 	// add lines individually and provide alternative line information
 	for _, offs := range lines {
@@ -170,112 +164,10 @@
 		_, col := linecol(lines, offs)
 		msg := fmt.Sprintf("%s (offs = %d, p = %d)", f.Name(), offs, p.offset)
 		checkPos(t, msg, f.Position(f.Pos(offs, 0)), Position{"bar", offs, 42, col})
-		checkPos(t, msg, fset.Position(p), Position{"bar", offs, 42, col})
+		checkPos(t, msg, p.Position(), Position{"bar", offs, 42, col})
 	}
 }
 
-func TestFiles(t *testing.T) {
-	fset := NewFileSet()
-	for i, test := range tests {
-		base := fset.Base()
-		if i%2 == 1 {
-			// Setting a negative base is equivalent to
-			// fset.Base(), so test some of each.
-			base = -1
-		}
-		fset.AddFile(test.filename, base, test.size)
-		j := 0
-		fset.Iterate(func(f *File) bool {
-			if f.Name() != tests[j].filename {
-				t.Errorf("got filename = %s; want %s", f.Name(), tests[j].filename)
-			}
-			j++
-			return true
-		})
-		if j != i+1 {
-			t.Errorf("got %d files; want %d", j, i+1)
-		}
-	}
-}
-
-// FileSet.File should return nil if Pos is past the end of the FileSet.
-func TestFileSetPastEnd(t *testing.T) {
-	fset := NewFileSet()
-	for _, test := range tests {
-		fset.AddFile(test.filename, fset.Base(), test.size)
-	}
-	if f := fset.File(Pos{nil, toPos(index(fset.Base()))}); f != nil {
-		t.Errorf("got %v, want nil", f)
-	}
-}
-
-func TestFileSetCacheUnlikely(t *testing.T) {
-	fset := NewFileSet()
-	offsets := make(map[string]index)
-	for _, test := range tests {
-		offsets[test.filename] = index(fset.Base())
-		fset.AddFile(test.filename, fset.Base(), test.size)
-	}
-	for file, pos := range offsets {
-		f := fset.File(Pos{nil, toPos(pos)})
-		if f.Name() != file {
-			t.Errorf("got %q at position %d, want %q", f.Name(), pos, file)
-		}
-	}
-}
-
-// issue 4345. Test that concurrent use of FileSet.Pos does not trigger a
-// race in the FileSet position cache.
-func TestFileSetRace(t *testing.T) {
-	fset := NewFileSet()
-	for i := 0; i < 100; i++ {
-		fset.AddFile(fmt.Sprintf("file-%d", i), fset.Base(), 1031)
-	}
-	max := int32(fset.Base())
-	var stop sync.WaitGroup
-	r := rand.New(rand.NewSource(7))
-	for i := 0; i < 2; i++ {
-		r := rand.New(rand.NewSource(r.Int63()))
-		stop.Add(1)
-		go func() {
-			for i := 0; i < 1000; i++ {
-				fset.Position(Pos{nil, int(r.Int31n(max))})
-			}
-			stop.Done()
-		}()
-	}
-	stop.Wait()
-}
-
-// issue 16548. Test that concurrent use of File.AddLine and FileSet.PositionFor
-// does not trigger a race in the FileSet position cache.
-func TestFileSetRace2(t *testing.T) {
-	const N = 1e3
-	var (
-		fset = NewFileSet()
-		file = fset.AddFile("", -1, N)
-		ch   = make(chan int, 2)
-	)
-
-	go func() {
-		for i := 0; i < N; i++ {
-			file.AddLine(i)
-		}
-		ch <- 1
-	}()
-
-	go func() {
-		pos := file.Pos(0, 0)
-		for i := 0; i < N; i++ {
-			fset.PositionFor(pos, false)
-		}
-		ch <- 1
-	}()
-
-	<-ch
-	<-ch
-}
-
 func TestPositionFor(t *testing.T) {
 	src := []byte(`
 foo
@@ -288,8 +180,7 @@
 `)
 
 	const filename = "foo"
-	fset := NewFileSet()
-	f := fset.AddFile(filename, fset.Base(), len(src))
+	f := NewFile(filename, 1, len(src))
 	f.SetLinesForContent(src)
 
 	// verify position info
diff --git a/cue/types.go b/cue/types.go
index 1df3477..e7a87c4 100644
--- a/cue/types.go
+++ b/cue/types.go
@@ -755,7 +755,7 @@
 		return token.Position{}
 	}
 	pos := v.Source().Pos()
-	return v.idx.fset.Position(pos)
+	return pos.Position()
 }
 
 // IsConcrete reports whether the current value is a concrete scalar value,
diff --git a/internal/third_party/yaml/decode.go b/internal/third_party/yaml/decode.go
index fc91a8e..0626801 100644
--- a/internal/third_party/yaml/decode.go
+++ b/internal/third_party/yaml/decode.go
@@ -80,7 +80,7 @@
 	if err != nil {
 		return nil, err
 	}
-	info := fset.AddFile(filename, -1, len(b))
+	info := token.NewFile(filename, -1, len(b))
 	info.SetLinesForContent(b)
 	p := parser{info: info}
 	if !yaml_parser_initialize(&p.parser, filename) {
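
For reference, a minimal stand-alone sketch of the resulting API. It is
illustrative only: the module path cuelang.org/go, the file name, and
the source text are assumptions, and the snippet uses nothing beyond
token.NewFile, File.SetLinesForContent, File.Pos, and the Pos methods
added above.

    package main

    import (
        "fmt"

        "cuelang.org/go/cue/token" // assumed import path
    )

    func main() {
        // Invented example source: two lines, five bytes each.
        src := []byte("a: 1\nb: 2\n")

        // Create a file handle directly; no FileSet is involved.
        // A negative base falls back to the default base of 1.
        f := token.NewFile("example.cue", -1, len(src))
        f.SetLinesForContent(src) // build the line table from the source

        // A Pos carries its *File, so it can resolve itself.
        p := f.Pos(5, 0) // byte offset 5 ("b"); relative position left at 0
        fmt.Println(p)   // Pos.String, e.g. "example.cue:2:1"

        pos := p.Position() // the full, larger Position value
        fmt.Println(pos.Filename, pos.Line, pos.Column, p.Line())
    }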