internal: remove unneeded FileSets

This change replaces various uses of FileSet with either
nothing (when the parameter wasn't really needed) or token.File.
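
For example, a helper that needs line information for only one file can
take that file's *token.File, which the caller looks up once. A sketch
based on formatFile in internal/imports (fset, file, and opt are already
in scope there):

	tokFile := fset.File(file.Pos()) // file is the *ast.File
	sortImports(opt.LocalPrefix, tokFile, file)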

Notably, astutil.Imports was being used merely to extract the imports
of a file (already available as ast.File.Imports), forcing a number
of wrappers to have a FileSet parameter.
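
Where only the flat list of import specs is needed, no FileSet (and no
astutil) is required. A sketch of the new pattern, adapted from
analysisinternal.TypeExpr (t is the *types.Named being printed, f the
*ast.File):

	pkgName := t.Obj().Pkg().Name()
	for _, cand := range f.Imports {
		if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() {
			if cand.Name != nil && cand.Name.Name != "" {
				pkgName = cand.Name.Name
			}
		}
	}

Where the grouping of imports into contiguous blocks still matters
(internal/imports), astutil.Imports is retained.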

Also, simplify various expressions of the form file.Position(pos).Line
to file.Line(pos).
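
That is, for a pos known to lie within tokFile (a *token.File;
illustrative only):

	// Before:
	line := tokFile.Position(pos).Line
	// After (same value, without constructing a token.Position):
	line = tokFile.Line(pos)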

Change-Id: I19fe86a18aba50352275f77ed737513744d3930c
Reviewed-on: https://go-review.googlesource.com/c/tools/+/410366
Run-TryBot: Alan Donovan <adonovan@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
diff --git a/internal/analysisinternal/analysis.go b/internal/analysisinternal/analysis.go
index 78ee2c0..3f1e573 100644
--- a/internal/analysisinternal/analysis.go
+++ b/internal/analysisinternal/analysis.go
@@ -11,9 +11,8 @@
 	"go/ast"
 	"go/token"
 	"go/types"
-	"strings"
+	"strconv"
 
-	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/internal/lsp/fuzzy"
 )
 
@@ -37,7 +36,7 @@
 	return end
 }
 
-func ZeroValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	under := typ
 	if n, ok := typ.(*types.Named); ok {
 		under = n.Underlying()
@@ -57,7 +56,7 @@
 	case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array:
 		return ast.NewIdent("nil")
 	case *types.Struct:
-		texpr := TypeExpr(fset, f, pkg, typ) // typ because we want the name here.
+		texpr := TypeExpr(f, pkg, typ) // typ because we want the name here.
 		if texpr == nil {
 			return nil
 		}
@@ -81,7 +80,7 @@
 	}
 }
 
-func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func TypeExpr(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	switch t := typ.(type) {
 	case *types.Basic:
 		switch t.Kind() {
@@ -91,7 +90,7 @@
 			return ast.NewIdent(t.Name())
 		}
 	case *types.Pointer:
-		x := TypeExpr(fset, f, pkg, t.Elem())
+		x := TypeExpr(f, pkg, t.Elem())
 		if x == nil {
 			return nil
 		}
@@ -100,7 +99,7 @@
 			X:  x,
 		}
 	case *types.Array:
-		elt := TypeExpr(fset, f, pkg, t.Elem())
+		elt := TypeExpr(f, pkg, t.Elem())
 		if elt == nil {
 			return nil
 		}
@@ -112,7 +111,7 @@
 			Elt: elt,
 		}
 	case *types.Slice:
-		elt := TypeExpr(fset, f, pkg, t.Elem())
+		elt := TypeExpr(f, pkg, t.Elem())
 		if elt == nil {
 			return nil
 		}
@@ -120,8 +119,8 @@
 			Elt: elt,
 		}
 	case *types.Map:
-		key := TypeExpr(fset, f, pkg, t.Key())
-		value := TypeExpr(fset, f, pkg, t.Elem())
+		key := TypeExpr(f, pkg, t.Key())
+		value := TypeExpr(f, pkg, t.Elem())
 		if key == nil || value == nil {
 			return nil
 		}
@@ -134,7 +133,7 @@
 		if t.Dir() == types.SendRecv {
 			dir = ast.SEND | ast.RECV
 		}
-		value := TypeExpr(fset, f, pkg, t.Elem())
+		value := TypeExpr(f, pkg, t.Elem())
 		if value == nil {
 			return nil
 		}
@@ -145,7 +144,7 @@
 	case *types.Signature:
 		var params []*ast.Field
 		for i := 0; i < t.Params().Len(); i++ {
-			p := TypeExpr(fset, f, pkg, t.Params().At(i).Type())
+			p := TypeExpr(f, pkg, t.Params().At(i).Type())
 			if p == nil {
 				return nil
 			}
@@ -160,7 +159,7 @@
 		}
 		var returns []*ast.Field
 		for i := 0; i < t.Results().Len(); i++ {
-			r := TypeExpr(fset, f, pkg, t.Results().At(i).Type())
+			r := TypeExpr(f, pkg, t.Results().At(i).Type())
 			if r == nil {
 				return nil
 			}
@@ -184,13 +183,12 @@
 			return ast.NewIdent(t.Obj().Name())
 		}
 		pkgName := t.Obj().Pkg().Name()
+
 		// If the file already imports the package under another name, use that.
-		for _, group := range astutil.Imports(fset, f) {
-			for _, cand := range group {
-				if strings.Trim(cand.Path.Value, `"`) == t.Obj().Pkg().Path() {
-					if cand.Name != nil && cand.Name.Name != "" {
-						pkgName = cand.Name.Name
-					}
+		for _, cand := range f.Imports {
+			if path, _ := strconv.Unquote(cand.Path.Value); path == t.Obj().Pkg().Path() {
+				if cand.Name != nil && cand.Name.Name != "" {
+					pkgName = cand.Name.Name
 				}
 			}
 		}
diff --git a/internal/imports/imports.go b/internal/imports/imports.go
index 7902672..95a8838 100644
--- a/internal/imports/imports.go
+++ b/internal/imports/imports.go
@@ -103,12 +103,17 @@
 	return formatFile(fileSet, file, src, nil, opt)
 }
 
-func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
-	mergeImports(fileSet, file)
-	sortImports(opt.LocalPrefix, fileSet, file)
-	imps := astutil.Imports(fileSet, file)
+// formatFile formats the file syntax tree.
+// It may mutate the token.FileSet.
+//
+// If an adjust function is provided, it is called after formatting
+// with the original source (formatFile's src parameter) and the
+// formatted file, and returns the postprocessed result.
+func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
+	mergeImports(file)
+	sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
 	var spacesBefore []string // import paths we need spaces before
-	for _, impSection := range imps {
+	for _, impSection := range astutil.Imports(fset, file) {
 		// Within each block of contiguous imports, see if any
 		// import lines are in different group numbers. If so,
 		// we'll need to put a space between them so it's
@@ -132,7 +137,7 @@
 	printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth}
 
 	var buf bytes.Buffer
-	err := printConfig.Fprint(&buf, fileSet, file)
+	err := printConfig.Fprint(&buf, fset, file)
 	if err != nil {
 		return nil, err
 	}
diff --git a/internal/imports/sortimports.go b/internal/imports/sortimports.go
index c5a70c0..85144db 100644
--- a/internal/imports/sortimports.go
+++ b/internal/imports/sortimports.go
@@ -3,6 +3,7 @@
 // license that can be found in the LICENSE file.
 
 // Hacked up copy of go/ast/import.go
+// Modified to use a single token.File in preference to a FileSet.
 
 package imports
 
@@ -16,7 +17,9 @@
 
 // sortImports sorts runs of consecutive import lines in import blocks in f.
 // It also removes duplicate imports when it is possible to do so without data loss.
-func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) {
+//
+// It may mutate the token.File.
+func sortImports(localPrefix string, tokFile *token.File, f *ast.File) {
 	for i, d := range f.Decls {
 		d, ok := d.(*ast.GenDecl)
 		if !ok || d.Tok != token.IMPORT {
@@ -39,21 +42,21 @@
 		i := 0
 		specs := d.Specs[:0]
 		for j, s := range d.Specs {
-			if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line {
+			if j > i && tokFile.Line(s.Pos()) > 1+tokFile.Line(d.Specs[j-1].End()) {
 				// j begins a new run.  End this one.
-				specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:j])...)
+				specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:j])...)
 				i = j
 			}
 		}
-		specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:])...)
+		specs = append(specs, sortSpecs(localPrefix, tokFile, f, d.Specs[i:])...)
 		d.Specs = specs
 
 		// Deduping can leave a blank line before the rparen; clean that up.
 		if len(d.Specs) > 0 {
 			lastSpec := d.Specs[len(d.Specs)-1]
-			lastLine := fset.PositionFor(lastSpec.Pos(), false).Line
-			if rParenLine := fset.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
-				fset.File(d.Rparen).MergeLine(rParenLine - 1)
+			lastLine := tokFile.PositionFor(lastSpec.Pos(), false).Line
+			if rParenLine := tokFile.PositionFor(d.Rparen, false).Line; rParenLine > lastLine+1 {
+				tokFile.MergeLine(rParenLine - 1) // has side effects!
 			}
 		}
 	}
@@ -62,7 +65,7 @@
 // mergeImports merges all the import declarations into the first one.
 // Taken from golang.org/x/tools/ast/astutil.
 // This does not adjust line numbers properly
-func mergeImports(fset *token.FileSet, f *ast.File) {
+func mergeImports(f *ast.File) {
 	if len(f.Decls) <= 1 {
 		return
 	}
@@ -144,7 +147,9 @@
 	End   token.Pos
 }
 
-func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec {
+// sortSpecs sorts the import specs within each import decl.
+// It may mutate the token.File.
+func sortSpecs(localPrefix string, tokFile *token.File, f *ast.File, specs []ast.Spec) []ast.Spec {
 	// Can't short-circuit here even if specs are already sorted,
 	// since they might yet need deduplication.
 	// A lone import, however, may be safely ignored.
@@ -160,7 +165,7 @@
 
 	// Identify comments in this range.
 	// Any comment from pos[0].Start to the final line counts.
-	lastLine := fset.Position(pos[len(pos)-1].End).Line
+	lastLine := tokFile.Line(pos[len(pos)-1].End)
 	cstart := len(f.Comments)
 	cend := len(f.Comments)
 	for i, g := range f.Comments {
@@ -170,7 +175,7 @@
 		if i < cstart {
 			cstart = i
 		}
-		if fset.Position(g.End()).Line > lastLine {
+		if tokFile.Line(g.End()) > lastLine {
 			cend = i
 			break
 		}
@@ -203,7 +208,7 @@
 			deduped = append(deduped, s)
 		} else {
 			p := s.Pos()
-			fset.File(p).MergeLine(fset.Position(p).Line)
+			tokFile.MergeLine(tokFile.Line(p)) // has side effects!
 		}
 	}
 	specs = deduped
@@ -234,21 +239,21 @@
 
 	// Fixup comments can insert blank lines, because import specs are on different lines.
 	// We remove those blank lines here by merging import spec to the first import spec line.
-	firstSpecLine := fset.Position(specs[0].Pos()).Line
+	firstSpecLine := tokFile.Line(specs[0].Pos())
 	for _, s := range specs[1:] {
 		p := s.Pos()
-		line := fset.File(p).Line(p)
+		line := tokFile.Line(p)
 		for previousLine := line - 1; previousLine >= firstSpecLine; {
 			// MergeLine can panic. Avoid the panic at the cost of not removing the blank line
 			// golang/go#50329
-			if previousLine > 0 && previousLine < fset.File(p).LineCount() {
-				fset.File(p).MergeLine(previousLine)
+			if previousLine > 0 && previousLine < tokFile.LineCount() {
+				tokFile.MergeLine(previousLine) // has side effects!
 				previousLine--
 			} else {
 				// try to gather some data to diagnose how this could happen
 				req := "Please report what the imports section of your go file looked like."
 				log.Printf("panic avoided: first:%d line:%d previous:%d max:%d. %s",
-					firstSpecLine, line, previousLine, fset.File(p).LineCount(), req)
+					firstSpecLine, line, previousLine, tokFile.LineCount(), req)
 			}
 		}
 	}
diff --git a/internal/lsp/analysis/fillreturns/fillreturns.go b/internal/lsp/analysis/fillreturns/fillreturns.go
index 3299f74..72fe65d 100644
--- a/internal/lsp/analysis/fillreturns/fillreturns.go
+++ b/internal/lsp/analysis/fillreturns/fillreturns.go
@@ -193,8 +193,7 @@
 				// generate a zero value.
 				value := analysisinternal.FindBestMatch(retTyp.String(), idents)
 				if value == nil {
-					value = analysisinternal.ZeroValue(
-						pass.Fset, file, pass.Pkg, retTyp)
+					value = analysisinternal.ZeroValue(file, pass.Pkg, retTyp)
 				}
 				if value == nil {
 					return nil, nil
diff --git a/internal/lsp/analysis/fillstruct/fillstruct.go b/internal/lsp/analysis/fillstruct/fillstruct.go
index 6987f78..f160d44 100644
--- a/internal/lsp/analysis/fillstruct/fillstruct.go
+++ b/internal/lsp/analysis/fillstruct/fillstruct.go
@@ -256,7 +256,7 @@
 			// NOTE: We currently match on the name of the field key rather than the field type.
 			value := analysisinternal.FindBestMatch(obj.Field(i).Name(), idents)
 			if value == nil {
-				value = populateValue(fset, file, pkg, fieldTyp)
+				value = populateValue(file, pkg, fieldTyp)
 			}
 			if value == nil {
 				return nil, nil
@@ -354,7 +354,7 @@
 //
 // The reasoning here is that users will call fillstruct with the intention of
 // initializing the struct, in which case setting these fields to nil has no effect.
-func populateValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
+func populateValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr {
 	under := typ
 	if n, ok := typ.(*types.Named); ok {
 		under = n.Underlying()
@@ -374,8 +374,8 @@
 			panic("unknown basic type")
 		}
 	case *types.Map:
-		k := analysisinternal.TypeExpr(fset, f, pkg, u.Key())
-		v := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+		k := analysisinternal.TypeExpr(f, pkg, u.Key())
+		v := analysisinternal.TypeExpr(f, pkg, u.Elem())
 		if k == nil || v == nil {
 			return nil
 		}
@@ -386,7 +386,7 @@
 			},
 		}
 	case *types.Slice:
-		s := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+		s := analysisinternal.TypeExpr(f, pkg, u.Elem())
 		if s == nil {
 			return nil
 		}
@@ -396,7 +396,7 @@
 			},
 		}
 	case *types.Array:
-		a := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+		a := analysisinternal.TypeExpr(f, pkg, u.Elem())
 		if a == nil {
 			return nil
 		}
@@ -409,7 +409,7 @@
 			},
 		}
 	case *types.Chan:
-		v := analysisinternal.TypeExpr(fset, f, pkg, u.Elem())
+		v := analysisinternal.TypeExpr(f, pkg, u.Elem())
 		if v == nil {
 			return nil
 		}
@@ -427,7 +427,7 @@
 			},
 		}
 	case *types.Struct:
-		s := analysisinternal.TypeExpr(fset, f, pkg, typ)
+		s := analysisinternal.TypeExpr(f, pkg, typ)
 		if s == nil {
 			return nil
 		}
@@ -437,7 +437,7 @@
 	case *types.Signature:
 		var params []*ast.Field
 		for i := 0; i < u.Params().Len(); i++ {
-			p := analysisinternal.TypeExpr(fset, f, pkg, u.Params().At(i).Type())
+			p := analysisinternal.TypeExpr(f, pkg, u.Params().At(i).Type())
 			if p == nil {
 				return nil
 			}
@@ -452,7 +452,7 @@
 		}
 		var returns []*ast.Field
 		for i := 0; i < u.Results().Len(); i++ {
-			r := analysisinternal.TypeExpr(fset, f, pkg, u.Results().At(i).Type())
+			r := analysisinternal.TypeExpr(f, pkg, u.Results().At(i).Type())
 			if r == nil {
 				return nil
 			}
@@ -487,7 +487,7 @@
 		default:
 			return &ast.UnaryExpr{
 				Op: token.AND,
-				X:  populateValue(fset, f, pkg, u.Elem()),
+				X:  populateValue(f, pkg, u.Elem()),
 			}
 		}
 	case *types.Interface:
diff --git a/internal/lsp/analysis/undeclaredname/undeclared.go b/internal/lsp/analysis/undeclaredname/undeclared.go
index 22b552c..faa1409 100644
--- a/internal/lsp/analysis/undeclaredname/undeclared.go
+++ b/internal/lsp/analysis/undeclaredname/undeclared.go
@@ -268,7 +268,7 @@
 			Names: []*ast.Ident{
 				ast.NewIdent(name),
 			},
-			Type: analysisinternal.TypeExpr(fset, file, pkg, paramTypes[i]),
+			Type: analysisinternal.TypeExpr(file, pkg, paramTypes[i]),
 		})
 	}
 
diff --git a/internal/lsp/source/extract.go b/internal/lsp/source/extract.go
index 1272ff3..90999d8 100644
--- a/internal/lsp/source/extract.go
+++ b/internal/lsp/source/extract.go
@@ -330,7 +330,7 @@
 			// The blank identifier is always a local variable
 			continue
 		}
-		typ := analysisinternal.TypeExpr(fset, file, pkg, v.obj.Type())
+		typ := analysisinternal.TypeExpr(file, pkg, v.obj.Type())
 		if typ == nil {
 			return nil, fmt.Errorf("nil AST expression for type: %v", v.obj.Name())
 		}
@@ -1130,7 +1130,7 @@
 				return nil, nil, fmt.Errorf(
 					"failed type conversion, AST expression: %T", field.Type)
 			}
-			expr := analysisinternal.TypeExpr(fset, file, pkg, typ)
+			expr := analysisinternal.TypeExpr(file, pkg, typ)
 			if expr == nil {
 				return nil, nil, fmt.Errorf("nil AST expression")
 			}
@@ -1138,10 +1138,9 @@
 			name, idx = generateAvailableIdentifier(pos, file,
 				path, info, "returnValue", idx)
 			retVars = append(retVars, &returnVariable{
-				name: ast.NewIdent(name),
-				decl: &ast.Field{Type: expr},
-				zeroVal: analysisinternal.ZeroValue(
-					fset, file, pkg, typ),
+				name:    ast.NewIdent(name),
+				decl:    &ast.Field{Type: expr},
+				zeroVal: analysisinternal.ZeroValue(file, pkg, typ),
 			})
 		}
 	}
@@ -1170,7 +1169,7 @@
 			if typ != returnType.Type {
 				continue
 			}
-			val = analysisinternal.ZeroValue(fset, file, pkg, obj.Type())
+			val = analysisinternal.ZeroValue(file, pkg, obj.Type())
 			break
 		}
 		if val == nil {
diff --git a/internal/lsp/source/stub.go b/internal/lsp/source/stub.go
index f09bbab..0d19817 100644
--- a/internal/lsp/source/stub.go
+++ b/internal/lsp/source/stub.go
@@ -19,6 +19,7 @@
 	"golang.org/x/tools/go/ast/astutil"
 	"golang.org/x/tools/internal/lsp/analysis/stubmethods"
 	"golang.org/x/tools/internal/lsp/protocol"
+	"golang.org/x/tools/internal/lsp/safetoken"
 	"golang.org/x/tools/internal/span"
 	"golang.org/x/tools/internal/typeparams"
 )
@@ -57,8 +58,8 @@
 	if err != nil {
 		return nil, fmt.Errorf("error reading concrete file source: %w", err)
 	}
-	insertPos := snapshot.FileSet().Position(nodes[1].End()).Offset
-	if insertPos >= len(concreteSrc) {
+	insertPos, err := safetoken.Offset(parsedConcreteFile.Tok, nodes[1].End())
+	if err != nil || insertPos >= len(concreteSrc) {
 		return nil, fmt.Errorf("insertion position is past the end of the file")
 	}
 	var buf bytes.Buffer
diff --git a/internal/lsp/source/util.go b/internal/lsp/source/util.go
index 06720d7..9cb2ee6 100644
--- a/internal/lsp/source/util.go
+++ b/internal/lsp/source/util.go
@@ -113,15 +113,11 @@
 	if err != nil {
 		return false
 	}
-	tok := snapshot.FileSet().File(pgf.File.Pos())
-	if tok == nil {
-		return false
-	}
 	for _, commentGroup := range pgf.File.Comments {
 		for _, comment := range commentGroup.List {
 			if matched := generatedRx.MatchString(comment.Text); matched {
 				// Check if comment is at the beginning of the line in source.
-				if pos := tok.Position(comment.Slash); pos.Column == 1 {
+				if pgf.Tok.Position(comment.Slash).Column == 1 {
 					return true
 				}
 			}