gopls/internal/lsp/source: use syntax alone in FormatVarType

FormatVarType re-formats a variable's type from its declaring syntax, in
order to present aliases and ellipses accurately. However, as a result it
requires typed syntax trees for the type declaration, which may be in a
different package from the current package.

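For illustration only (not part of this change), consider a declaration
such as the following, which uses an alias and a variadic parameter:

    type Bytes = []byte

    func Write(data Bytes, extra ...int) {}

For these parameters, types.TypeString typically renders "[]byte" and
"[]int" (the alias is resolved and the variadic parameter is modeled as a
slice), whereas formatting the declaring syntax preserves "Bytes" and
"...int".
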
In the near future we may not have typed syntax trees for these
packages. We could type-check on demand, but (1) that could be costly,
and (2) it may break qualification via the go/types qualifier, which
relies on the identity of *types.Package values from the original
type-check.

Instead, perform this operation using a qualifier based on syntax and
metadata, so that we only need a fully parsed file rather than a fully
type-checked package. The resulting expressions may be inaccurate due to
built-ins, dot-imported packages, or missing metadata, but that seems
acceptable for the current use cases of this function, which are
completion and signature help.

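As a rough sketch of the approach (simplified relative to the
qualifyTypeExpr and MetadataQualifier code in the diff below; requalify
and rename are hypothetical names), package qualifiers in the type
expression from the declaring file are rewritten using only a
name-to-name mapping derived from import metadata:

    import "go/ast"

    // requalify rewrites the package qualifier of a selector such as
    // "ast.Expr". rename is derived from the imports of the declaring and
    // requesting files, and returns "" when no qualifier is needed.
    func requalify(expr ast.Expr, rename func(pkgName string) string) ast.Expr {
        sel, ok := expr.(*ast.SelectorExpr)
        if !ok {
            return expr // the real code also recurses into composite types
        }
        id, ok := sel.X.(*ast.Ident)
        if !ok {
            return expr
        }
        if newName := rename(id.Name); newName != "" {
            return &ast.SelectorExpr{
                X:   &ast.Ident{NamePos: id.NamePos, Name: newName},
                Sel: sel.Sel,
            }
        }
        return sel.Sel // declared in the requesting package: drop the qualifier
    }
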
While doing this, add a FormatNodeFile helper that formats a node using
only a *token.File, rather than a *token.FileSet. This can help us avoid
relying on a global FileSet. To facilitate this, move the GetLines helper
from internal/gcimporter into a shared tokeninternal package.

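Concretely, the new helper works by reconstructing a standalone FileSet
from the *token.File, as in the SingletonFileSet helper added to util.go
below:

    fset := token.NewFileSet()
    f2 := fset.AddFile(f.Name(), f.Base(), f.Size())
    f2.SetLines(tokeninternal.GetLines(f)) // copy the line table
    return FormatNode(fset, n)             // format n against the new FileSet
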
For golang/go#57987

Change-Id: I3b8a5256bc2261be8b5175ee360b9336228928ac
Reviewed-on: https://go-review.googlesource.com/c/tools/+/464301
Run-TryBot: Robert Findley <rfindley@google.com>
gopls-CI: kokoro <noreply+kokoro@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: Alan Donovan <adonovan@google.com>
diff --git a/gopls/internal/lsp/semantic.go b/gopls/internal/lsp/semantic.go
index 5b7655a..a3a1bca 100644
--- a/gopls/internal/lsp/semantic.go
+++ b/gopls/internal/lsp/semantic.go
@@ -220,9 +220,7 @@
 
 	ctx context.Context
 	// metadataSource is used to resolve imports
-	metadataSource interface {
-		Metadata(source.PackageID) *source.Metadata
-	}
+	metadataSource    source.MetadataSource
 	tokTypes, tokMods []string
 	pgf               *source.ParsedGoFile
 	rng               *protocol.Range
diff --git a/gopls/internal/lsp/source/call_hierarchy.go b/gopls/internal/lsp/source/call_hierarchy.go
index d9efcbe..6d67dc0 100644
--- a/gopls/internal/lsp/source/call_hierarchy.go
+++ b/gopls/internal/lsp/source/call_hierarchy.go
@@ -231,7 +231,7 @@
 		return nil, err
 	}
 
-	declNode, _ := FindDeclAndField([]*ast.File{declPGF.File}, declPos)
+	declNode, _, _ := FindDeclInfo([]*ast.File{declPGF.File}, declPos)
 	if declNode == nil {
 		// TODO(rfindley): why don't we return an error here, or even bug.Errorf?
 		return nil, nil
diff --git a/gopls/internal/lsp/source/completion/completion.go b/gopls/internal/lsp/source/completion/completion.go
index ad6e79f..61d1a5a 100644
--- a/gopls/internal/lsp/source/completion/completion.go
+++ b/gopls/internal/lsp/source/completion/completion.go
@@ -156,7 +156,8 @@
 type completer struct {
 	snapshot source.Snapshot
 	pkg      source.Package
-	qf       types.Qualifier
+	qf       types.Qualifier          // for qualifying typed expressions
+	mq       source.MetadataQualifier // for syntactic qualifying
 	opts     *completionOptions
 
 	// completionContext contains information about the trigger for this
@@ -509,6 +510,7 @@
 		pkg:      pkg,
 		snapshot: snapshot,
 		qf:       source.Qualifier(pgf.File, pkg.GetTypes(), pkg.GetTypesInfo()),
+		mq:       source.MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata()),
 		completionContext: completionContext{
 			triggerCharacter: protoContext.TriggerCharacter,
 			triggerKind:      protoContext.TriggerKind,
diff --git a/gopls/internal/lsp/source/completion/format.go b/gopls/internal/lsp/source/completion/format.go
index 16da642..dfa61a0 100644
--- a/gopls/internal/lsp/source/completion/format.go
+++ b/gopls/internal/lsp/source/completion/format.go
@@ -81,7 +81,11 @@
 		if _, ok := obj.Type().(*types.Struct); ok {
 			detail = "struct{...}" // for anonymous structs
 		} else if obj.IsField() {
-			detail = source.FormatVarType(ctx, c.snapshot, c.pkg, obj, c.qf)
+			var err error
+			detail, err = source.FormatVarType(ctx, c.snapshot, c.pkg, c.file, obj, c.qf, c.mq)
+			if err != nil {
+				return CompletionItem{}, err
+			}
 		}
 		if obj.IsField() {
 			kind = protocol.FieldCompletion
@@ -130,7 +134,10 @@
 		switch mod {
 		case invoke:
 			if sig, ok := funcType.Underlying().(*types.Signature); ok {
-				s := source.NewSignature(ctx, c.snapshot, c.pkg, sig, nil, c.qf)
+				s, err := source.NewSignature(ctx, c.snapshot, c.pkg, c.file, sig, nil, c.qf, c.mq)
+				if err != nil {
+					return CompletionItem{}, err
+				}
 				c.functionCallSnippet("", s.TypeParams(), s.Params(), &snip)
 				if sig.Results().Len() == 1 {
 					funcType = sig.Results().At(0).Type()
@@ -243,7 +250,7 @@
 		return item, nil
 	}
 
-	decl, _ := source.FindDeclAndField(pkg.GetSyntax(), obj.Pos()) // may be nil
+	decl, _, _ := source.FindDeclInfo(pkg.GetSyntax(), obj.Pos()) // may be nil
 	hover, err := source.FindHoverContext(ctx, c.snapshot, pkg, obj, decl, nil)
 	if err != nil {
 		event.Error(ctx, "failed to find Hover", err, tag.URI.Of(uri))
diff --git a/gopls/internal/lsp/source/completion/literal.go b/gopls/internal/lsp/source/completion/literal.go
index 9d4b0e6..6777f73 100644
--- a/gopls/internal/lsp/source/completion/literal.go
+++ b/gopls/internal/lsp/source/completion/literal.go
@@ -202,10 +202,18 @@
 			// If the param has no name in the signature, guess a name based
 			// on the type. Use an empty qualifier to ignore the package.
 			// For example, we want to name "http.Request" "r", not "hr".
-			name = source.FormatVarType(ctx, c.snapshot, c.pkg, p, func(p *types.Package) string {
-				return ""
-			})
-			name = abbreviateTypeName(name)
+			typeName, err := source.FormatVarType(ctx, c.snapshot, c.pkg, c.file, p,
+				func(p *types.Package) string { return "" },
+				func(source.PackageName, source.ImportPath, source.PackagePath) string { return "" })
+			if err != nil {
+				// In general, the only error we should encounter while formatting is
+				// context cancellation.
+				if ctx.Err() == nil {
+					event.Error(ctx, "formatting var type", err)
+				}
+				return
+			}
+			name = abbreviateTypeName(typeName)
 		}
 		paramNames[i] = name
 		if name != "_" {
@@ -264,7 +272,15 @@
 		// of "i int, j int".
 		if i == sig.Params().Len()-1 || !types.Identical(p.Type(), sig.Params().At(i+1).Type()) {
 			snip.WriteText(" ")
-			typeStr := source.FormatVarType(ctx, c.snapshot, c.pkg, p, c.qf)
+			typeStr, err := source.FormatVarType(ctx, c.snapshot, c.pkg, c.file, p, c.qf, c.mq)
+			if err != nil {
+				// In general, the only error we should encounter while formatting is
+				// context cancellation.
+				if ctx.Err() == nil {
+					event.Error(ctx, "formatting var type", err)
+				}
+				return
+			}
 			if sig.Variadic() && i == sig.Params().Len()-1 {
 				typeStr = strings.Replace(typeStr, "[]", "...", 1)
 			}
@@ -314,7 +330,15 @@
 			snip.WriteText(name + " ")
 		}
 
-		text := source.FormatVarType(ctx, c.snapshot, c.pkg, r, c.qf)
+		text, err := source.FormatVarType(ctx, c.snapshot, c.pkg, c.file, r, c.qf, c.mq)
+		if err != nil {
+			// In general, the only error we should encounter while formatting is
+			// context cancellation.
+			if ctx.Err() == nil {
+				event.Error(ctx, "formatting var type", err)
+			}
+			return
+		}
 		if tp, _ := r.Type().(*typeparams.TypeParam); tp != nil && !c.typeParamInScope(tp) {
 			snip.WritePlaceholder(func(snip *snippet.Builder) {
 				snip.WriteText(text)
diff --git a/gopls/internal/lsp/source/hover.go b/gopls/internal/lsp/source/hover.go
index e67e4d8..1d48152 100644
--- a/gopls/internal/lsp/source/hover.go
+++ b/gopls/internal/lsp/source/hover.go
@@ -24,6 +24,7 @@
 	"golang.org/x/tools/go/types/typeutil"
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
+	"golang.org/x/tools/gopls/internal/span"
 	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/typeparams"
@@ -498,6 +499,41 @@
 	return str
 }
 
+// parseFull fully parses the file corresponding to position pos, referenced
+// from the given srcpkg.
+//
+// It returns the resulting ParsedGoFile as well as the new pos contained in
+// the parsed file.
+func parseFull(ctx context.Context, snapshot Snapshot, srcpkg Package, pos token.Pos) (*ParsedGoFile, token.Pos, error) {
+	f := srcpkg.FileSet().File(pos)
+	if f == nil {
+		return nil, 0, bug.Errorf("internal error: no file for position %d in %s", pos, srcpkg.Metadata().ID)
+	}
+
+	uri := span.URIFromPath(f.Name())
+	fh, err := snapshot.GetFile(ctx, uri)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	pgf, err := snapshot.ParseGo(ctx, fh, ParseFull)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	offset, err := safetoken.Offset(f, pos)
+	if err != nil {
+		return nil, 0, bug.Errorf("offset out of bounds in %q", uri)
+	}
+
+	fullPos, err := safetoken.Pos(pgf.Tok, offset)
+	if err != nil {
+		return nil, 0, err
+	}
+
+	return pgf, fullPos, nil
+}
+
 // FindHoverContext returns a HoverContext struct for an AST node and its
 // declaration object. node should be the actual node used in type checking,
 // while fullNode could be a separate node with more complete syntactic
@@ -637,7 +673,7 @@
 				break
 			}
 
-			_, field := FindDeclAndField(pkg.GetSyntax(), obj.Pos())
+			_, _, field := FindDeclInfo(pkg.GetSyntax(), obj.Pos())
 			if field != nil {
 				comment := field.Doc
 				if comment.Text() == "" {
@@ -893,16 +929,19 @@
 	return false
 }
 
-// FindDeclAndField returns the var/func/type/const Decl that declares
-// the identifier at pos, searching the given list of file syntax
-// trees. If pos is the position of an ast.Field or one of its Names
-// or Ellipsis.Elt, the field is returned, along with the innermost
-// enclosing Decl, which could be only loosely related---consider:
+// FindDeclInfo returns the syntax nodes involved in the declaration of the
+// types.Object with position pos, searching the given list of file syntax
+// trees.
 //
-//	var decl = f(  func(field int) {}  )
+// Pos may be the position of the name-defining identifier in a FuncDecl,
+// ValueSpec, TypeSpec, Field, or as a special case the position of
+// Ellipsis.Elt in an ellipsis field.
 //
-// It returns (nil, nil) if no Field or Decl is found at pos.
-func FindDeclAndField(files []*ast.File, pos token.Pos) (decl ast.Decl, field *ast.Field) {
+// If found, the resulting decl, spec, and field will be the innermost
+// instance of each node type surrounding pos.
+//
+// It returns a nil decl if no object-defining node is found at pos.
+func FindDeclInfo(files []*ast.File, pos token.Pos) (decl ast.Decl, spec ast.Spec, field *ast.Field) {
 	// panic(found{}) breaks off the traversal and
 	// causes the function to return normally.
 	type found struct{}
@@ -933,33 +972,45 @@
 
 		switch n := n.(type) {
 		case *ast.Field:
-			checkField := func(f ast.Node) {
-				if f.Pos() == pos {
-					field = n
-					for i := len(stack) - 1; i >= 0; i-- {
-						if d, ok := stack[i].(ast.Decl); ok {
-							decl = d // innermost enclosing decl
-							break
-						}
+			findEnclosingDeclAndSpec := func() {
+				for i := len(stack) - 1; i >= 0; i-- {
+					switch n := stack[i].(type) {
+					case ast.Spec:
+						spec = n
+					case ast.Decl:
+						decl = n
+						return
 					}
+				}
+			}
+
+			// Check each field name since you can have
+			// multiple names for the same type expression.
+			for _, id := range n.Names {
+				if id.Pos() == pos {
+					field = n
+					findEnclosingDeclAndSpec()
 					panic(found{})
 				}
 			}
 
 			// Check *ast.Field itself. This handles embedded
 			// fields which have no associated *ast.Ident name.
-			checkField(n)
-
-			// Check each field name since you can have
-			// multiple names for the same type expression.
-			for _, name := range n.Names {
-				checkField(name)
+			if n.Pos() == pos {
+				field = n
+				findEnclosingDeclAndSpec()
+				panic(found{})
 			}
 
-			// Also check "X" in "...X". This makes it easy
-			// to format variadic signature params properly.
-			if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil {
-				checkField(ell.Elt)
+			// Also check "X" in "...X". This makes it easy to format variadic
+			// signature params properly.
+			//
+			// TODO(rfindley): I don't understand this comment. How does finding the
+			// field in this case make it easier to format variadic signature params?
+			if ell, ok := n.Type.(*ast.Ellipsis); ok && ell.Elt != nil && ell.Elt.Pos() == pos {
+				field = n
+				findEnclosingDeclAndSpec()
+				panic(found{})
 			}
 
 		case *ast.FuncDecl:
@@ -969,17 +1020,19 @@
 			}
 
 		case *ast.GenDecl:
-			for _, spec := range n.Specs {
-				switch spec := spec.(type) {
+			for _, s := range n.Specs {
+				switch s := s.(type) {
 				case *ast.TypeSpec:
-					if spec.Name.Pos() == pos {
+					if s.Name.Pos() == pos {
 						decl = n
+						spec = s
 						panic(found{})
 					}
 				case *ast.ValueSpec:
-					for _, id := range spec.Names {
+					for _, id := range s.Names {
 						if id.Pos() == pos {
 							decl = n
+							spec = s
 							panic(found{})
 						}
 					}
@@ -992,5 +1045,5 @@
 		ast.Inspect(file, f)
 	}
 
-	return nil, nil
+	return nil, nil, nil
 }
diff --git a/gopls/internal/lsp/source/identifier.go b/gopls/internal/lsp/source/identifier.go
index 9497c65..1e1bc52 100644
--- a/gopls/internal/lsp/source/identifier.go
+++ b/gopls/internal/lsp/source/identifier.go
@@ -274,9 +274,7 @@
 	if err != nil {
 		return nil, err
 	}
-	// TODO(adonovan): there's no need to inspect the entire GetSyntax() slice:
-	// we already know it's declFile.File.
-	result.Declaration.node, _ = FindDeclAndField(declPkg.GetSyntax(), declPos) // may be nil
+	result.Declaration.node, _, _ = FindDeclInfo([]*ast.File{declFile.File}, declPos) // may be nil
 	result.Declaration.nodeFile = declFile
 
 	// Ensure that we have the full declaration, in case the declaration was
diff --git a/gopls/internal/lsp/source/signature_help.go b/gopls/internal/lsp/source/signature_help.go
index 4902496..17549b8 100644
--- a/gopls/internal/lsp/source/signature_help.go
+++ b/gopls/internal/lsp/source/signature_help.go
@@ -101,7 +101,7 @@
 		if err != nil {
 			return nil, 0, err
 		}
-		node, _ := FindDeclAndField(declPkg.GetSyntax(), obj.Pos()) // may be nil
+		node, _, _ := FindDeclInfo(declPkg.GetSyntax(), obj.Pos()) // may be nil
 		d, err := FindHoverContext(ctx, snapshot, pkg, obj, node, nil)
 		if err != nil {
 			return nil, 0, err
@@ -111,7 +111,11 @@
 	} else {
 		name = "func"
 	}
-	s := NewSignature(ctx, snapshot, pkg, sig, comment, qf)
+	mq := MetadataQualifierForFile(snapshot, pgf.File, pkg.Metadata())
+	s, err := NewSignature(ctx, snapshot, pkg, pgf.File, sig, comment, qf, mq)
+	if err != nil {
+		return nil, 0, err
+	}
 	paramInfo := make([]protocol.ParameterInformation, 0, len(s.params))
 	for _, p := range s.params {
 		paramInfo = append(paramInfo, protocol.ParameterInformation{Label: p})
diff --git a/gopls/internal/lsp/source/types_format.go b/gopls/internal/lsp/source/types_format.go
index 7a2605c..379d11a 100644
--- a/gopls/internal/lsp/source/types_format.go
+++ b/gopls/internal/lsp/source/types_format.go
@@ -16,6 +16,7 @@
 	"strings"
 
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
+	"golang.org/x/tools/internal/bug"
 	"golang.org/x/tools/internal/event"
 	"golang.org/x/tools/internal/event/tag"
 	"golang.org/x/tools/internal/typeparams"
@@ -193,7 +194,7 @@
 }
 
 // NewSignature returns formatted signature for a types.Signature struct.
-func NewSignature(ctx context.Context, s Snapshot, pkg Package, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier) *signature {
+func NewSignature(ctx context.Context, s Snapshot, pkg Package, srcFile *ast.File, sig *types.Signature, comment *ast.CommentGroup, qf types.Qualifier, mq MetadataQualifier) (*signature, error) {
 	var tparams []string
 	tpList := typeparams.ForSignature(sig)
 	for i := 0; i < tpList.Len(); i++ {
@@ -206,7 +207,10 @@
 	params := make([]string, 0, sig.Params().Len())
 	for i := 0; i < sig.Params().Len(); i++ {
 		el := sig.Params().At(i)
-		typ := FormatVarType(ctx, s, pkg, el, qf)
+		typ, err := FormatVarType(ctx, s, pkg, srcFile, el, qf, mq)
+		if err != nil {
+			return nil, err
+		}
 		p := typ
 		if el.Name() != "" {
 			p = el.Name() + " " + typ
@@ -221,7 +225,10 @@
 			needResultParens = true
 		}
 		el := sig.Results().At(i)
-		typ := FormatVarType(ctx, s, pkg, el, qf)
+		typ, err := FormatVarType(ctx, s, pkg, srcFile, el, qf, mq)
+		if err != nil {
+			return nil, err
+		}
 		if el.Name() == "" {
 			results = append(results, typ)
 		} else {
@@ -248,170 +255,239 @@
 		results:          results,
 		variadic:         sig.Variadic(),
 		needResultParens: needResultParens,
-	}
+	}, nil
 }
 
 // FormatVarType formats a *types.Var, accounting for type aliases.
 // To do this, it looks in the AST of the file in which the object is declared.
 // On any errors, it always falls back to types.TypeString.
-func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, obj *types.Var, qf types.Qualifier) string {
-	pkg, err := FindPackageFromPos(ctx, snapshot, srcpkg, obj.Pos())
-	if err != nil {
-		return types.TypeString(obj.Type(), qf)
+//
+// TODO(rfindley): this function could return the actual name used in syntax,
+// for better parameter names.
+func FormatVarType(ctx context.Context, snapshot Snapshot, srcpkg Package, srcFile *ast.File, obj *types.Var, qf types.Qualifier, mq MetadataQualifier) (string, error) {
+	// TODO(rfindley): This looks wrong. The previous comment said:
+	// "If the given expr refers to a type parameter, then use the
+	// object's Type instead of the type parameter declaration. This helps
+	// format the instantiated type as opposed to the original undeclared
+	// generic type".
+	//
+	// But of course, if obj is a type param, we are formatting a generic type
+	// and not an instantiated type. Handling for instantiated types must be done
+	// at a higher level.
+	//
+	// Left this during refactoring in order to preserve pre-existing logic.
+	if typeparams.IsTypeParam(obj.Type()) {
+		return types.TypeString(obj.Type(), qf), nil
 	}
 
-	_, field := FindDeclAndField(pkg.GetSyntax(), obj.Pos())
+	if obj.Pkg() == nil || !obj.Pos().IsValid() {
+		// This is defensive, though it is extremely unlikely we'll ever have a
+		// builtin var.
+		return types.TypeString(obj.Type(), qf), nil
+	}
+
+	targetpgf, pos, err := parseFull(ctx, snapshot, srcpkg, obj.Pos())
+	if err != nil {
+		return "", err // e.g. ctx cancelled
+	}
+
+	targetMeta := findFileInDepsMetadata(snapshot, srcpkg.Metadata(), targetpgf.URI)
+	if targetMeta == nil {
+		// If we have an object from type-checking, it should exist in a file in
+		// the forward transitive closure.
+		return "", bug.Errorf("failed to find file %q in deps of %q", targetpgf.URI, srcpkg.Metadata().ID)
+	}
+
+	decl, spec, field := FindDeclInfo([]*ast.File{targetpgf.File}, pos)
+
+	// We can't handle type parameters correctly, so we fall back on TypeString
+	// for parameterized decls.
+	if decl, _ := decl.(*ast.FuncDecl); decl != nil {
+		if typeparams.ForFuncType(decl.Type).NumFields() > 0 {
+			return types.TypeString(obj.Type(), qf), nil // in generic function
+		}
+		if decl.Recv != nil && len(decl.Recv.List) > 0 {
+			if x, _, _, _ := typeparams.UnpackIndexExpr(decl.Recv.List[0].Type); x != nil {
+				return types.TypeString(obj.Type(), qf), nil // in method of generic type
+			}
+		}
+	}
+	if spec, _ := spec.(*ast.TypeSpec); spec != nil && typeparams.ForTypeSpec(spec).NumFields() > 0 {
+		return types.TypeString(obj.Type(), qf), nil // in generic type decl
+	}
+
 	if field == nil {
-		return types.TypeString(obj.Type(), qf)
+		// TODO(rfindley): we should never reach here from an ordinary var, so
+		// should probably return an error here.
+		return types.TypeString(obj.Type(), qf), nil
 	}
 	expr := field.Type
 
-	// If the given expr refers to a type parameter, then use the
-	// object's Type instead of the type parameter declaration. This helps
-	// format the instantiated type as opposed to the original undeclared
-	// generic type.
-	if typeparams.IsTypeParam(pkg.GetTypesInfo().Types[expr].Type) {
-		return types.TypeString(obj.Type(), qf)
-	}
+	rq := requalifier(snapshot, targetpgf.File, targetMeta, mq)
 
 	// The type names in the AST may not be correctly qualified.
 	// Determine the package name to use based on the package that originated
 	// the query and the package in which the type is declared.
 	// We then qualify the value by cloning the AST node and editing it.
-	clonedInfo := make(map[token.Pos]*types.PkgName)
-	qualified := cloneExpr(expr, pkg.GetTypesInfo(), clonedInfo)
+	expr = qualifyTypeExpr(expr, rq)
 
 	// If the request came from a different package than the one in which the
 	// types are defined, we may need to modify the qualifiers.
-	qualified = qualifyExpr(qualified, srcpkg, pkg, clonedInfo, qf)
-	fmted := FormatNode(srcpkg.FileSet(), qualified)
-	return fmted
+	return FormatNodeFile(targetpgf.Tok, expr), nil
 }
 
-// qualifyExpr applies the "pkgName." prefix to any *ast.Ident in the expr.
-func qualifyExpr(expr ast.Expr, srcpkg, pkg Package, clonedInfo map[token.Pos]*types.PkgName, qf types.Qualifier) ast.Expr {
-	ast.Inspect(expr, func(n ast.Node) bool {
-		switch n := n.(type) {
-		case *ast.ArrayType, *ast.ChanType, *ast.Ellipsis,
-			*ast.FuncType, *ast.MapType, *ast.ParenExpr,
-			*ast.StarExpr, *ast.StructType, *ast.FieldList, *ast.Field:
-			// These are the only types that are cloned by cloneExpr below,
-			// so these are the only types that we can traverse and potentially
-			// modify. This is not an ideal approach, but it works for now.
-
-			// TODO(rFindley): can we eliminate this filtering entirely? This caused
-			// bugs in the past (golang/go#50539)
-			return true
-		case *ast.SelectorExpr:
-			// We may need to change any selectors in which the X is a package
-			// name and the Sel is exported.
-			x, ok := n.X.(*ast.Ident)
-			if !ok {
-				return false
-			}
-			obj, ok := clonedInfo[x.Pos()]
-			if !ok {
-				return false
-			}
-			x.Name = qf(obj.Imported())
-			return false
-		case *ast.Ident:
-			if srcpkg == pkg {
-				return false
-			}
-			// Only add the qualifier if the identifier is exported.
-			if ast.IsExported(n.Name) {
-				pkgName := qf(pkg.GetTypes())
-				n.Name = pkgName + "." + n.Name
-			}
-		}
-		return false
-	})
-	return expr
-}
-
-// cloneExpr only clones expressions that appear in the parameters or return
-// values of a function declaration. The original expression may be returned
-// to the caller in 2 cases:
+// qualifyTypeExpr clones the type expression expr after re-qualifying type
+// names using the given function, which accepts the current syntactic
+// qualifier (possibly "" for unqualified idents), and returns a new qualifier
+// (again, possibly "" if the identifier should be unqualified).
 //
-//  1. The expression has no pointer fields.
-//  2. The expression cannot appear in an *ast.FuncType, making it
-//     unnecessary to clone.
+// The resulting expression may be inaccurate: without type-checking we don't
+// properly account for "." imported identifiers or builtins.
 //
-// This function also keeps track of selector expressions in which the X is a
-// package name and marks them in a map along with their type information, so
-// that this information can be used when rewriting the expression.
-//
-// NOTE: This function is tailored to the use case of qualifyExpr, and should
-// be used with caution.
-func cloneExpr(expr ast.Expr, info *types.Info, clonedInfo map[token.Pos]*types.PkgName) ast.Expr {
+// TODO(rfindley): add many more tests for this function.
+func qualifyTypeExpr(expr ast.Expr, qf func(string) string) ast.Expr {
 	switch expr := expr.(type) {
 	case *ast.ArrayType:
 		return &ast.ArrayType{
 			Lbrack: expr.Lbrack,
-			Elt:    cloneExpr(expr.Elt, info, clonedInfo),
+			Elt:    qualifyTypeExpr(expr.Elt, qf),
 			Len:    expr.Len,
 		}
+
+	case *ast.BinaryExpr:
+		if expr.Op != token.OR {
+			return expr
+		}
+		return &ast.BinaryExpr{
+			X:     qualifyTypeExpr(expr.X, qf),
+			OpPos: expr.OpPos,
+			Op:    expr.Op,
+			Y:     qualifyTypeExpr(expr.Y, qf),
+		}
+
 	case *ast.ChanType:
 		return &ast.ChanType{
 			Arrow: expr.Arrow,
 			Begin: expr.Begin,
 			Dir:   expr.Dir,
-			Value: cloneExpr(expr.Value, info, clonedInfo),
+			Value: qualifyTypeExpr(expr.Value, qf),
 		}
+
 	case *ast.Ellipsis:
 		return &ast.Ellipsis{
 			Ellipsis: expr.Ellipsis,
-			Elt:      cloneExpr(expr.Elt, info, clonedInfo),
+			Elt:      qualifyTypeExpr(expr.Elt, qf),
 		}
+
 	case *ast.FuncType:
 		return &ast.FuncType{
 			Func:    expr.Func,
-			Params:  cloneFieldList(expr.Params, info, clonedInfo),
-			Results: cloneFieldList(expr.Results, info, clonedInfo),
+			Params:  qualifyFieldList(expr.Params, qf),
+			Results: qualifyFieldList(expr.Results, qf),
 		}
+
 	case *ast.Ident:
-		return cloneIdent(expr)
+		// Unqualified type (builtin, package local, or dot-imported).
+
+		// Don't qualify names that look like builtins.
+		//
+		// Without type-checking this may be inaccurate. It could be made accurate
+		// by doing syntactic object resolution for the entire package, but that
+		// does not seem worthwhile and we generally want to avoid using
+		// ast.Object, which may be inaccurate.
+		if obj := types.Universe.Lookup(expr.Name); obj != nil {
+			return expr
+		}
+
+		newName := qf("")
+		if newName != "" {
+			return &ast.SelectorExpr{
+				X: &ast.Ident{
+					NamePos: expr.Pos(),
+					Name:    newName,
+				},
+				Sel: expr,
+			}
+		}
+		return expr
+
+	case *ast.IndexExpr:
+		return &ast.IndexExpr{
+			X:      qualifyTypeExpr(expr.X, qf),
+			Lbrack: expr.Lbrack,
+			Index:  qualifyTypeExpr(expr.Index, qf),
+			Rbrack: expr.Rbrack,
+		}
+
+	case *typeparams.IndexListExpr:
+		indices := make([]ast.Expr, len(expr.Indices))
+		for i, idx := range expr.Indices {
+			indices[i] = qualifyTypeExpr(idx, qf)
+		}
+		return &typeparams.IndexListExpr{
+			X:       qualifyTypeExpr(expr.X, qf),
+			Lbrack:  expr.Lbrack,
+			Indices: indices,
+			Rbrack:  expr.Rbrack,
+		}
+
+	case *ast.InterfaceType:
+		return &ast.InterfaceType{
+			Interface:  expr.Interface,
+			Methods:    qualifyFieldList(expr.Methods, qf),
+			Incomplete: expr.Incomplete,
+		}
+
 	case *ast.MapType:
 		return &ast.MapType{
 			Map:   expr.Map,
-			Key:   cloneExpr(expr.Key, info, clonedInfo),
-			Value: cloneExpr(expr.Value, info, clonedInfo),
+			Key:   qualifyTypeExpr(expr.Key, qf),
+			Value: qualifyTypeExpr(expr.Value, qf),
 		}
+
 	case *ast.ParenExpr:
 		return &ast.ParenExpr{
 			Lparen: expr.Lparen,
 			Rparen: expr.Rparen,
-			X:      cloneExpr(expr.X, info, clonedInfo),
+			X:      qualifyTypeExpr(expr.X, qf),
 		}
+
 	case *ast.SelectorExpr:
-		s := &ast.SelectorExpr{
-			Sel: cloneIdent(expr.Sel),
-			X:   cloneExpr(expr.X, info, clonedInfo),
-		}
-		if x, ok := expr.X.(*ast.Ident); ok && ast.IsExported(expr.Sel.Name) {
-			if obj, ok := info.ObjectOf(x).(*types.PkgName); ok {
-				clonedInfo[s.X.Pos()] = obj
+		if id, ok := expr.X.(*ast.Ident); ok {
+			// qualified type
+			newName := qf(id.Name)
+			if newName == "" {
+				return expr.Sel
+			}
+			return &ast.SelectorExpr{
+				X: &ast.Ident{
+					NamePos: id.NamePos,
+					Name:    newName,
+				},
+				Sel: expr.Sel,
 			}
 		}
-		return s
+		return expr
+
 	case *ast.StarExpr:
 		return &ast.StarExpr{
 			Star: expr.Star,
-			X:    cloneExpr(expr.X, info, clonedInfo),
+			X:    qualifyTypeExpr(expr.X, qf),
 		}
+
 	case *ast.StructType:
 		return &ast.StructType{
 			Struct:     expr.Struct,
-			Fields:     cloneFieldList(expr.Fields, info, clonedInfo),
+			Fields:     qualifyFieldList(expr.Fields, qf),
 			Incomplete: expr.Incomplete,
 		}
+
 	default:
 		return expr
 	}
 }
 
-func cloneFieldList(fl *ast.FieldList, info *types.Info, clonedInfo map[token.Pos]*types.PkgName) *ast.FieldList {
+func qualifyFieldList(fl *ast.FieldList, qf func(string) string) *ast.FieldList {
 	if fl == nil {
 		return nil
 	}
@@ -423,16 +499,12 @@
 	}
 	list := make([]*ast.Field, 0, len(fl.List))
 	for _, f := range fl.List {
-		var names []*ast.Ident
-		for _, n := range f.Names {
-			names = append(names, cloneIdent(n))
-		}
 		list = append(list, &ast.Field{
 			Comment: f.Comment,
 			Doc:     f.Doc,
-			Names:   names,
+			Names:   f.Names,
 			Tag:     f.Tag,
-			Type:    cloneExpr(f.Type, info, clonedInfo),
+			Type:    qualifyTypeExpr(f.Type, qf),
 		})
 	}
 	return &ast.FieldList{
@@ -441,11 +513,3 @@
 		List:    list,
 	}
 }
-
-func cloneIdent(ident *ast.Ident) *ast.Ident {
-	return &ast.Ident{
-		NamePos: ident.NamePos,
-		Name:    ident.Name,
-		Obj:     ident.Obj,
-	}
-}
diff --git a/gopls/internal/lsp/source/util.go b/gopls/internal/lsp/source/util.go
index c9a5102..6a8dc26 100644
--- a/gopls/internal/lsp/source/util.go
+++ b/gopls/internal/lsp/source/util.go
@@ -20,6 +20,8 @@
 	"golang.org/x/tools/gopls/internal/lsp/protocol"
 	"golang.org/x/tools/gopls/internal/lsp/safetoken"
 	"golang.org/x/tools/gopls/internal/span"
+	"golang.org/x/tools/internal/bug"
+	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
 )
 
@@ -161,6 +163,24 @@
 	return buf.String()
 }
 
+// FormatNodeFile is like FormatNode, but requires only the token.File for the
+// syntax containing the given ast node.
+func FormatNodeFile(file *token.File, n ast.Node) string {
+	fset := SingletonFileSet(file)
+	return FormatNode(fset, n)
+}
+
+// SingletonFileSet creates a new token.FileSet containing a file that is
+// identical to f (same base, size, and lines), for use in APIs that require a
+// FileSet.
+func SingletonFileSet(f *token.File) *token.FileSet {
+	fset := token.NewFileSet()
+	f2 := fset.AddFile(f.Name(), f.Base(), f.Size())
+	lines := tokeninternal.GetLines(f)
+	f2.SetLines(lines)
+	return fset
+}
+
 // Deref returns a pointer's element type, traversing as many levels as needed.
 // Otherwise it returns typ.
 //
@@ -236,13 +256,46 @@
 	return nil, nil, fmt.Errorf("no file for %s in deps of package %s", uri, pkg.Metadata().ID)
 }
 
+// findFileInDepsMetadata finds package metadata containing URI in the
+// transitive dependencies of m. When using the Go command, the answer is
+// unique.
+//
+// TODO(rfindley): refactor to share logic with findPackageInDeps?
+func findFileInDepsMetadata(s MetadataSource, m *Metadata, uri span.URI) *Metadata {
+	seen := make(map[PackageID]bool)
+	var search func(*Metadata) *Metadata
+	search = func(m *Metadata) *Metadata {
+		if seen[m.ID] {
+			return nil
+		}
+		seen[m.ID] = true
+		for _, cgf := range m.CompiledGoFiles {
+			if cgf == uri {
+				return m
+			}
+		}
+		for _, dep := range m.DepsByPkgPath {
+			m := s.Metadata(dep)
+			if m == nil {
+				bug.Reportf("nil metadata for %q", dep)
+				continue
+			}
+			if found := search(m); found != nil {
+				return found
+			}
+		}
+		return nil
+	}
+	return search(m)
+}
+
 // recursiveDeps finds unique transitive dependencies of m, including m itself.
 //
 // Invariant: for the resulting slice res, res[0] == m.ID.
 //
 // TODO(rfindley): consider replacing this with a snapshot.ForwardDependencies
 // method, or exposing the metadata graph itself.
-func recursiveDeps(s interface{ Metadata(PackageID) *Metadata }, m *Metadata) []PackageID {
+func recursiveDeps(s MetadataSource, m *Metadata) []PackageID {
 	seen := make(map[PackageID]bool)
 	var ids []PackageID
 	var add func(*Metadata)
@@ -254,6 +307,10 @@
 		ids = append(ids, m.ID)
 		for _, dep := range m.DepsByPkgPath {
 			m := s.Metadata(dep)
+			if m == nil {
+				bug.Reportf("nil metadata for %q", dep)
+				continue
+			}
 			add(m)
 		}
 	}
@@ -329,6 +386,134 @@
 	}
 }
 
+// requalifier returns a function that re-qualifies identifiers and qualified
+// identifiers contained in targetFile using the given metadata qualifier.
+func requalifier(s MetadataSource, targetFile *ast.File, targetMeta *Metadata, mq MetadataQualifier) func(string) string {
+	qm := map[string]string{
+		"": mq(targetMeta.Name, "", targetMeta.PkgPath),
+	}
+
+	// Construct mapping of import paths to their defined or implicit names.
+	for _, imp := range targetFile.Imports {
+		name, pkgName, impPath, pkgPath := importInfo(s, imp, targetMeta)
+
+		// Re-map the target name for the source file.
+		qm[name] = mq(pkgName, impPath, pkgPath)
+	}
+
+	return func(name string) string {
+		if newName, ok := qm[name]; ok {
+			return newName
+		}
+		return name
+	}
+}
+
+// A MetadataQualifier is a function that qualifies an identifier declared in a
+// package with the given package name, import path, and package path.
+//
+// In scenarios where metadata is missing, the provided PackageName and
+// PackagePath may be empty, but ImportPath must always be non-empty.
+type MetadataQualifier func(PackageName, ImportPath, PackagePath) string
+
+// MetadataQualifierForFile returns a metadata qualifier that chooses the best
+// qualification of an imported package relative to the file f in package with
+// metadata m.
+func MetadataQualifierForFile(s MetadataSource, f *ast.File, m *Metadata) MetadataQualifier {
+	// Record local names for import paths.
+	localNames := make(map[ImportPath]string) // local names for imports in f
+	for _, imp := range f.Imports {
+		name, _, impPath, _ := importInfo(s, imp, m)
+		localNames[impPath] = name
+	}
+
+	// Record a package path -> import path mapping.
+	inverseDeps := make(map[PackageID]PackagePath)
+	for path, id := range m.DepsByPkgPath {
+		inverseDeps[id] = path
+	}
+	importsByPkgPath := make(map[PackagePath]ImportPath) // best import paths by pkgPath
+	for impPath, id := range m.DepsByImpPath {
+		if id == "" {
+			continue
+		}
+		pkgPath := inverseDeps[id]
+		_, hasPath := importsByPkgPath[pkgPath]
+		_, hasImp := localNames[impPath]
+		// In rare cases, there may be multiple import paths with the same package
+		// path. In such scenarios, prefer an import path that already exists in
+		// the file.
+		if !hasPath || hasImp {
+			importsByPkgPath[pkgPath] = impPath
+		}
+	}
+
+	return func(pkgName PackageName, impPath ImportPath, pkgPath PackagePath) string {
+		// If supplied, translate the package path to an import path in the source
+		// package.
+		if pkgPath != "" {
+			if srcImp := importsByPkgPath[pkgPath]; srcImp != "" {
+				impPath = srcImp
+			}
+			if pkgPath == m.PkgPath {
+				return ""
+			}
+		}
+		if localName, ok := localNames[impPath]; ok && impPath != "" {
+			return string(localName)
+		}
+		if pkgName != "" {
+			return string(pkgName)
+		}
+		idx := strings.LastIndexByte(string(impPath), '/')
+		return string(impPath[idx+1:])
+	}
+}
+
+// importInfo collects information about the import specified by imp,
+// extracting its file-local name, package name, import path, and package path.
+//
+// If metadata is missing for the import, the resulting package name and
+// package path may be empty, and the file local name may be guessed based on
+// the import path.
+//
+// Note: previous versions of this helper used a PackageID->PackagePath map
+// extracted from m, for extracting package path even in the case where
+// metadata for a dep was missing. This should not be necessary, as we should
+// always have metadata for IDs contained in DepsByPkgPath.
+func importInfo(s MetadataSource, imp *ast.ImportSpec, m *Metadata) (string, PackageName, ImportPath, PackagePath) {
+	var (
+		name    string // local name
+		pkgName PackageName
+		impPath = UnquoteImportPath(imp)
+		pkgPath PackagePath
+	)
+
+	// If the import has a local name, use it.
+	if imp.Name != nil {
+		name = imp.Name.Name
+	}
+
+	// Try to find metadata for the import. If successful and there is no local
+	// name, the package name is the local name.
+	if depID := m.DepsByImpPath[impPath]; depID != "" {
+		if depm := s.Metadata(depID); depm != nil {
+			if name == "" {
+				name = string(depm.Name)
+			}
+			pkgName = depm.Name
+			pkgPath = depm.PkgPath
+		}
+	}
+
+	// If the local name is still unknown, guess it based on the import path.
+	if name == "" {
+		idx := strings.LastIndexByte(string(impPath), '/')
+		name = string(impPath[idx+1:])
+	}
+	return name, pkgName, impPath, pkgPath
+}
+
 // isDirective reports whether c is a comment directive.
 //
 // Copied and adapted from go/src/go/ast/ast.go.
diff --git a/gopls/internal/lsp/source/view.go b/gopls/internal/lsp/source/view.go
index 8798e1b..b86c3cc 100644
--- a/gopls/internal/lsp/source/view.go
+++ b/gopls/internal/lsp/source/view.go
@@ -354,14 +354,22 @@
 	GoVersionString() string
 }
 
-// A FileSource maps uris to FileHandles. This abstraction exists both for
-// testability, and so that algorithms can be run equally on session and
-// snapshot files.
+// A FileSource maps uris to FileHandles.
 type FileSource interface {
 	// GetFile returns the FileHandle for a given URI.
 	GetFile(ctx context.Context, uri span.URI) (FileHandle, error)
 }
 
+// A MetadataSource maps package IDs to metadata.
+//
+// TODO(rfindley): replace this with a concrete metadata graph, once it is
+// exposed from the snapshot.
+type MetadataSource interface {
+	// Metadata returns Metadata for the given package ID, or nil if it does not
+	// exist.
+	Metadata(PackageID) *Metadata
+}
+
 // A ParsedGoFile contains the results of parsing a Go file.
 type ParsedGoFile struct {
 	URI  span.URI
diff --git a/gopls/internal/regtest/completion/completion_test.go b/gopls/internal/regtest/completion/completion_test.go
index 7d9214f..753cd68 100644
--- a/gopls/internal/regtest/completion/completion_test.go
+++ b/gopls/internal/regtest/completion/completion_test.go
@@ -482,7 +482,7 @@
 			t.Error(diff)
 		}
 		if completions.Items[0].Tags == nil {
-			t.Errorf("expected Tags to show deprecation %#v", diff[0])
+			t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags)
 		}
 		loc = env.RegexpSearch("prog.go", "= badP")
 		loc.Range.Start.Character += uint32(protocol.UTF16Len([]byte("= badP")))
@@ -492,7 +492,7 @@
 			t.Error(diff)
 		}
 		if completions.Items[0].Tags == nil {
-			t.Errorf("expected Tags to show deprecation %#v", diff[0])
+			t.Errorf("expected Tags to show deprecation %#v", completions.Items[0].Tags)
 		}
 	})
 }
diff --git a/internal/gcimporter/iexport.go b/internal/gcimporter/iexport.go
index 8bd51ec..ba53cdc 100644
--- a/internal/gcimporter/iexport.go
+++ b/internal/gcimporter/iexport.go
@@ -21,9 +21,8 @@
 	"sort"
 	"strconv"
 	"strings"
-	"sync"
-	"unsafe"
 
+	"golang.org/x/tools/internal/tokeninternal"
 	"golang.org/x/tools/internal/typeparams"
 )
 
@@ -210,7 +209,7 @@
 	// Sort the set of needed offsets. Duplicates are harmless.
 	sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] })
 
-	lines := getLines(file) // byte offset of each line start
+	lines := tokeninternal.GetLines(file) // byte offset of each line start
 	w.uint64(uint64(len(lines)))
 
 	// Rather than record the entire array of line start offsets,
@@ -1179,50 +1178,3 @@
 	q.head++
 	return obj
 }
-
-// getLines returns the table of line-start offsets from a token.File.
-func getLines(file *token.File) []int {
-	// Use this variant once proposal #57708 is implemented:
-	//
-	// if file, ok := file.(interface{ Lines() []int }); ok {
-	// 	return file.Lines()
-	// }
-
-	// This declaration must match that of token.File.
-	// This creates a risk of dependency skew.
-	// For now we check that the size of the two
-	// declarations is the same, on the (fragile) assumption
-	// that future changes would add fields.
-	type tokenFile119 struct {
-		_     string
-		_     int
-		_     int
-		mu    sync.Mutex // we're not complete monsters
-		lines []int
-		_     []struct{}
-	}
-	type tokenFile118 struct {
-		_ *token.FileSet // deleted in go1.19
-		tokenFile119
-	}
-
-	type uP = unsafe.Pointer
-	switch unsafe.Sizeof(*file) {
-	case unsafe.Sizeof(tokenFile118{}):
-		var ptr *tokenFile118
-		*(*uP)(uP(&ptr)) = uP(file)
-		ptr.mu.Lock()
-		defer ptr.mu.Unlock()
-		return ptr.lines
-
-	case unsafe.Sizeof(tokenFile119{}):
-		var ptr *tokenFile119
-		*(*uP)(uP(&ptr)) = uP(file)
-		ptr.mu.Lock()
-		defer ptr.mu.Unlock()
-		return ptr.lines
-
-	default:
-		panic("unexpected token.File size")
-	}
-}
diff --git a/internal/tokeninternal/tokeninternal.go b/internal/tokeninternal/tokeninternal.go
new file mode 100644
index 0000000..a3fb2d4
--- /dev/null
+++ b/internal/tokeninternal/tokeninternal.go
@@ -0,0 +1,59 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// package tokeninternal provides access to some internal features of the token
+// package.
+package tokeninternal
+
+import (
+	"go/token"
+	"sync"
+	"unsafe"
+)
+
+// GetLines returns the table of line-start offsets from a token.File.
+func GetLines(file *token.File) []int {
+	// token.File has a Lines method on Go 1.21 and later.
+	if file, ok := (interface{})(file).(interface{ Lines() []int }); ok {
+		return file.Lines()
+	}
+
+	// This declaration must match that of token.File.
+	// This creates a risk of dependency skew.
+	// For now we check that the size of the two
+	// declarations is the same, on the (fragile) assumption
+	// that future changes would add fields.
+	type tokenFile119 struct {
+		_     string
+		_     int
+		_     int
+		mu    sync.Mutex // we're not complete monsters
+		lines []int
+		_     []struct{}
+	}
+	type tokenFile118 struct {
+		_ *token.FileSet // deleted in go1.19
+		tokenFile119
+	}
+
+	type uP = unsafe.Pointer
+	switch unsafe.Sizeof(*file) {
+	case unsafe.Sizeof(tokenFile118{}):
+		var ptr *tokenFile118
+		*(*uP)(uP(&ptr)) = uP(file)
+		ptr.mu.Lock()
+		defer ptr.mu.Unlock()
+		return ptr.lines
+
+	case unsafe.Sizeof(tokenFile119{}):
+		var ptr *tokenFile119
+		*(*uP)(uP(&ptr)) = uP(file)
+		ptr.mu.Lock()
+		defer ptr.mu.Unlock()
+		return ptr.lines
+
+	default:
+		panic("unexpected token.File size")
+	}
+}